From c145a238d768aa17c3aebe120c20a46bfbec6b99 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 23 Jul 2023 18:16:14 -0500 Subject: [PATCH 001/434] fix: pin python-semantic-release to fix release process (#1200) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d46552da..c7598b26 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -130,7 +130,7 @@ jobs: uses: actions/setup-python@v4 - name: Install python-semantic-release - run: pipx install python-semantic-release + run: pipx install python-semantic-release==7.34.6 - name: Get Release Tag id: release_tag From 249395a6c42a8c4712e62852ec4cbe423111800c Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 23 Jul 2023 23:24:05 +0000 Subject: [PATCH 002/434] 0.71.3 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5fc57d15..fca089a1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.71.3 (2023-07-23) + +### Fix + +* Pin python-semantic-release to fix release process ([#1200](https://github.com/python-zeroconf/python-zeroconf/issues/1200)) ([`c145a23`](https://github.com/python-zeroconf/python-zeroconf/commit/c145a238d768aa17c3aebe120c20a46bfbec6b99)) + ## v0.71.2 (2023-07-23) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 3966e60a..4f44812c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.71.2" +version = "0.71.3" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 66fa45d9..5a8119e7 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.71.2' +__version__ = '0.71.3' __license__ = 'LGPL' From fed3dec88fd87c7a5a11bd2513f6c80d9967b15e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 24 Jul 2023 10:52:35 -0500 Subject: [PATCH 003/434] chore: add cpython beta to CI (#1203) --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c7598b26..677dfad1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,6 +41,7 @@ jobs: - "3.9" - "3.10" - "3.11" + - "3.12.0-beta.4" - "pypy-3.7" os: - ubuntu-latest From b272d75abd982f3be1f4b20f683cac38011cc6f4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 24 Jul 2023 11:04:50 -0500 Subject: [PATCH 004/434] fix: cleanup naming from previous refactoring in ServiceInfo (#1202) --- src/zeroconf/_services/__init__.py | 2 - src/zeroconf/_services/info.py | 62 ++++++++++++++++-------------- 2 files changed, 33 insertions(+), 31 deletions(-) diff --git a/src/zeroconf/_services/__init__.py b/src/zeroconf/_services/__init__.py index 968b5daf..cf54d7f0 100644 --- a/src/zeroconf/_services/__init__.py +++ b/src/zeroconf/_services/__init__.py @@ -46,7 +46,6 @@ def update_service(self, zc: 'Zeroconf', type_: str, name: str) -> None: class Signal: - __slots__ = ('_handlers',) def __init__(self) -> None: @@ -62,7 +61,6 @@ def registration_interface(self) -> 'SignalRegistrationInterface': class SignalRegistrationInterface: - __slots__ = ('_handlers',) def __init__(self, handlers: List[Callable[..., None]]) -> None: diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 8ff1f665..d3e6f082 100644 --- a/src/zeroconf/_services/info.py +++ 
b/src/zeroconf/_services/info.py @@ -21,9 +21,9 @@ """ import asyncio -import ipaddress import random from functools import lru_cache +from ipaddress import IPv4Address, IPv6Address, _BaseAddress, ip_address from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast from .._dns import ( @@ -90,7 +90,7 @@ def instance_name_from_service_info(info: "ServiceInfo") -> str: return info.name[: -len(service_name) - 1] -_cached_ip_addresses = lru_cache(maxsize=256)(ipaddress.ip_address) +_cached_ip_addresses = lru_cache(maxsize=256)(ip_address) class ServiceInfo(RecordUpdateListener): @@ -158,8 +158,8 @@ def __init__( self.type = type_ self._name = name self.key = name.lower() - self._ipv4_addresses: List[ipaddress.IPv4Address] = [] - self._ipv6_addresses: List[ipaddress.IPv6Address] = [] + self._ipv4_addresses: List[IPv4Address] = [] + self._ipv6_addresses: List[IPv6Address] = [] if addresses is not None: self.addresses = addresses elif parsed_addresses is not None: @@ -260,7 +260,7 @@ def addresses_by_version(self, version: IPVersion) -> List[bytes]: def ip_addresses_by_version( self, version: IPVersion - ) -> Union[List[ipaddress.IPv4Address], List[ipaddress.IPv6Address], List[ipaddress._BaseAddress]]: + ) -> Union[List[IPv4Address], List[IPv6Address], List[_BaseAddress]]: """List ip_address objects matching IP version. 
Addresses are guaranteed to be returned in LIFO (last in, first out) @@ -273,7 +273,7 @@ def ip_addresses_by_version( def _ip_addresses_by_version_value( self, version_value: int - ) -> Union[List[ipaddress.IPv4Address], List[ipaddress.IPv6Address], List[ipaddress._BaseAddress]]: + ) -> Union[List[IPv4Address], List[IPv6Address], List[_BaseAddress]]: """Backend for addresses_by_version that uses the raw value.""" if version_value == _IPVersion_All_value: return [*self._ipv4_addresses, *self._ipv6_addresses] @@ -366,31 +366,31 @@ def get_name(self) -> str: def _get_ip_addresses_from_cache_lifo( self, zc: 'Zeroconf', now: float, type: int - ) -> List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]]: + ) -> List[Union[IPv4Address, IPv6Address]]: """Set IPv6 addresses from the cache.""" - address_list: List[Union[ipaddress.IPv4Address, ipaddress.IPv6Address]] = [] + address_list: List[Union[IPv4Address, IPv6Address]] = [] for record in self._get_address_records_from_cache_by_type(zc, type): if record.is_expired(now): continue try: - ip_address = _cached_ip_addresses(record.address) + ip_addr = _cached_ip_addresses(record.address) except ValueError: continue else: - address_list.append(ip_address) + address_list.append(ip_addr) address_list.reverse() # Reverse to get LIFO order return address_list def _set_ipv6_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: """Set IPv6 addresses from the cache.""" self._ipv6_addresses = cast( - "List[ipaddress.IPv6Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) + "List[IPv6Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) ) def _set_ipv4_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: """Set IPv4 addresses from the cache.""" self._ipv4_addresses = cast( - "List[ipaddress.IPv4Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) + "List[IPv4Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) ) def update_record(self, zc: 
'Zeroconf', now: float, record: Optional[DNSRecord]) -> None: @@ -431,46 +431,49 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo if record.is_expired(now): return False - if record.key == self.server_key and isinstance(record, DNSAddress): + record_key = record.key + if record_key == self.server_key and type(record) is DNSAddress: try: ip_addr = _cached_ip_addresses(record.address) except ValueError as ex: log.warning("Encountered invalid address while processing %s: %s", record, ex) return False - if ip_addr.version == 4: - if not self._ipv4_addresses: + if type(ip_addr) is IPv4Address: + if self._ipv4_addresses: self._set_ipv4_addresses_from_cache(zc, now) - if ip_addr not in self._ipv4_addresses: - self._ipv4_addresses.insert(0, ip_addr) + ipv4_addresses = self._ipv4_addresses + if ip_addr not in ipv4_addresses: + ipv4_addresses.insert(0, ip_addr) return True - elif ip_addr != self._ipv4_addresses[0]: - self._ipv4_addresses.remove(ip_addr) - self._ipv4_addresses.insert(0, ip_addr) + elif ip_addr != ipv4_addresses[0]: + ipv4_addresses.remove(ip_addr) + ipv4_addresses.insert(0, ip_addr) return False if not self._ipv6_addresses: self._set_ipv6_addresses_from_cache(zc, now) + ipv6_addresses = self._ipv6_addresses if ip_addr not in self._ipv6_addresses: - self._ipv6_addresses.insert(0, ip_addr) + ipv6_addresses.insert(0, ip_addr) return True elif ip_addr != self._ipv6_addresses[0]: - self._ipv6_addresses.remove(ip_addr) - self._ipv6_addresses.insert(0, ip_addr) + ipv6_addresses.remove(ip_addr) + ipv6_addresses.insert(0, ip_addr) return False - if record.key != self.key: + if record_key != self.key: return False - if record.type == _TYPE_TXT and isinstance(record, DNSText): + if record.type == _TYPE_TXT and type(record) is DNSText: self._set_text(record.text) return True - if record.type == _TYPE_SRV and isinstance(record, DNSService): + if record.type == _TYPE_SRV and type(record) is DNSService: old_server_key = self.server_key 
self.name = record.name self.server = record.server @@ -495,16 +498,17 @@ def dns_addresses( name = self.server or self.name ttl = override_ttl if override_ttl is not None else self.host_ttl class_ = _CLASS_IN | _CLASS_UNIQUE + version_value = version.value return [ DNSAddress( name, - _TYPE_AAAA if address.version == 6 else _TYPE_A, + _TYPE_AAAA if type(ip_addr) is IPv6Address else _TYPE_A, class_, ttl, - address.packed, + ip_addr.packed, created=created, ) - for address in self._ip_addresses_by_version_value(version.value) + for ip_addr in self._ip_addresses_by_version_value(version_value) ] def dns_pointer(self, override_ttl: Optional[int] = None, created: Optional[float] = None) -> DNSPointer: From 391c698c403dcf18debfb57d529d5f2bd4316c73 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 24 Jul 2023 16:13:08 +0000 Subject: [PATCH 005/434] 0.71.4 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fca089a1..78e89ecc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.71.4 (2023-07-24) + +### Fix + +* Cleanup naming from previous refactoring in ServiceInfo ([#1202](https://github.com/python-zeroconf/python-zeroconf/issues/1202)) ([`b272d75`](https://github.com/python-zeroconf/python-zeroconf/commit/b272d75abd982f3be1f4b20f683cac38011cc6f4)) + ## v0.71.3 (2023-07-23) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 4f44812c..a7624b94 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.71.3" +version = "0.71.4" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 5a8119e7..f427c220 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.71.3' +__version__ = '0.71.4' __license__ = 'LGPL' From d92aad287ac6bd6394ebf955fe5d1d2b4b8490e3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 29 Jul 2023 08:35:51 -0500 Subject: [PATCH 006/434] chore: add test for concurrent waiting on AsyncServiceInfo (#1204) --- tests/services/test_info.py | 73 ++++++++++++++++++++++++++++++++++++ tests/services/test_types.py | 4 -- 2 files changed, 73 insertions(+), 4 deletions(-) diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 13f48392..64a51bd1 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -18,6 +18,7 @@ import zeroconf as r from zeroconf import DNSAddress, const +from zeroconf._services import info from zeroconf._services.info import ServiceInfo from zeroconf._utils.net import IPVersion from zeroconf.asyncio import AsyncZeroconf @@ -1427,3 +1428,75 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x02'] await aiozc.async_close() + + +@pytest.mark.asyncio +@patch.object(info, "_LISTENER_TIME", 10000000) +async def test_release_wait_when_new_recorded_added_concurrency(): + """Test that concurrent async_request returns as soon as new matching records are added to the cache.""" + type_ = "_http._tcp.local." + registration_name = "multiareccon.%s" % type_ + desc = {'path': '/~paulsm/'} + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + host = "multahostcon.local." 
+ await aiozc.zeroconf.async_wait_for_start() + + # New kwarg way + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, host) + tasks = [asyncio.create_task(info.async_request(aiozc.zeroconf, timeout=200000)) for _ in range(10)] + await asyncio.sleep(0.1) + for task in tasks: + assert not task.done() + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + generated.add_answer_at_time( + r.DNSNsec( + registration_name, + const._TYPE_NSEC, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + registration_name, + [const._TYPE_AAAA], + ), + 0, + ) + generated.add_answer_at_time( + r.DNSService( + registration_name, + const._TYPE_SRV, + const._CLASS_IN | const._CLASS_UNIQUE, + 10000, + 0, + 0, + 80, + host, + ), + 0, + ) + generated.add_answer_at_time( + r.DNSAddress( + host, + const._TYPE_A, + const._CLASS_IN, + 10000, + b'\x7f\x00\x00\x01', + ), + 0, + ) + generated.add_answer_at_time( + r.DNSText( + registration_name, + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 10000, + b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + ), + 0, + ) + await asyncio.sleep(0) + for task in tasks: + assert not task.done() + aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + _, pending = await asyncio.wait(tasks, timeout=2) + assert not pending + assert info.addresses == [b'\x7f\x00\x00\x01'] + await aiozc.async_close() diff --git a/tests/services/test_types.py b/tests/services/test_types.py index a8b36b8e..1afe6d53 100644 --- a/tests/services/test_types.py +++ b/tests/services/test_types.py @@ -48,7 +48,6 @@ def test_integration_with_listener(disable_duplicate_packet_suppression): ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find(interfaces=['127.0.0.1'], timeout=2) assert type_ in service_types _clear_cache(zeroconf_registrar) @@ -81,7 +80,6 @@ def test_integration_with_listener_v6_records(disable_duplicate_packet_suppressi ) zeroconf_registrar.registry.async_add(info) try: - 
service_types = ZeroconfServiceTypes.find(interfaces=['127.0.0.1'], timeout=2) assert type_ in service_types _clear_cache(zeroconf_registrar) @@ -114,7 +112,6 @@ def test_integration_with_listener_ipv6(disable_duplicate_packet_suppression): ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find(ip_version=r.IPVersion.V6Only, timeout=2) assert type_ in service_types _clear_cache(zeroconf_registrar) @@ -147,7 +144,6 @@ def test_integration_with_subtype_and_listener(disable_duplicate_packet_suppress ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find(interfaces=['127.0.0.1'], timeout=2) assert discovery_type in service_types _clear_cache(zeroconf_registrar) From 8019a73c952f2fc4c88d849aab970fafedb316d8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 1 Aug 2023 14:35:42 -1000 Subject: [PATCH 007/434] fix: improve performance of ServiceInfo.async_request (#1205) --- src/zeroconf/_services/info.py | 41 ++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 17 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index d3e6f082..cc1db05f 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -39,11 +39,7 @@ from .._logger import log from .._protocol.outgoing import DNSOutgoing from .._updates import RecordUpdate, RecordUpdateListener -from .._utils.asyncio import ( - get_running_loop, - run_coro_with_timeout, - wait_event_or_timeout, -) +from .._utils.asyncio import get_running_loop, run_coro_with_timeout from .._utils.name import service_type_name from .._utils.net import IPVersion, _encode_address from .._utils.time import current_time_millis, millis_to_seconds @@ -131,6 +127,7 @@ class ServiceInfo(RecordUpdateListener): "host_ttl", "other_ttl", "interface_index", + "_new_records_futures", ) def __init__( @@ -177,7 +174,7 @@ def __init__( self.host_ttl = host_ttl self.other_ttl = other_ttl 
self.interface_index = interface_index - self._notify_event: Optional[asyncio.Event] = None + self._new_records_futures: List[asyncio.Future] = [] @property def name(self) -> str: @@ -235,9 +232,14 @@ def properties(self) -> Dict: async def async_wait(self, timeout: float) -> None: """Calling task waits for a given number of milliseconds or until notified.""" - if self._notify_event is None: - self._notify_event = asyncio.Event() - await wait_event_or_timeout(self._notify_event, timeout=millis_to_seconds(timeout)) + loop = asyncio.get_running_loop() + future = loop.create_future() + self._new_records_futures.append(future) + handle = loop.call_later(millis_to_seconds(timeout), future.set_result, None) + try: + await future + finally: + handle.cancel() def addresses_by_version(self, version: IPVersion) -> List[bytes]: """List addresses matching IP version. @@ -409,9 +411,11 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordU This method will be run in the event loop. """ - if self._process_records_threadsafe(zc, now, records) and self._notify_event: - self._notify_event.set() - self._notify_event.clear() + if self._process_records_threadsafe(zc, now, records) and self._new_records_futures: + for future in self._new_records_futures: + if not future.done(): + future.set_result(None) + self._new_records_futures.clear() def _process_records_threadsafe(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> bool: """Thread safe record updating. @@ -591,12 +595,13 @@ def set_server_if_missing(self) -> None: self.server = self.name self.server_key = self.server.lower() - def load_from_cache(self, zc: 'Zeroconf') -> bool: + def load_from_cache(self, zc: 'Zeroconf', now: Optional[float] = None) -> bool: """Populate the service info from the cache. This method is designed to be threadsafe. 
""" - now = current_time_millis() + if not now: + now = current_time_millis() original_server_key = self.server_key cached_srv_record = zc.cache.get_by_details(self.name, _TYPE_SRV, _CLASS_IN) if cached_srv_record: @@ -664,11 +669,13 @@ async def async_request( """ if not zc.started: await zc.async_wait_for_start() - if self.load_from_cache(zc): + + now = current_time_millis() + + if self.load_from_cache(zc, now): return True first_request = True - now = current_time_millis() delay = _LISTENER_TIME next_ = now last = now + timeout @@ -683,7 +690,7 @@ async def async_request( ) first_request = False if not out.questions: - return self.load_from_cache(zc) + return self.load_from_cache(zc, now) zc.async_send(out, addr, port) next_ = now + delay delay *= 2 From 1310f122bca6b283c7b3ac1d1cbcac5dcafb2adb Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 2 Aug 2023 00:43:33 +0000 Subject: [PATCH 008/434] 0.71.5 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 78e89ecc..f0b29708 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.71.5 (2023-08-02) + +### Fix + +* Improve performance of ServiceInfo.async_request ([#1205](https://github.com/python-zeroconf/python-zeroconf/issues/1205)) ([`8019a73`](https://github.com/python-zeroconf/python-zeroconf/commit/8019a73c952f2fc4c88d849aab970fafedb316d8)) + ## v0.71.4 (2023-07-24) ### Fix diff --git a/pyproject.toml b/pyproject.toml index a7624b94..6ce539a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.71.4" +version = "0.71.5" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index f427c220..6cc3d2af 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.71.4' +__version__ = '0.71.5' __license__ = 'LGPL' From 126849c92be8cec9253fba9faa591029d992fcc3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 1 Aug 2023 18:49:18 -1000 Subject: [PATCH 009/434] feat: speed up processing incoming records (#1206) --- src/zeroconf/_dns.pxd | 26 +++++++++++++++++++++----- src/zeroconf/_dns.py | 20 +++++++++++++------- 2 files changed, 34 insertions(+), 12 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index cd4f1f9e..5908ff1b 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -1,6 +1,8 @@ import cython +from ._protocol.incoming cimport DNSIncoming + cdef object _LEN_BYTE cdef object _LEN_SHORT @@ -9,9 +11,9 @@ cdef object _LEN_INT cdef object _NAME_COMPRESSION_MIN_SIZE cdef object _BASE_MAX_SIZE -cdef object _EXPIRE_FULL_TIME_MS -cdef object _EXPIRE_STALE_TIME_MS -cdef object _RECENT_TIME_MS +cdef cython.uint _EXPIRE_FULL_TIME_MS +cdef cython.uint _EXPIRE_STALE_TIME_MS +cdef cython.uint _RECENT_TIME_MS cdef object _CLASS_UNIQUE cdef object _CLASS_MASK @@ -34,11 +36,25 @@ cdef class DNSQuestion(DNSEntry): cdef class DNSRecord(DNSEntry): - cdef public object ttl - cdef public object created + cdef public cython.float ttl + cdef public cython.float created cdef _suppressed_by_answer(self, DNSRecord answer) + @cython.locals( + answers=cython.list, + ) + cpdef suppressed_by(self, DNSIncoming msg) + + cpdef get_expiration_time(self, cython.uint percent) + + cpdef is_expired(self, cython.float now) + + cpdef is_stale(self, cython.float now) + + cpdef is_recent(self, cython.float now) + + cpdef reset_ttl(self, DNSRecord other) cdef class DNSAddress(DNSRecord): diff --git 
a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 34d7fdb2..561b16ff 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -40,6 +40,8 @@ _EXPIRE_STALE_TIME_MS = 500 _RECENT_TIME_MS = 250 +_float = float +_int = int if TYPE_CHECKING: from ._protocol.incoming import DNSIncoming @@ -172,32 +174,36 @@ def __eq__(self, other: Any) -> bool: # pylint: disable=no-self-use def suppressed_by(self, msg: 'DNSIncoming') -> bool: """Returns true if any answer in a message can suffice for the information held in this record.""" - return any(self._suppressed_by_answer(record) for record in msg.answers) + answers = msg.answers + for record in answers: + if self._suppressed_by_answer(record): + return True + return False - def _suppressed_by_answer(self, other) -> bool: # type: ignore[no-untyped-def] + def _suppressed_by_answer(self, other: 'DNSRecord') -> bool: """Returns true if another record has same name, type and class, and if its TTL is at least half of this record's.""" return self == other and other.ttl > (self.ttl / 2) - def get_expiration_time(self, percent: int) -> float: + def get_expiration_time(self, percent: _int) -> float: """Returns the time at which this record will have expired by a certain percentage.""" return self.created + (percent * self.ttl * 10) # TODO: Switch to just int here - def get_remaining_ttl(self, now: float) -> Union[int, float]: + def get_remaining_ttl(self, now: _float) -> Union[int, float]: """Returns the remaining TTL in seconds.""" return max(0, millis_to_seconds((self.created + (_EXPIRE_FULL_TIME_MS * self.ttl)) - now)) - def is_expired(self, now: float) -> bool: + def is_expired(self, now: _float) -> bool: """Returns true if this record has expired.""" return self.created + (_EXPIRE_FULL_TIME_MS * self.ttl) <= now - def is_stale(self, now: float) -> bool: + def is_stale(self, now: _float) -> bool: """Returns true if this record is at least half way expired.""" return self.created + (_EXPIRE_STALE_TIME_MS * self.ttl) <= now - 
def is_recent(self, now: float) -> bool: + def is_recent(self, now: _float) -> bool: """Returns true if the record more than one quarter of its TTL remaining.""" return self.created + (_RECENT_TIME_MS * self.ttl) > now From 063b5d9c8ff9daa4ddd1a9d4f2f3d2a5b2c652c4 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 2 Aug 2023 04:57:16 +0000 Subject: [PATCH 010/434] 0.72.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0b29708..f4549b7f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.72.0 (2023-08-02) + +### Feature + +* Speed up processing incoming records ([#1206](https://github.com/python-zeroconf/python-zeroconf/issues/1206)) ([`126849c`](https://github.com/python-zeroconf/python-zeroconf/commit/126849c92be8cec9253fba9faa591029d992fcc3)) + ## v0.71.5 (2023-08-02) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 6ce539a6..1f54db8e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.71.5" +version = "0.72.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 6cc3d2af..d4562bae 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.71.5' +__version__ = '0.72.0' __license__ = 'LGPL' From 2233b6bc4ceeee5524d2ee88ecae8234173feb5f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 2 Aug 2023 21:36:52 -1000 Subject: [PATCH 011/434] fix: race with InvalidStateError when async_request times out (#1208) --- src/zeroconf/_services/info.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index cc1db05f..b75d6277 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -89,6 +89,12 @@ def instance_name_from_service_info(info: "ServiceInfo") -> str: _cached_ip_addresses = lru_cache(maxsize=256)(ip_address) +def _set_future_none_if_not_done(fut: asyncio.Future) -> None: + """Set a future to None if it is not done.""" + if not fut.done(): # pragma: no branch + fut.set_result(None) + + class ServiceInfo(RecordUpdateListener): """Service information. @@ -235,7 +241,7 @@ async def async_wait(self, timeout: float) -> None: loop = asyncio.get_running_loop() future = loop.create_future() self._new_records_futures.append(future) - handle = loop.call_later(millis_to_seconds(timeout), future.set_result, None) + handle = loop.call_later(millis_to_seconds(timeout), _set_future_none_if_not_done, future) try: await future finally: From ffe8fd5ecb6abd11362fea865e81ae987c86f7e7 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 3 Aug 2023 07:44:37 +0000 Subject: [PATCH 012/434] 0.72.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f4549b7f..fae7ab17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.72.1 (2023-08-03) + +### Fix + +* Race with InvalidStateError when async_request times out ([#1208](https://github.com/python-zeroconf/python-zeroconf/issues/1208)) ([`2233b6b`](https://github.com/python-zeroconf/python-zeroconf/commit/2233b6bc4ceeee5524d2ee88ecae8234173feb5f)) + ## v0.72.0 (2023-08-02) ### Feature diff --git 
a/pyproject.toml b/pyproject.toml index 1f54db8e..edde1594 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.72.0" +version = "0.72.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index d4562bae..81b6ebd4 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.72.0' +__version__ = '0.72.1' __license__ = 'LGPL' From 5f14b6dc687b3a0716d0ca7f61ccf1e93dfe5fa1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 2 Aug 2023 22:16:49 -1000 Subject: [PATCH 013/434] fix: revert DNSIncoming cimport in _dns.pxd (#1209) --- src/zeroconf/_dns.pxd | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 5908ff1b..289cd1a1 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -1,8 +1,6 @@ import cython -from ._protocol.incoming cimport DNSIncoming - cdef object _LEN_BYTE cdef object _LEN_SHORT @@ -44,7 +42,7 @@ cdef class DNSRecord(DNSEntry): @cython.locals( answers=cython.list, ) - cpdef suppressed_by(self, DNSIncoming msg) + cpdef suppressed_by(self, object msg) cpdef get_expiration_time(self, cython.uint percent) From 07cf846cc9d3d9eba32be961df04099e2ba4d9cd Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 3 Aug 2023 08:25:05 +0000 Subject: [PATCH 014/434] 0.72.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fae7ab17..3bf73cb5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.72.2 (2023-08-03) + +### 
Fix + +* Revert DNSIncoming cimport in _dns.pxd ([#1209](https://github.com/python-zeroconf/python-zeroconf/issues/1209)) ([`5f14b6d`](https://github.com/python-zeroconf/python-zeroconf/commit/5f14b6dc687b3a0716d0ca7f61ccf1e93dfe5fa1)) + ## v0.72.1 (2023-08-03) ### Fix diff --git a/pyproject.toml b/pyproject.toml index edde1594..498e1c2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.72.1" +version = "0.72.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 81b6ebd4..5b819741 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.72.1' +__version__ = '0.72.2' __license__ = 'LGPL' From 3dba5ae0c0e9473b7b20fd6fc79fa1a3b298dc5a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 2 Aug 2023 23:16:39 -1000 Subject: [PATCH 015/434] fix: revert adding typing to DNSRecord.suppressed_by (#1210) --- src/zeroconf/_dns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 561b16ff..f26d02b7 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -180,7 +180,7 @@ def suppressed_by(self, msg: 'DNSIncoming') -> bool: return True return False - def _suppressed_by_answer(self, other: 'DNSRecord') -> bool: + def _suppressed_by_answer(self, other) -> bool: # type: ignore[no-untyped-def] """Returns true if another record has same name, type and class, and if its TTL is at least half of this record's.""" return self == other and other.ttl > (self.ttl / 2) From cbca88cfa6a225bcd193b2144f1d59564adc22ce Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 3 Aug 2023 09:27:33 +0000 Subject: [PATCH 016/434] 0.72.3 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3bf73cb5..b3951797 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.72.3 (2023-08-03) + +### Fix + +* Revert adding typing to DNSRecord.suppressed_by ([#1210](https://github.com/python-zeroconf/python-zeroconf/issues/1210)) ([`3dba5ae`](https://github.com/python-zeroconf/python-zeroconf/commit/3dba5ae0c0e9473b7b20fd6fc79fa1a3b298dc5a)) + ## v0.72.2 (2023-08-03) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 498e1c2f..fcc3757a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.72.2" +version = "0.72.3" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 5b819741..77aa1796 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.72.2' +__version__ = '0.72.3' __license__ = 'LGPL' From 53a694f60e675ae0560e727be6b721b401c2b68f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 3 Aug 2023 13:41:35 -1000 Subject: [PATCH 017/434] feat: add a cache to service_type_name (#1211) --- src/zeroconf/_utils/name.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/zeroconf/_utils/name.py b/src/zeroconf/_utils/name.py index 7fa667a1..adccb3e5 100644 --- a/src/zeroconf/_utils/name.py +++ b/src/zeroconf/_utils/name.py @@ -35,6 +35,7 @@ ) +@lru_cache(maxsize=512) def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: disable=too-many-branches """ Validate a fully qualified service name, instance or subtype. 
[rfc6763] From 0114836a16f88456081836750e08735b443d83b3 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 3 Aug 2023 23:49:31 +0000 Subject: [PATCH 018/434] 0.73.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b3951797..93b8cd40 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.73.0 (2023-08-03) + +### Feature + +* Add a cache to service_type_name ([#1211](https://github.com/python-zeroconf/python-zeroconf/issues/1211)) ([`53a694f`](https://github.com/python-zeroconf/python-zeroconf/commit/53a694f60e675ae0560e727be6b721b401c2b68f)) + ## v0.72.3 (2023-08-03) ### Fix diff --git a/pyproject.toml b/pyproject.toml index fcc3757a..52baeee0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.72.3" +version = "0.73.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 77aa1796..e5471f85 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.72.3' +__version__ = '0.73.0' __license__ = 'LGPL' From 32a016e0bcaa116d9f98396dc83f73ae27d3e555 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 3 Aug 2023 14:19:40 -1000 Subject: [PATCH 019/434] chore: fix some legacy python2 formatting in examples (#1214) --- examples/async_apple_scanner.py | 2 +- examples/async_browser.py | 2 +- examples/async_service_info_request.py | 6 +++--- examples/browser.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/async_apple_scanner.py b/examples/async_apple_scanner.py index 88b54e4a..ff558f82 100644 --- a/examples/async_apple_scanner.py +++ b/examples/async_apple_scanner.py @@ -59,7 +59,7 @@ async def _async_show_service_info(zeroconf: Zeroconf, service_type: str, name: if info.properties: print(" Properties are:") for key, value in info.properties.items(): - print(f" {key}: {value}") + print(f" {key!r}: {value!r}") else: print(" No properties") else: diff --git a/examples/async_browser.py b/examples/async_browser.py index 71c5e670..f7fb7151 100644 --- a/examples/async_browser.py +++ b/examples/async_browser.py @@ -41,7 +41,7 @@ async def async_display_service_info(zeroconf: Zeroconf, service_type: str, name if info.properties: print(" Properties are:") for key, value in info.properties.items(): - print(f" {key}: {value}") + print(f" {key!r}: {value!r}") else: print(" No properties") else: diff --git a/examples/async_service_info_request.py b/examples/async_service_info_request.py index 5276c122..5bb24761 100644 --- a/examples/async_service_info_request.py +++ b/examples/async_service_info_request.py @@ -9,7 +9,7 @@ import argparse import asyncio import logging -from typing import Any, Optional, cast +from typing import Any, List, Optional, cast from zeroconf import IPVersion, ServiceBrowser, ServiceStateChange, Zeroconf from zeroconf.asyncio import AsyncServiceInfo, AsyncZeroconf @@ -21,7 +21,7 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: zeroconf = aiozc.zeroconf while True: await asyncio.sleep(5) - infos = [] + infos: List[AsyncServiceInfo] = [] for name in zeroconf.cache.names(): if not 
name.endswith(HAP_TYPE): continue @@ -38,7 +38,7 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: if info.properties: print(" Properties are:") for key, value in info.properties.items(): - print(f" {key}: {value}") + print(f" {key!r}: {value!r}") else: print(" No properties") else: diff --git a/examples/browser.py b/examples/browser.py index fc815e3f..60933e2a 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -36,7 +36,7 @@ def on_service_state_change( if info.properties: print(" Properties are:") for key, value in info.properties.items(): - print(f" {key}: {value}") + print(f" {key!r}: {value!r}") else: print(" No properties") else: From 99a6f98e44a1287ba537eabb852b1b69923402f0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 3 Aug 2023 14:36:40 -1000 Subject: [PATCH 020/434] feat: speed up unpacking text records in ServiceInfo (#1212) --- src/zeroconf/_services/info.py | 36 +++++++++++++++++++++++++--------- 1 file changed, 27 insertions(+), 9 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index b75d6277..02b7137a 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -172,7 +172,7 @@ def __init__( self.priority = priority self.server = server if server else None self.server_key = server.lower() if server else None - self._properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]] = {} + self._properties: Optional[Dict[Union[str, bytes], Optional[Union[str, bytes]]]] = None if isinstance(properties, bytes): self._set_text(properties) else: @@ -226,7 +226,7 @@ def addresses(self, value: List[bytes]) -> None: self._ipv6_addresses.append(addr) @property - def properties(self) -> Dict: + def properties(self) -> Dict[Union[str, bytes], Optional[Union[str, bytes]]]: """If properties were set in the constructor this property returns the original dictionary of type `Dict[Union[bytes, str], Any]`. 
@@ -234,6 +234,10 @@ def properties(self) -> Dict: bytes and the values are either bytes, if there was a value, even empty, or `None`, if there was none. No further decoding is attempted. The type returned is `Dict[bytes, Optional[bytes]]`. """ + if self._properties is None: + self._unpack_text_into_properties() + if TYPE_CHECKING: + assert self._properties is not None return self._properties async def async_wait(self, timeout: float) -> None: @@ -317,10 +321,10 @@ def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[st for addr in self._ip_addresses_by_version_value(version.value) ] - def _set_properties(self, properties: Dict) -> None: + def _set_properties(self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]]) -> None: """Sets properties and text of this info from a dictionary""" self._properties = properties - list_ = [] + list_: List[bytes] = [] result = b'' for key, value in properties.items(): if isinstance(key, str): @@ -338,14 +342,25 @@ def _set_properties(self, properties: Dict) -> None: def _set_text(self, text: bytes) -> None: """Sets properties and text given a text field""" + if text == self.text: + return self.text = text + # Clear the properties cache + self._properties = None + + def _unpack_text_into_properties(self) -> None: + """Unpacks the text field into properties""" + text = self.text end = len(text) if end == 0: + # Properties should be set atomically + # in case another thread is reading them self._properties = {} return + result: Dict[Union[str, bytes], Optional[Union[str, bytes]]] = {} index = 0 - strs = [] + strs: List[bytes] = [] while index < end: length = text[index] index += 1 @@ -355,17 +370,20 @@ def _set_text(self, text: bytes) -> None: key: bytes value: Optional[bytes] for s in strs: - try: - key, value = s.split(b'=', 1) - except ValueError: + key_value = s.split(b'=', 1) + if len(key_value) == 2: + key, value = key_value + else: # No equals sign at all key = s value = None # Only 
update non-existent properties - if key and result.get(key) is None: + if key and key not in result: result[key] = value + # Properties should be set atomically + # in case another thread is reading them self._properties = result def get_name(self) -> str: From 0094e2684344c6b7edd7948924f093f1b4c19901 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 3 Aug 2023 14:36:50 -1000 Subject: [PATCH 021/434] fix: remove typing on reset_ttl for cython compat (#1213) --- src/zeroconf/_dns.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index f26d02b7..6c34f9dd 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -207,7 +207,7 @@ def is_recent(self, now: _float) -> bool: """Returns true if the record more than one quarter of its TTL remaining.""" return self.created + (_RECENT_TIME_MS * self.ttl) > now - def reset_ttl(self, other: 'DNSRecord') -> None: + def reset_ttl(self, other) -> None: # type: ignore[no-untyped-def] """Sets this record's TTL and created time to that of another record.""" self.set_created_ttl(other.created, other.ttl) From 8a9dc0bf41bfb350e82aaf07a432bf414f6bce87 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 4 Aug 2023 00:45:26 +0000 Subject: [PATCH 022/434] 0.74.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 10 ++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 93b8cd40..ae8d8368 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,16 @@ +## v0.74.0 (2023-08-04) + +### Feature + +* Speed up unpacking text records in ServiceInfo ([#1212](https://github.com/python-zeroconf/python-zeroconf/issues/1212)) ([`99a6f98`](https://github.com/python-zeroconf/python-zeroconf/commit/99a6f98e44a1287ba537eabb852b1b69923402f0)) + +### Fix + +* Remove typing on reset_ttl for cython compat 
([#1213](https://github.com/python-zeroconf/python-zeroconf/issues/1213)) ([`0094e26`](https://github.com/python-zeroconf/python-zeroconf/commit/0094e2684344c6b7edd7948924f093f1b4c19901)) + ## v0.73.0 (2023-08-03) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 52baeee0..40d0341a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.73.0" +version = "0.74.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index e5471f85..4de632fa 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.73.0' +__version__ = '0.74.0' __license__ = 'LGPL' From aff625dc6a5e816dad519644c4adac4f96980c04 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 13 Aug 2023 16:03:28 -0500 Subject: [PATCH 023/434] feat: speed up processing incoming records (#1216) --- src/zeroconf/_dns.pxd | 4 ++++ src/zeroconf/_dns.py | 7 ++++--- src/zeroconf/_handlers.py | 15 +++++++++------ 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 289cd1a1..5622a5ed 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -44,6 +44,8 @@ cdef class DNSRecord(DNSEntry): ) cpdef suppressed_by(self, object msg) + cpdef get_remaining_ttl(self, cython.float now) + cpdef get_expiration_time(self, cython.uint percent) cpdef is_expired(self, cython.float now) @@ -54,6 +56,8 @@ cdef class DNSRecord(DNSEntry): cpdef reset_ttl(self, DNSRecord other) + cpdef set_created_ttl(self, cython.float now, cython.float ttl) + cdef class DNSAddress(DNSRecord): cdef public cython.int _hash diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 6c34f9dd..73b0c751 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -26,7 +26,7 @@ from ._exceptions import AbstractMethodException from ._utils.net import _is_v6_address -from ._utils.time import current_time_millis, millis_to_seconds +from ._utils.time import current_time_millis from .const import _CLASS_MASK, _CLASS_UNIQUE, _CLASSES, _TYPE_ANY, _TYPES _LEN_BYTE = 1 @@ -193,7 +193,8 @@ def get_expiration_time(self, percent: _int) -> float: # TODO: Switch to just int here def get_remaining_ttl(self, now: _float) -> Union[int, float]: """Returns the remaining TTL in seconds.""" - return max(0, millis_to_seconds((self.created + (_EXPIRE_FULL_TIME_MS * self.ttl)) - now)) + remain = (self.created + (_EXPIRE_FULL_TIME_MS * self.ttl) - now) / 1000.0 + return 0 if remain < 0 else remain def is_expired(self, now: _float) -> bool: """Returns true if this record has expired.""" @@ -212,7 +213,7 @@ def reset_ttl(self, other) -> None: # type: ignore[no-untyped-def] another record.""" self.set_created_ttl(other.created, 
other.ttl) - def set_created_ttl(self, created: float, ttl: Union[float, int]) -> None: + def set_created_ttl(self, created: _float, ttl: Union[float, int]) -> None: """Set the created and ttl of a record.""" self.created = created self.ttl = ttl diff --git a/src/zeroconf/_handlers.py b/src/zeroconf/_handlers.py index 192496c1..be0d619f 100644 --- a/src/zeroconf/_handlers.py +++ b/src/zeroconf/_handlers.py @@ -408,6 +408,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: removes: Set[DNSRecord] = set() now = msg.now unique_types: Set[Tuple[str, int, int]] = set() + cache = self.cache for record in msg.answers: # Protect zeroconf from records that can cause denial of service. @@ -416,7 +417,9 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: # ServiceBrowsers generating excessive queries refresh queries. # Apple uses a 15s minimum TTL, however we do not have the same # level of rate limit and safe guards so we use 1/4 of the recommended value. - if record.ttl and record.type == _TYPE_PTR and record.ttl < _DNS_PTR_MIN_TTL: + record_type = record.type + record_ttl = record.ttl + if record_ttl and record_type == _TYPE_PTR and record_ttl < _DNS_PTR_MIN_TTL: log.debug( "Increasing effective ttl of %s to minimum of %s to protect against excessive refreshes.", record, @@ -425,12 +428,12 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: record.set_created_ttl(record.created, _DNS_PTR_MIN_TTL) if record.unique: # https://tools.ietf.org/html/rfc6762#section-10.2 - unique_types.add((record.name, record.type, record.class_)) + unique_types.add((record.name, record_type, record.class_)) if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) - maybe_entry = self.cache.async_get_unique(record) + maybe_entry = cache.async_get_unique(record) if not record.is_expired(now): if maybe_entry is not None: maybe_entry.reset_ttl(record) @@ -447,7 +450,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: 
removes.add(record) if unique_types: - self.cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, msg.answers, now) + cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, msg.answers, now) if updates: self.async_updates(now, updates) @@ -468,12 +471,12 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: # processsed. new = False if other_adds or address_adds: - new = self.cache.async_add_records(itertools.chain(address_adds, other_adds)) + new = cache.async_add_records(itertools.chain(address_adds, other_adds)) # Removes are processed last since # ServiceInfo could generate an un-needed query # because the data was not yet populated. if removes: - self.cache.async_remove_records(removes) + cache.async_remove_records(removes) if updates: self.async_updates_complete(new) From 5df8a57a14d59687a3c22ea8ee063e265031e278 Mon Sep 17 00:00:00 2001 From: Eugenio Panadero Date: Sun, 13 Aug 2023 23:03:39 +0200 Subject: [PATCH 024/434] feat: expose flag to disable strict name checking in service registration (#1215) --- src/zeroconf/_core.py | 16 +++++++++++----- src/zeroconf/_services/info.py | 4 ++-- src/zeroconf/asyncio.py | 3 ++- tests/test_asyncio.py | 35 ++++++++++++++++++++++++++++++++++ tests/utils/test_name.py | 29 ++++++++++++++++++++++++++++ 5 files changed, 79 insertions(+), 8 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index ab8e72e5..6a9c2c8a 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -620,6 +620,7 @@ def register_service( ttl: Optional[int] = None, allow_name_change: bool = False, cooperating_responders: bool = False, + strict: bool = True, ) -> None: """Registers service information to the network with a default TTL. 
Zeroconf will then respond to requests for information for that @@ -635,7 +636,7 @@ def register_service( assert self.loop is not None run_coro_with_timeout( await_awaitable( - self.async_register_service(info, ttl, allow_name_change, cooperating_responders) + self.async_register_service(info, ttl, allow_name_change, cooperating_responders, strict) ), self.loop, _REGISTER_TIME * _REGISTER_BROADCASTS, @@ -647,6 +648,7 @@ async def async_register_service( ttl: Optional[int] = None, allow_name_change: bool = False, cooperating_responders: bool = False, + strict: bool = True, ) -> Awaitable: """Registers service information to the network with a default TTL. Zeroconf will then respond to requests for information for that @@ -662,7 +664,7 @@ async def async_register_service( info.set_server_if_missing() await self.async_wait_for_start() - await self.async_check_service(info, allow_name_change, cooperating_responders) + await self.async_check_service(info, allow_name_change, cooperating_responders, strict) self.registry.async_add(info) return asyncio.ensure_future(self._async_broadcast_service(info, _REGISTER_TIME, None)) @@ -810,11 +812,15 @@ def unregister_all_services(self) -> None: ) async def async_check_service( - self, info: ServiceInfo, allow_name_change: bool, cooperating_responders: bool = False + self, + info: ServiceInfo, + allow_name_change: bool, + cooperating_responders: bool = False, + strict: bool = True, ) -> None: """Checks the network for a unique service name, modifying the ServiceInfo passed in if it is not unique.""" - instance_name = instance_name_from_service_info(info) + instance_name = instance_name_from_service_info(info, strict=strict) if cooperating_responders: return next_instance_number = 2 @@ -829,7 +835,7 @@ async def async_check_service( # change the name and look for a conflict info.name = f'{instance_name}-{next_instance_number}.{info.type}' next_instance_number += 1 - service_type_name(info.name) + service_type_name(info.name, 
strict=strict) next_time = now i = 0 diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 02b7137a..29ddb9a0 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -76,11 +76,11 @@ from .._core import Zeroconf -def instance_name_from_service_info(info: "ServiceInfo") -> str: +def instance_name_from_service_info(info: "ServiceInfo", strict: bool = True) -> str: """Calculate the instance name from the ServiceInfo.""" # This is kind of funky because of the subtype based tests # need to make subtypes a first class citizen - service_name = service_type_name(info.name) + service_name = service_type_name(info.name, strict=strict) if not info.type.endswith(service_name): raise BadTypeInNameException return info.name[: -len(service_name) - 1] diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index 7ded0ecb..755757d7 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -180,6 +180,7 @@ async def async_register_service( ttl: Optional[int] = None, allow_name_change: bool = False, cooperating_responders: bool = False, + strict: bool = True, ) -> Awaitable: """Registers service information to the network with a default TTL. Zeroconf will then respond to requests for information for that @@ -192,7 +193,7 @@ async def async_register_service( and therefore can be awaited if necessary. 
""" return await self.zeroconf.async_register_service( - info, ttl, allow_name_change, cooperating_responders + info, ttl, allow_name_change, cooperating_responders, strict ) async def async_unregister_all_services(self) -> None: diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 66c81e00..cd067ae1 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -456,6 +456,41 @@ async def test_async_service_registration_name_does_not_match_type() -> None: await aiozc.async_close() +@pytest.mark.asyncio +async def test_async_service_registration_name_strict_check() -> None: + """Test registering services throws when the name does not comply.""" + zc = Zeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + type_ = "_ibisip_http._tcp.local." + name = "CustomerInformationService-F4D4895E9EEB" + registration_name = f"{name}.{type_}" + + desc = {'path': '/~paulsm/'} + info = ServiceInfo( + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], + ) + with pytest.raises(BadTypeInNameException): + await zc.async_check_service(info, allow_name_change=False) + + with pytest.raises(BadTypeInNameException): + task = await aiozc.async_register_service(info) + await task + + await zc.async_check_service(info, allow_name_change=False, strict=False) + task = await aiozc.async_register_service(info, strict=False) + await task + + await aiozc.async_unregister_service(info) + await aiozc.async_close() + + @pytest.mark.asyncio async def test_async_tasks() -> None: """Test awaiting broadcast tasks""" diff --git a/tests/utils/test_name.py b/tests/utils/test_name.py index 3df73f5a..9604b775 100644 --- a/tests/utils/test_name.py +++ b/tests/utils/test_name.py @@ -2,10 +2,12 @@ """Unit tests for zeroconf._utils.name.""" +import socket import pytest from zeroconf import BadTypeInNameException +from zeroconf._services.info import ServiceInfo, instance_name_from_service_info from 
zeroconf._utils import name as nameutils @@ -25,6 +27,33 @@ def test_service_type_name_overlong_full_name(): nameutils.service_type_name(f"{long_name}._tivo-videostream._tcp.local.", strict=False) +@pytest.mark.parametrize( + "instance_name, service_type", + ( + ("CustomerInformationService-F4D4885E9EEB", "_ibisip_http._tcp.local."), + ("DeviceManagementService_F4D4885E9EEB", "_ibisip_http._tcp.local."), + ), +) +def test_service_type_name_non_strict_compliant_names(instance_name, service_type): + """Test service_type_name for valid names, but not strict-compliant.""" + desc = {'path': '/~paulsm/'} + service_name = f'{instance_name}.{service_type}' + service_server = 'ash-1.local.' + service_address = socket.inet_aton("10.0.1.2") + info = ServiceInfo( + service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] + ) + assert info.get_name() == instance_name + + with pytest.raises(BadTypeInNameException): + nameutils.service_type_name(service_name) + with pytest.raises(BadTypeInNameException): + instance_name_from_service_info(info) + + nameutils.service_type_name(service_name, strict=False) + assert instance_name_from_service_info(info, strict=False) == instance_name + + def test_possible_types(): """Test possible types from name.""" assert nameutils.possible_types('.') == set() From 844c5544b85ca77303defa68057221273719bebe Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 13 Aug 2023 21:14:18 +0000 Subject: [PATCH 025/434] 0.75.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ae8d8368..5cc53d45 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.75.0 (2023-08-13) + +### Feature + +* Expose flag to disable strict name checking in service registration ([#1215](https://github.com/python-zeroconf/python-zeroconf/issues/1215)) 
([`5df8a57`](https://github.com/python-zeroconf/python-zeroconf/commit/5df8a57a14d59687a3c22ea8ee063e265031e278)) +* Speed up processing incoming records ([#1216](https://github.com/python-zeroconf/python-zeroconf/issues/1216)) ([`aff625d`](https://github.com/python-zeroconf/python-zeroconf/commit/aff625dc6a5e816dad519644c4adac4f96980c04)) + ## v0.74.0 (2023-08-04) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 40d0341a..e1963e2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.74.0" +version = "0.75.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 4de632fa..82b6096d 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.74.0' +__version__ = '0.75.0' __license__ = 'LGPL' From 69b33be3b2f9d4a27ef5154cae94afca048efffa Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 13 Aug 2023 20:40:46 -0500 Subject: [PATCH 026/434] feat: improve performance responding to queries (#1217) --- src/zeroconf/_services/info.py | 73 +++++++++++++++++++--------------- src/zeroconf/const.py | 1 + 2 files changed, 43 insertions(+), 31 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 29ddb9a0..2f4ae59e 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -46,7 +46,7 @@ from ..const import ( _ADDRESS_RECORD_TYPES, _CLASS_IN, - _CLASS_UNIQUE, + _CLASS_IN_UNIQUE, _DNS_HOST_TTL, _DNS_OTHER_TTL, _FLAGS_QR_QUERY, @@ -388,7 +388,7 @@ def _unpack_text_into_properties(self) -> None: def get_name(self) -> str: """Name accessor""" - return self.name[: len(self.name) - len(self.type) - 1] + return self._name[: len(self._name) - len(self.type) - 1] def _get_ip_addresses_from_cache_lifo( self, zc: 'Zeroconf', now: float, type: int @@ -409,15 +409,21 @@ def _get_ip_addresses_from_cache_lifo( def _set_ipv6_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: """Set IPv6 addresses from the cache.""" - self._ipv6_addresses = cast( - "List[IPv6Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) - ) + if TYPE_CHECKING: + self._ipv6_addresses = cast( + "List[IPv6Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) + ) + else: + self._ipv6_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) def _set_ipv4_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: """Set IPv4 addresses from the cache.""" - self._ipv4_addresses = cast( - "List[IPv4Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) - ) + if TYPE_CHECKING: + self._ipv4_addresses = cast( + "List[IPv4Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) + ) + else: + self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) def update_record(self, zc: 'Zeroconf', now: float, record: 
Optional[DNSRecord]) -> None: """Updates service information from a DNS record. @@ -523,9 +529,9 @@ def dns_addresses( created: Optional[float] = None, ) -> List[DNSAddress]: """Return matching DNSAddress from ServiceInfo.""" - name = self.server or self.name + name = self.server or self._name ttl = override_ttl if override_ttl is not None else self.host_ttl - class_ = _CLASS_IN | _CLASS_UNIQUE + class_ = _CLASS_IN_UNIQUE version_value = version.value return [ DNSAddress( @@ -546,30 +552,33 @@ def dns_pointer(self, override_ttl: Optional[int] = None, created: Optional[floa _TYPE_PTR, _CLASS_IN, override_ttl if override_ttl is not None else self.other_ttl, - self.name, + self._name, created, ) def dns_service(self, override_ttl: Optional[int] = None, created: Optional[float] = None) -> DNSService: """Return DNSService from ServiceInfo.""" + port = self.port + if TYPE_CHECKING: + assert isinstance(port, int) return DNSService( - self.name, + self._name, _TYPE_SRV, - _CLASS_IN | _CLASS_UNIQUE, + _CLASS_IN_UNIQUE, override_ttl if override_ttl is not None else self.host_ttl, self.priority, self.weight, - cast(int, self.port), - self.server or self.name, + port, + self.server or self._name, created, ) def dns_text(self, override_ttl: Optional[int] = None, created: Optional[float] = None) -> DNSText: """Return DNSText from ServiceInfo.""" return DNSText( - self.name, + self._name, _TYPE_TXT, - _CLASS_IN | _CLASS_UNIQUE, + _CLASS_IN_UNIQUE, override_ttl if override_ttl is not None else self.other_ttl, self.text, created, @@ -580,11 +589,11 @@ def dns_nsec( ) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return DNSNsec( - self.name, + self._name, _TYPE_NSEC, - _CLASS_IN | _CLASS_UNIQUE, + _CLASS_IN_UNIQUE, override_ttl if override_ttl is not None else self.host_ttl, - self.name, + self._name, missing_types, created, ) @@ -593,12 +602,11 @@ def get_address_and_nsec_records( self, override_ttl: Optional[int] = None, created: Optional[float] = None ) -> Set[DNSRecord]: 
"""Build a set of address records and NSEC records for non-present record types.""" - seen_types: Set[int] = set() + missing_types: Set[int] = _ADDRESS_RECORD_TYPES.copy() records: Set[DNSRecord] = set() for dns_address in self.dns_addresses(override_ttl, IPVersion.All, created): - seen_types.add(dns_address.type) + missing_types.discard(dns_address.type) records.add(dns_address) - missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types if missing_types: assert self.server is not None, "Service server must be set for NSEC record." records.add(self.dns_nsec(list(missing_types), override_ttl, created)) @@ -616,7 +624,7 @@ def set_server_if_missing(self) -> None: This function is for backwards compatibility. """ if self.server is None: - self.server = self.name + self.server = self._name self.server_key = self.server.lower() def load_from_cache(self, zc: 'Zeroconf', now: Optional[float] = None) -> bool: @@ -627,10 +635,10 @@ def load_from_cache(self, zc: 'Zeroconf', now: Optional[float] = None) -> bool: if not now: now = current_time_millis() original_server_key = self.server_key - cached_srv_record = zc.cache.get_by_details(self.name, _TYPE_SRV, _CLASS_IN) + cached_srv_record = zc.cache.get_by_details(self._name, _TYPE_SRV, _CLASS_IN) if cached_srv_record: self._process_record_threadsafe(zc, cached_srv_record, now) - cached_txt_record = zc.cache.get_by_details(self.name, _TYPE_TXT, _CLASS_IN) + cached_txt_record = zc.cache.get_by_details(self._name, _TYPE_TXT, _CLASS_IN) if cached_txt_record: self._process_record_threadsafe(zc, cached_txt_record, now) if original_server_key == self.server_key: @@ -732,10 +740,13 @@ def generate_request_query( ) -> DNSOutgoing: """Generate the request query.""" out = DNSOutgoing(_FLAGS_QR_QUERY) - out.add_question_or_one_cache(zc.cache, now, self.name, _TYPE_SRV, _CLASS_IN) - out.add_question_or_one_cache(zc.cache, now, self.name, _TYPE_TXT, _CLASS_IN) - out.add_question_or_all_cache(zc.cache, now, self.server or self.name, 
_TYPE_A, _CLASS_IN) - out.add_question_or_all_cache(zc.cache, now, self.server or self.name, _TYPE_AAAA, _CLASS_IN) + name = self._name + server_or_name = self.server or name + cache = zc.cache + out.add_question_or_one_cache(cache, now, name, _TYPE_SRV, _CLASS_IN) + out.add_question_or_one_cache(cache, now, name, _TYPE_TXT, _CLASS_IN) + out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_A, _CLASS_IN) + out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_AAAA, _CLASS_IN) if question_type == DNSQuestionType.QU: for question in out.questions: question.unicast = True @@ -743,7 +754,7 @@ def generate_request_query( def __eq__(self, other: object) -> bool: """Tests equality of service name""" - return isinstance(other, ServiceInfo) and other.name == self.name + return isinstance(other, ServiceInfo) and other._name == self._name def __repr__(self) -> str: """String representation""" diff --git a/src/zeroconf/const.py b/src/zeroconf/const.py index f87c1336..ca199df5 100644 --- a/src/zeroconf/const.py +++ b/src/zeroconf/const.py @@ -84,6 +84,7 @@ _CLASS_ANY = 255 _CLASS_MASK = 0x7FFF _CLASS_UNIQUE = 0x8000 +_CLASS_IN_UNIQUE = _CLASS_IN | _CLASS_UNIQUE _TYPE_A = 1 _TYPE_NS = 2 From 6d83f99e820e2edd2c31969c29b536eb3bb57d9a Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 14 Aug 2023 01:49:12 +0000 Subject: [PATCH 027/434] 0.76.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5cc53d45..ff7ead6e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.76.0 (2023-08-14) + +### Feature + +* Improve performance responding to queries ([#1217](https://github.com/python-zeroconf/python-zeroconf/issues/1217)) ([`69b33be`](https://github.com/python-zeroconf/python-zeroconf/commit/69b33be3b2f9d4a27ef5154cae94afca048efffa)) + ## v0.75.0 (2023-08-13) ### Feature 
diff --git a/pyproject.toml b/pyproject.toml index e1963e2f..568f4623 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.75.0" +version = "0.76.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 82b6096d..39a6b3fc 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.75.0' +__version__ = '0.76.0' __license__ = 'LGPL' From 12560a70c331e5d5043a06ca2ac50628d4d246f0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 14 Aug 2023 10:34:50 -0500 Subject: [PATCH 028/434] chore: split AsyncEngine into _engine.py (#1218) --- src/zeroconf/_core.py | 335 +----------------------------- src/zeroconf/_engine.py | 368 +++++++++++++++++++++++++++++++++ tests/conftest.py | 14 +- tests/services/test_browser.py | 4 +- tests/test_core.py | 239 +-------------------- tests/test_engine.py | 271 ++++++++++++++++++++++++ tests/utils/test_asyncio.py | 2 +- 7 files changed, 655 insertions(+), 578 deletions(-) create mode 100644 src/zeroconf/_engine.py create mode 100644 tests/test_engine.py diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 6a9c2c8a..1548ec5b 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -21,17 +21,15 @@ """ import asyncio -import itertools import logging -import random -import socket import sys import threading -from types import TracebackType # noqa # used in type hints -from typing import Any, Awaitable, Dict, List, Optional, Tuple, Type, Union, cast +from types import TracebackType +from typing import Awaitable, Dict, List, Optional, Tuple, Type, Union from ._cache import DNSCache from ._dns import DNSQuestion, DNSQuestionType +from 
._engine import AsyncEngine, _WrappedTransport from ._exceptions import NonUniqueNameException, NotRunningException from ._handlers import ( MulticastOutgoingQueue, @@ -48,7 +46,7 @@ from ._services.browser import ServiceBrowser from ._services.info import ServiceInfo, instance_name_from_service_info from ._services.registry import ServiceRegistry -from ._updates import RecordUpdate, RecordUpdateListener +from ._updates import RecordUpdateListener from ._utils.asyncio import ( await_awaitable, get_running_loop, @@ -67,11 +65,9 @@ ) from ._utils.time import current_time_millis, millis_to_seconds from .const import ( - _CACHE_CLEANUP_INTERVAL, _CHECK_TIME, _CLASS_IN, _CLASS_UNIQUE, - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL, _FLAGS_AA, _FLAGS_QR_QUERY, _FLAGS_QR_RESPONSE, @@ -86,7 +82,6 @@ _UNREGISTER_TIME, ) -_TC_DELAY_RANDOM_INTERVAL = (400, 500) # The maximum amont of time to delay a multicast # response in order to aggregate answers _AGGREGATION_DELAY = 500 # ms @@ -102,331 +97,9 @@ # 3000ms _PROTECTED_AGGREGATION_DELAY = 200 # ms -_CLOSE_TIMEOUT = 3000 # ms _REGISTER_BROADCASTS = 3 -class _WrappedTransport: - """A wrapper for transports.""" - - __slots__ = ( - 'transport', - 'is_ipv6', - 'sock', - 'fileno', - 'sock_name', - ) - - def __init__( - self, - transport: asyncio.DatagramTransport, - is_ipv6: bool, - sock: socket.socket, - fileno: int, - sock_name: Any, - ) -> None: - """Initialize the wrapped transport. - - These attributes are used when sending packets. 
- """ - self.transport = transport - self.is_ipv6 = is_ipv6 - self.sock = sock - self.fileno = fileno - self.sock_name = sock_name - - -def _make_wrapped_transport(transport: asyncio.DatagramTransport) -> _WrappedTransport: - """Make a wrapped transport.""" - sock: socket.socket = transport.get_extra_info('socket') - return _WrappedTransport( - transport=transport, - is_ipv6=sock.family == socket.AF_INET6, - sock=sock, - fileno=sock.fileno(), - sock_name=sock.getsockname(), - ) - - -class AsyncEngine: - """An engine wraps sockets in the event loop.""" - - __slots__ = ( - 'loop', - 'zc', - 'protocols', - 'readers', - 'senders', - 'running_event', - '_listen_socket', - '_respond_sockets', - '_cleanup_timer', - ) - - def __init__( - self, - zeroconf: 'Zeroconf', - listen_socket: Optional[socket.socket], - respond_sockets: List[socket.socket], - ) -> None: - self.loop: Optional[asyncio.AbstractEventLoop] = None - self.zc = zeroconf - self.protocols: List[AsyncListener] = [] - self.readers: List[_WrappedTransport] = [] - self.senders: List[_WrappedTransport] = [] - self.running_event: Optional[asyncio.Event] = None - self._listen_socket = listen_socket - self._respond_sockets = respond_sockets - self._cleanup_timer: Optional[asyncio.TimerHandle] = None - - def setup(self, loop: asyncio.AbstractEventLoop, loop_thread_ready: Optional[threading.Event]) -> None: - """Set up the instance.""" - self.loop = loop - self.running_event = asyncio.Event() - self.loop.create_task(self._async_setup(loop_thread_ready)) - - async def _async_setup(self, loop_thread_ready: Optional[threading.Event]) -> None: - """Set up the instance.""" - assert self.loop is not None - self._cleanup_timer = self.loop.call_later(_CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) - await self._async_create_endpoints() - assert self.running_event is not None - self.running_event.set() - if loop_thread_ready: - loop_thread_ready.set() - - async def _async_create_endpoints(self) -> None: - """Create 
endpoints to send and receive.""" - assert self.loop is not None - loop = self.loop - reader_sockets = [] - sender_sockets = [] - if self._listen_socket: - reader_sockets.append(self._listen_socket) - for s in self._respond_sockets: - if s not in reader_sockets: - reader_sockets.append(s) - sender_sockets.append(s) - - for s in reader_sockets: - transport, protocol = await loop.create_datagram_endpoint(lambda: AsyncListener(self.zc), sock=s) - self.protocols.append(cast(AsyncListener, protocol)) - self.readers.append(_make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) - if s in sender_sockets: - self.senders.append(_make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) - - def _async_cache_cleanup(self) -> None: - """Periodic cache cleanup.""" - now = current_time_millis() - self.zc.question_history.async_expire(now) - self.zc.record_manager.async_updates( - now, [RecordUpdate(record, record) for record in self.zc.cache.async_expire(now)] - ) - self.zc.record_manager.async_updates_complete(False) - assert self.loop is not None - self._cleanup_timer = self.loop.call_later(_CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) - - async def _async_close(self) -> None: - """Cancel and wait for the cleanup task to finish.""" - self._async_shutdown() - await asyncio.sleep(0) # flush out any call soons - assert self._cleanup_timer is not None - self._cleanup_timer.cancel() - - def _async_shutdown(self) -> None: - """Shutdown transports and sockets.""" - assert self.running_event is not None - self.running_event.clear() - for wrapped_transport in itertools.chain(self.senders, self.readers): - wrapped_transport.transport.close() - - def close(self) -> None: - """Close from sync context. - - While it is not expected during normal operation, - this function may raise EventLoopBlocked if the underlying - call to `_async_close` cannot be completed. 
- """ - assert self.loop is not None - # Guard against Zeroconf.close() being called from the eventloop - if get_running_loop() == self.loop: - self._async_shutdown() - return - if not self.loop.is_running(): - return - run_coro_with_timeout(self._async_close(), self.loop, _CLOSE_TIMEOUT) - - -class AsyncListener(asyncio.Protocol, QuietLogger): - - """A Listener is used by this module to listen on the multicast - group to which DNS messages are sent, allowing the implementation - to cache information as it arrives. - - It requires registration with an Engine object in order to have - the read() method called when a socket is available for reading.""" - - __slots__ = ('zc', 'data', 'last_time', 'transport', 'sock_description', '_deferred', '_timers') - - def __init__(self, zc: 'Zeroconf') -> None: - self.zc = zc - self.data: Optional[bytes] = None - self.last_time: float = 0 - self.last_message: Optional[DNSIncoming] = None - self.transport: Optional[_WrappedTransport] = None - self.sock_description: Optional[str] = None - self._deferred: Dict[str, List[DNSIncoming]] = {} - self._timers: Dict[str, asyncio.TimerHandle] = {} - super().__init__() - - def datagram_received( - self, data: bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] - ) -> None: - assert self.transport is not None - data_len = len(data) - debug = log.isEnabledFor(logging.DEBUG) - - if data_len > _MAX_MSG_ABSOLUTE: - # Guard against oversized packets to ensure bad implementations cannot overwhelm - # the system. 
- if debug: - log.debug( - "Discarding incoming packet with length %s, which is larger " - "than the absolute maximum size of %s", - data_len, - _MAX_MSG_ABSOLUTE, - ) - return - - now = current_time_millis() - if ( - self.data == data - and (now - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL) < self.last_time - and self.last_message is not None - and not self.last_message.has_qu_question() - ): - # Guard against duplicate packets - if debug: - log.debug( - 'Ignoring duplicate message with no unicast questions received from %s [socket %s] (%d bytes) as [%r]', - addrs, - self.sock_description, - data_len, - data, - ) - return - - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = () - if len(addrs) == 2: - # https://github.com/python/mypy/issues/1178 - addr, port = addrs # type: ignore - scope = None - else: - # https://github.com/python/mypy/issues/1178 - addr, port, flow, scope = addrs # type: ignore - if debug: - log.debug('IPv6 scope_id %d associated to the receiving interface', scope) - v6_flow_scope = (flow, scope) - - msg = DNSIncoming(data, (addr, port), scope, now) - self.data = data - self.last_time = now - self.last_message = msg - if msg.valid: - if debug: - log.debug( - 'Received from %r:%r [socket %s]: %r (%d bytes) as [%r]', - addr, - port, - self.sock_description, - msg, - data_len, - data, - ) - else: - if debug: - log.debug( - 'Received from %r:%r [socket %s]: (%d bytes) [%r]', - addr, - port, - self.sock_description, - data_len, - data, - ) - return - - if not msg.is_query(): - self.zc.handle_response(msg) - return - - self.handle_query_or_defer(msg, addr, port, self.transport, v6_flow_scope) - - def handle_query_or_defer( - self, - msg: DNSIncoming, - addr: str, - port: int, - transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), - ) -> None: - """Deal with incoming query packets. 
Provides a response if - possible.""" - if not msg.truncated: - self._respond_query(msg, addr, port, transport, v6_flow_scope) - return - - deferred = self._deferred.setdefault(addr, []) - # If we get the same packet we ignore it - for incoming in reversed(deferred): - if incoming.data == msg.data: - return - deferred.append(msg) - delay = millis_to_seconds(random.randint(*_TC_DELAY_RANDOM_INTERVAL)) - assert self.zc.loop is not None - self._cancel_any_timers_for_addr(addr) - self._timers[addr] = self.zc.loop.call_later( - delay, self._respond_query, None, addr, port, transport, v6_flow_scope - ) - - def _cancel_any_timers_for_addr(self, addr: str) -> None: - """Cancel any future truncated packet timers for the address.""" - if addr in self._timers: - self._timers.pop(addr).cancel() - - def _respond_query( - self, - msg: Optional[DNSIncoming], - addr: str, - port: int, - transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), - ) -> None: - """Respond to a query and reassemble any truncated deferred packets.""" - self._cancel_any_timers_for_addr(addr) - packets = self._deferred.pop(addr, []) - if msg: - packets.append(msg) - - self.zc.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) - - def error_received(self, exc: Exception) -> None: - """Likely socket closed or IPv6.""" - # We preformat the message string with the socket as we want - # log_exception_once to log a warrning message once PER EACH - # different socket in case there are problems with multiple - # sockets - msg_str = f"Error with socket {self.sock_description}): %s" - self.log_exception_once(exc, msg_str, exc) - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - wrapped_transport = _make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) - self.transport = wrapped_transport - self.sock_description = f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" - - def connection_lost(self, exc: Optional[Exception]) -> 
None: - """Handle connection lost.""" - - def async_send_with_transport( log_debug: bool, transport: _WrappedTransport, diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py new file mode 100644 index 00000000..689b2aa5 --- /dev/null +++ b/src/zeroconf/_engine.py @@ -0,0 +1,368 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +import asyncio +import itertools +import logging +import random +import socket +import threading +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast + +from ._logger import QuietLogger, log +from ._protocol.incoming import DNSIncoming +from ._updates import RecordUpdate +from ._utils.asyncio import get_running_loop, run_coro_with_timeout +from ._utils.time import current_time_millis, millis_to_seconds +from .const import ( + _CACHE_CLEANUP_INTERVAL, + _DUPLICATE_PACKET_SUPPRESSION_INTERVAL, + _MAX_MSG_ABSOLUTE, +) + +if TYPE_CHECKING: + from ._core import Zeroconf + +_TC_DELAY_RANDOM_INTERVAL = (400, 500) + +_CLOSE_TIMEOUT = 3000 # ms + + +class _WrappedTransport: + """A wrapper for transports.""" + + __slots__ = ( + 'transport', + 'is_ipv6', + 'sock', + 'fileno', + 'sock_name', + ) + + def __init__( + self, + transport: asyncio.DatagramTransport, + is_ipv6: bool, + sock: socket.socket, + fileno: int, + sock_name: Any, + ) -> None: + """Initialize the wrapped transport. + + These attributes are used when sending packets. 
+ """ + self.transport = transport + self.is_ipv6 = is_ipv6 + self.sock = sock + self.fileno = fileno + self.sock_name = sock_name + + +def _make_wrapped_transport(transport: asyncio.DatagramTransport) -> _WrappedTransport: + """Make a wrapped transport.""" + sock: socket.socket = transport.get_extra_info('socket') + return _WrappedTransport( + transport=transport, + is_ipv6=sock.family == socket.AF_INET6, + sock=sock, + fileno=sock.fileno(), + sock_name=sock.getsockname(), + ) + + +class AsyncEngine: + """An engine wraps sockets in the event loop.""" + + __slots__ = ( + 'loop', + 'zc', + 'protocols', + 'readers', + 'senders', + 'running_event', + '_listen_socket', + '_respond_sockets', + '_cleanup_timer', + ) + + def __init__( + self, + zeroconf: 'Zeroconf', + listen_socket: Optional[socket.socket], + respond_sockets: List[socket.socket], + ) -> None: + self.loop: Optional[asyncio.AbstractEventLoop] = None + self.zc = zeroconf + self.protocols: List[AsyncListener] = [] + self.readers: List[_WrappedTransport] = [] + self.senders: List[_WrappedTransport] = [] + self.running_event: Optional[asyncio.Event] = None + self._listen_socket = listen_socket + self._respond_sockets = respond_sockets + self._cleanup_timer: Optional[asyncio.TimerHandle] = None + + def setup(self, loop: asyncio.AbstractEventLoop, loop_thread_ready: Optional[threading.Event]) -> None: + """Set up the instance.""" + self.loop = loop + self.running_event = asyncio.Event() + self.loop.create_task(self._async_setup(loop_thread_ready)) + + async def _async_setup(self, loop_thread_ready: Optional[threading.Event]) -> None: + """Set up the instance.""" + assert self.loop is not None + self._cleanup_timer = self.loop.call_later(_CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) + await self._async_create_endpoints() + assert self.running_event is not None + self.running_event.set() + if loop_thread_ready: + loop_thread_ready.set() + + async def _async_create_endpoints(self) -> None: + """Create 
endpoints to send and receive.""" + assert self.loop is not None + loop = self.loop + reader_sockets = [] + sender_sockets = [] + if self._listen_socket: + reader_sockets.append(self._listen_socket) + for s in self._respond_sockets: + if s not in reader_sockets: + reader_sockets.append(s) + sender_sockets.append(s) + + for s in reader_sockets: + transport, protocol = await loop.create_datagram_endpoint(lambda: AsyncListener(self.zc), sock=s) + self.protocols.append(cast(AsyncListener, protocol)) + self.readers.append(_make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) + if s in sender_sockets: + self.senders.append(_make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) + + def _async_cache_cleanup(self) -> None: + """Periodic cache cleanup.""" + now = current_time_millis() + self.zc.question_history.async_expire(now) + self.zc.record_manager.async_updates( + now, [RecordUpdate(record, record) for record in self.zc.cache.async_expire(now)] + ) + self.zc.record_manager.async_updates_complete(False) + assert self.loop is not None + self._cleanup_timer = self.loop.call_later(_CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) + + async def _async_close(self) -> None: + """Cancel and wait for the cleanup task to finish.""" + self._async_shutdown() + await asyncio.sleep(0) # flush out any call soons + assert self._cleanup_timer is not None + self._cleanup_timer.cancel() + + def _async_shutdown(self) -> None: + """Shutdown transports and sockets.""" + assert self.running_event is not None + self.running_event.clear() + for wrapped_transport in itertools.chain(self.senders, self.readers): + wrapped_transport.transport.close() + + def close(self) -> None: + """Close from sync context. + + While it is not expected during normal operation, + this function may raise EventLoopBlocked if the underlying + call to `_async_close` cannot be completed. 
+ """ + assert self.loop is not None + # Guard against Zeroconf.close() being called from the eventloop + if get_running_loop() == self.loop: + self._async_shutdown() + return + if not self.loop.is_running(): + return + run_coro_with_timeout(self._async_close(), self.loop, _CLOSE_TIMEOUT) + + +class AsyncListener(asyncio.Protocol, QuietLogger): + + """A Listener is used by this module to listen on the multicast + group to which DNS messages are sent, allowing the implementation + to cache information as it arrives. + + It requires registration with an Engine object in order to have + the read() method called when a socket is available for reading.""" + + __slots__ = ('zc', 'data', 'last_time', 'transport', 'sock_description', '_deferred', '_timers') + + def __init__(self, zc: 'Zeroconf') -> None: + self.zc = zc + self.data: Optional[bytes] = None + self.last_time: float = 0 + self.last_message: Optional[DNSIncoming] = None + self.transport: Optional[_WrappedTransport] = None + self.sock_description: Optional[str] = None + self._deferred: Dict[str, List[DNSIncoming]] = {} + self._timers: Dict[str, asyncio.TimerHandle] = {} + super().__init__() + + def datagram_received( + self, data: bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] + ) -> None: + assert self.transport is not None + data_len = len(data) + debug = log.isEnabledFor(logging.DEBUG) + + if data_len > _MAX_MSG_ABSOLUTE: + # Guard against oversized packets to ensure bad implementations cannot overwhelm + # the system. 
+ if debug: + log.debug( + "Discarding incoming packet with length %s, which is larger " + "than the absolute maximum size of %s", + data_len, + _MAX_MSG_ABSOLUTE, + ) + return + + now = current_time_millis() + if ( + self.data == data + and (now - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL) < self.last_time + and self.last_message is not None + and not self.last_message.has_qu_question() + ): + # Guard against duplicate packets + if debug: + log.debug( + 'Ignoring duplicate message with no unicast questions received from %s [socket %s] (%d bytes) as [%r]', + addrs, + self.sock_description, + data_len, + data, + ) + return + + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = () + if len(addrs) == 2: + # https://github.com/python/mypy/issues/1178 + addr, port = addrs # type: ignore + scope = None + else: + # https://github.com/python/mypy/issues/1178 + addr, port, flow, scope = addrs # type: ignore + if debug: # pragma: no branch + log.debug('IPv6 scope_id %d associated to the receiving interface', scope) + v6_flow_scope = (flow, scope) + + msg = DNSIncoming(data, (addr, port), scope, now) + self.data = data + self.last_time = now + self.last_message = msg + if msg.valid: + if debug: + log.debug( + 'Received from %r:%r [socket %s]: %r (%d bytes) as [%r]', + addr, + port, + self.sock_description, + msg, + data_len, + data, + ) + else: + if debug: + log.debug( + 'Received from %r:%r [socket %s]: (%d bytes) [%r]', + addr, + port, + self.sock_description, + data_len, + data, + ) + return + + if not msg.is_query(): + self.zc.handle_response(msg) + return + + self.handle_query_or_defer(msg, addr, port, self.transport, v6_flow_scope) + + def handle_query_or_defer( + self, + msg: DNSIncoming, + addr: str, + port: int, + transport: _WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + ) -> None: + """Deal with incoming query packets. 
Provides a response if + possible.""" + if not msg.truncated: + self._respond_query(msg, addr, port, transport, v6_flow_scope) + return + + deferred = self._deferred.setdefault(addr, []) + # If we get the same packet we ignore it + for incoming in reversed(deferred): + if incoming.data == msg.data: + return + deferred.append(msg) + delay = millis_to_seconds(random.randint(*_TC_DELAY_RANDOM_INTERVAL)) + assert self.zc.loop is not None + self._cancel_any_timers_for_addr(addr) + self._timers[addr] = self.zc.loop.call_later( + delay, self._respond_query, None, addr, port, transport, v6_flow_scope + ) + + def _cancel_any_timers_for_addr(self, addr: str) -> None: + """Cancel any future truncated packet timers for the address.""" + if addr in self._timers: + self._timers.pop(addr).cancel() + + def _respond_query( + self, + msg: Optional[DNSIncoming], + addr: str, + port: int, + transport: _WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + ) -> None: + """Respond to a query and reassemble any truncated deferred packets.""" + self._cancel_any_timers_for_addr(addr) + packets = self._deferred.pop(addr, []) + if msg: + packets.append(msg) + + self.zc.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) + + def error_received(self, exc: Exception) -> None: + """Likely socket closed or IPv6.""" + # We preformat the message string with the socket as we want + # log_exception_once to log a warrning message once PER EACH + # different socket in case there are problems with multiple + # sockets + msg_str = f"Error with socket {self.sock_description}): %s" + self.log_exception_once(exc, msg_str, exc) + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + wrapped_transport = _make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) + self.transport = wrapped_transport + self.sock_description = f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" + + def connection_lost(self, exc: Optional[Exception]) -> 
None: + """Handle connection lost.""" diff --git a/tests/conftest.py b/tests/conftest.py index 71b00d48..34fdeb72 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,11 +4,11 @@ """ conftest for zeroconf tests. """ import threading -import unittest +from unittest.mock import patch import pytest -from zeroconf import _core, const +from zeroconf import _core, _engine, const @pytest.fixture(autouse=True) @@ -23,9 +23,7 @@ def verify_threads_ended(): @pytest.fixture def run_isolated(): """Change the mDNS port to run the test in isolation.""" - with unittest.mock.patch.object(_core, "_MDNS_PORT", 5454), unittest.mock.patch.object( - const, "_MDNS_PORT", 5454 - ): + with patch.object(_core, "_MDNS_PORT", 5454), patch.object(const, "_MDNS_PORT", 5454): yield @@ -36,7 +34,7 @@ def disable_duplicate_packet_suppression(): Some tests run too slowly because of the duplicate packet suppression. """ - with unittest.mock.patch.object( - _core, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0 - ), unittest.mock.patch.object(const, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0): + with patch.object(_engine, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0), patch.object( + const, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0 + ): yield diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 72e550c6..d13701ec 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -20,7 +20,7 @@ DNSPointer, DNSQuestion, Zeroconf, - _core, + _engine, _handlers, const, current_time_millis, @@ -1118,7 +1118,7 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: @patch.object(_handlers, '_DNS_PTR_MIN_TTL', 1) -@patch.object(_core, "_CACHE_CLEANUP_INTERVAL", 0.01) +@patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01) def test_service_browser_expire_callbacks(): """Test that the ServiceBrowser matching does not match partial names.""" # instantiate a zeroconf instance diff --git a/tests/test_core.py b/tests/test_core.py index 
ad116328..8f5322bd 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -4,7 +4,6 @@ """ Unit tests for zeroconf._core """ import asyncio -import itertools import logging import os import socket @@ -13,14 +12,13 @@ import time import unittest import unittest.mock -from typing import Set, cast -from unittest.mock import MagicMock, patch +from typing import cast +from unittest.mock import patch import pytest import zeroconf as r -from zeroconf import NotRunningException, Zeroconf, _core, const, current_time_millis -from zeroconf._protocol import outgoing +from zeroconf import NotRunningException, Zeroconf, const, current_time_millis from zeroconf.asyncio import AsyncZeroconf from . import _clear_cache, _inject_response, _wait_for_start, has_working_ipv6 @@ -47,60 +45,6 @@ async def make_query(): asyncio.run_coroutine_threadsafe(make_query(), zc.loop).result() -# This test uses asyncio because it needs to access the cache directly -# which is not threadsafe -@pytest.mark.asyncio -async def test_reaper(): - with patch.object(_core, "_CACHE_CLEANUP_INTERVAL", 0.01): - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) - zeroconf = aiozc.zeroconf - cache = zeroconf.cache - original_entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) - record_with_10s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 10, b'a') - record_with_1s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') - zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) - question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) - now = r.current_time_millis() - other_known_answers: Set[r.DNSRecord] = { - r.DNSPointer( - "_hap._tcp.local.", - const._TYPE_PTR, - const._CLASS_IN, - 10000, - 'known-to-other._hap._tcp.local.', - ) - } - zeroconf.question_history.add_question_at_time(question, now, other_known_answers) - assert zeroconf.question_history.suppresses(question, now, other_known_answers) - 
entries_with_cache = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) - await asyncio.sleep(1.2) - entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) - assert zeroconf.cache.get(record_with_1s_ttl) is None - await aiozc.async_close() - assert not zeroconf.question_history.suppresses(question, now, other_known_answers) - assert entries != original_entries - assert entries_with_cache != original_entries - assert record_with_10s_ttl in entries - assert record_with_1s_ttl not in entries - - -@pytest.mark.asyncio -async def test_reaper_aborts_when_done(): - """Ensure cache cleanup stops when zeroconf is done.""" - with patch.object(_core, "_CACHE_CLEANUP_INTERVAL", 0.01): - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) - zeroconf = aiozc.zeroconf - record_with_10s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 10, b'a') - record_with_1s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') - zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) - assert zeroconf.cache.get(record_with_10s_ttl) is not None - assert zeroconf.cache.get(record_with_1s_ttl) is not None - await aiozc.async_close() - await asyncio.sleep(1.2) - assert zeroconf.cache.get(record_with_10s_ttl) is not None - assert zeroconf.cache.get(record_with_1s_ttl) is not None - - class Framework(unittest.TestCase): def test_launch_and_close(self): rv = r.Zeroconf(interfaces=r.InterfaceChoice.All) @@ -691,183 +635,6 @@ async def test_multiple_sync_instances_stared_from_async_close(): await asyncio.sleep(0) -def test_guard_against_oversized_packets(): - """Ensure we do not process oversized packets. - - These packets can quickly overwhelm the system. 
- """ - zc = Zeroconf(interfaces=['127.0.0.1']) - - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - - for i in range(5000): - generated.add_answer_at_time( - r.DNSText( - "packet{i}.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ), - 0, - ) - - try: - # We are patching to generate an oversized packet - with patch.object(outgoing, "_MAX_MSG_ABSOLUTE", 100000), patch.object( - outgoing, "_MAX_MSG_TYPICAL", 100000 - ): - over_sized_packet = generated.packets()[0] - assert len(over_sized_packet) > const._MAX_MSG_ABSOLUTE - except AttributeError: - # cannot patch with cython - zc.close() - return - - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - okpacket_record = r.DNSText( - "okpacket.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ) - - generated.add_answer_at_time( - okpacket_record, - 0, - ) - ok_packet = generated.packets()[0] - - # We cannot test though the network interface as some operating systems - # will guard against the oversized packet and we won't see it. 
- listener = _core.AsyncListener(zc) - listener.transport = unittest.mock.MagicMock() - - listener.datagram_received(ok_packet, ('127.0.0.1', const._MDNS_PORT)) - assert zc.cache.async_get_unique(okpacket_record) is not None - - listener.datagram_received(over_sized_packet, ('127.0.0.1', const._MDNS_PORT)) - assert ( - zc.cache.async_get_unique( - r.DNSText( - "packet0.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ) - ) - is None - ) - - logging.getLogger('zeroconf').setLevel(logging.INFO) - - listener.datagram_received(over_sized_packet, ('::1', const._MDNS_PORT, 1, 1)) - assert ( - zc.cache.async_get_unique( - r.DNSText( - "packet0.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ) - ) - is None - ) - - zc.close() - - -def test_guard_against_duplicate_packets(): - """Ensure we do not process duplicate packets. - These packets can quickly overwhelm the system. - """ - zc = Zeroconf(interfaces=['127.0.0.1']) - listener = _core.AsyncListener(zc) - listener.transport = MagicMock() - - query = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) - question = r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) - query.add_question(question) - packet_with_qm_question = query.packets()[0] - - query3 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) - question3 = r.DNSQuestion("x._ay._tcp.local.", const._TYPE_PTR, const._CLASS_IN) - query3.add_question(question3) - packet_with_qm_question2 = query3.packets()[0] - - query2 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) - question2 = r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) - question2.unicast = True - query2.add_question(question2) - packet_with_qu_question = query2.packets()[0] - - addrs = ("1.2.3.4", 43) - - with patch.object(_core, "current_time_millis") as _current_time_millis, patch.object( - listener, "handle_query_or_defer" - ) as _handle_query_or_defer: 
- start_time = current_time_millis() - - _current_time_millis.return_value = start_time - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the same packet again and handle_query_or_defer should not fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_not_called() - _handle_query_or_defer.reset_mock() - - # Now walk time forward 1000 seconds - _current_time_millis.return_value = start_time + 1000 - # Now call with the same packet again and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the different packet and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question2, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the different packet and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the different packet with qu question and handle_query_or_defer should fire - listener.datagram_received(packet_with_qu_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call again with the same packet that has a qu question and handle_query_or_defer should fire - listener.datagram_received(packet_with_qu_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - log.setLevel(logging.WARNING) - - # Call with the QM packet again - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the same packet again and 
handle_query_or_defer should not fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_not_called() - _handle_query_or_defer.reset_mock() - - # Now call with garbage - listener.datagram_received(b'garbage', addrs) - _handle_query_or_defer.assert_not_called() - _handle_query_or_defer.reset_mock() - - zc.close() - - def test_shutdown_while_register_in_process(): """Test we can shutdown while registering a service in another thread.""" diff --git a/tests/test_engine.py b/tests/test_engine.py new file mode 100644 index 00000000..6ac00570 --- /dev/null +++ b/tests/test_engine.py @@ -0,0 +1,271 @@ +#!/usr/bin/env python + + +""" Unit tests for zeroconf._core """ + +import asyncio +import itertools +import logging +import unittest +import unittest.mock +from typing import Set +from unittest.mock import MagicMock, patch + +import pytest + +import zeroconf as r +from zeroconf import Zeroconf, _engine, const, current_time_millis +from zeroconf._protocol import outgoing +from zeroconf.asyncio import AsyncZeroconf + +log = logging.getLogger('zeroconf') +original_logging_level = logging.NOTSET + + +def setup_module(): + global original_logging_level + original_logging_level = log.level + log.setLevel(logging.DEBUG) + + +def teardown_module(): + if original_logging_level != logging.NOTSET: + log.setLevel(original_logging_level) + + +def threadsafe_query(zc, protocol, *args): + async def make_query(): + protocol.handle_query_or_defer(*args) + + asyncio.run_coroutine_threadsafe(make_query(), zc.loop).result() + + +# This test uses asyncio because it needs to access the cache directly +# which is not threadsafe +@pytest.mark.asyncio +async def test_reaper(): + with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf = aiozc.zeroconf + cache = zeroconf.cache + original_entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) + 
record_with_10s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 10, b'a') + record_with_1s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') + zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) + question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) + now = r.current_time_millis() + other_known_answers: Set[r.DNSRecord] = { + r.DNSPointer( + "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + 10000, + 'known-to-other._hap._tcp.local.', + ) + } + zeroconf.question_history.add_question_at_time(question, now, other_known_answers) + assert zeroconf.question_history.suppresses(question, now, other_known_answers) + entries_with_cache = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) + await asyncio.sleep(1.2) + entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) + assert zeroconf.cache.get(record_with_1s_ttl) is None + await aiozc.async_close() + assert not zeroconf.question_history.suppresses(question, now, other_known_answers) + assert entries != original_entries + assert entries_with_cache != original_entries + assert record_with_10s_ttl in entries + assert record_with_1s_ttl not in entries + + +@pytest.mark.asyncio +async def test_reaper_aborts_when_done(): + """Ensure cache cleanup stops when zeroconf is done.""" + with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf = aiozc.zeroconf + record_with_10s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 10, b'a') + record_with_1s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') + zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) + assert zeroconf.cache.get(record_with_10s_ttl) is not None + assert zeroconf.cache.get(record_with_1s_ttl) is not None + await aiozc.async_close() + await asyncio.sleep(1.2) + assert 
zeroconf.cache.get(record_with_10s_ttl) is not None + assert zeroconf.cache.get(record_with_1s_ttl) is not None + + +def test_guard_against_oversized_packets(): + """Ensure we do not process oversized packets. + + These packets can quickly overwhelm the system. + """ + zc = Zeroconf(interfaces=['127.0.0.1']) + + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + + for i in range(5000): + generated.add_answer_at_time( + r.DNSText( + "packet{i}.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ), + 0, + ) + + try: + # We are patching to generate an oversized packet + with patch.object(outgoing, "_MAX_MSG_ABSOLUTE", 100000), patch.object( + outgoing, "_MAX_MSG_TYPICAL", 100000 + ): + over_sized_packet = generated.packets()[0] + assert len(over_sized_packet) > const._MAX_MSG_ABSOLUTE + except AttributeError: + # cannot patch with cython + zc.close() + return + + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + okpacket_record = r.DNSText( + "okpacket.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ) + + generated.add_answer_at_time( + okpacket_record, + 0, + ) + ok_packet = generated.packets()[0] + + # We cannot test though the network interface as some operating systems + # will guard against the oversized packet and we won't see it. 
+ listener = _engine.AsyncListener(zc) + listener.transport = unittest.mock.MagicMock() + + listener.datagram_received(ok_packet, ('127.0.0.1', const._MDNS_PORT)) + assert zc.cache.async_get_unique(okpacket_record) is not None + + listener.datagram_received(over_sized_packet, ('127.0.0.1', const._MDNS_PORT)) + assert ( + zc.cache.async_get_unique( + r.DNSText( + "packet0.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ) + ) + is None + ) + + logging.getLogger('zeroconf').setLevel(logging.INFO) + + listener.datagram_received(over_sized_packet, ('::1', const._MDNS_PORT, 1, 1)) + assert ( + zc.cache.async_get_unique( + r.DNSText( + "packet0.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ) + ) + is None + ) + + zc.close() + + +def test_guard_against_duplicate_packets(): + """Ensure we do not process duplicate packets. + These packets can quickly overwhelm the system. + """ + zc = Zeroconf(interfaces=['127.0.0.1']) + listener = _engine.AsyncListener(zc) + listener.transport = MagicMock() + + query = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) + question = r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) + query.add_question(question) + packet_with_qm_question = query.packets()[0] + + query3 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) + question3 = r.DNSQuestion("x._ay._tcp.local.", const._TYPE_PTR, const._CLASS_IN) + query3.add_question(question3) + packet_with_qm_question2 = query3.packets()[0] + + query2 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) + question2 = r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) + question2.unicast = True + query2.add_question(question2) + packet_with_qu_question = query2.packets()[0] + + addrs = ("1.2.3.4", 43) + + with patch.object(_engine, "current_time_millis") as _current_time_millis, patch.object( + listener, "handle_query_or_defer" + ) as 
_handle_query_or_defer: + start_time = current_time_millis() + + _current_time_millis.return_value = start_time + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the same packet again and handle_query_or_defer should not fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_not_called() + _handle_query_or_defer.reset_mock() + + # Now walk time forward 1000 seconds + _current_time_millis.return_value = start_time + 1000 + # Now call with the same packet again and handle_query_or_defer should fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the different packet and handle_query_or_defer should fire + listener.datagram_received(packet_with_qm_question2, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the different packet and handle_query_or_defer should fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the different packet with qu question and handle_query_or_defer should fire + listener.datagram_received(packet_with_qu_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call again with the same packet that has a qu question and handle_query_or_defer should fire + listener.datagram_received(packet_with_qu_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + log.setLevel(logging.WARNING) + + # Call with the QM packet again + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the same packet again 
and handle_query_or_defer should not fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_not_called() + _handle_query_or_defer.reset_mock() + + # Now call with garbage + listener.datagram_received(b'garbage', addrs) + _handle_query_or_defer.assert_not_called() + _handle_query_or_defer.reset_mock() + + zc.close() diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index 0bd3d6e4..a0385515 100644 --- a/tests/utils/test_asyncio.py +++ b/tests/utils/test_asyncio.py @@ -14,7 +14,7 @@ import pytest from zeroconf import EventLoopBlocked -from zeroconf._core import _CLOSE_TIMEOUT +from zeroconf._engine import _CLOSE_TIMEOUT from zeroconf._utils import asyncio as aioutils from zeroconf.const import _LOADED_SYSTEM_TIMEOUT From e9cc5c83f3808d23d534de743bd35bc1372c5641 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 14 Aug 2023 10:47:42 -0500 Subject: [PATCH 029/434] chore: prepare _engine.py to be able to be cythonized (#1219) --- src/zeroconf/_engine.py | 19 +++++++++++++++---- tests/test_engine.py | 18 ++++++++++++++++-- 2 files changed, 31 insertions(+), 6 deletions(-) diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 689b2aa5..00ecf51a 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -150,7 +150,9 @@ async def _async_create_endpoints(self) -> None: sender_sockets.append(s) for s in reader_sockets: - transport, protocol = await loop.create_datagram_endpoint(lambda: AsyncListener(self.zc), sock=s) + transport, protocol = await loop.create_datagram_endpoint( + lambda: AsyncListener(self.zc), sock=s # type: ignore[arg-type, return-value] + ) self.protocols.append(cast(AsyncListener, protocol)) self.readers.append(_make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) if s in sender_sockets: @@ -198,7 +200,7 @@ def close(self) -> None: run_coro_with_timeout(self._async_close(), self.loop, _CLOSE_TIMEOUT) -class AsyncListener(asyncio.Protocol, 
QuietLogger): +class AsyncListener: """A Listener is used by this module to listen on the multicast group to which DNS messages are sent, allowing the implementation @@ -207,7 +209,16 @@ class AsyncListener(asyncio.Protocol, QuietLogger): It requires registration with an Engine object in order to have the read() method called when a socket is available for reading.""" - __slots__ = ('zc', 'data', 'last_time', 'transport', 'sock_description', '_deferred', '_timers') + __slots__ = ( + 'zc', + 'data', + 'last_time', + 'last_message', + 'transport', + 'sock_description', + '_deferred', + '_timers', + ) def __init__(self, zc: 'Zeroconf') -> None: self.zc = zc @@ -357,7 +368,7 @@ def error_received(self, exc: Exception) -> None: # different socket in case there are problems with multiple # sockets msg_str = f"Error with socket {self.sock_description}): %s" - self.log_exception_once(exc, msg_str, exc) + QuietLogger.log_exception_once(exc, msg_str, exc) def connection_made(self, transport: asyncio.BaseTransport) -> None: wrapped_transport = _make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) diff --git a/tests/test_engine.py b/tests/test_engine.py index 6ac00570..2c7e14be 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -8,7 +8,7 @@ import logging import unittest import unittest.mock -from typing import Set +from typing import Set, Tuple, Union from unittest.mock import MagicMock, patch import pytest @@ -16,6 +16,7 @@ import zeroconf as r from zeroconf import Zeroconf, _engine, const, current_time_millis from zeroconf._protocol import outgoing +from zeroconf._protocol.incoming import DNSIncoming from zeroconf.asyncio import AsyncZeroconf log = logging.getLogger('zeroconf') @@ -188,7 +189,20 @@ def test_guard_against_duplicate_packets(): These packets can quickly overwhelm the system. 
""" zc = Zeroconf(interfaces=['127.0.0.1']) - listener = _engine.AsyncListener(zc) + + class SubListener(_engine.AsyncListener): + def handle_query_or_defer( + self, + msg: DNSIncoming, + addr: str, + port: int, + transport: _engine._WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + ) -> None: + """Handle a query or defer it for later processing.""" + super().handle_query_or_defer(msg, addr, port, transport, v6_flow_scope) + + listener = SubListener(zc) listener.transport = MagicMock() query = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) From f4c17ebc5109afab2afd5432e372c77ec7b673c8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 14 Aug 2023 11:49:22 -0500 Subject: [PATCH 030/434] chore: split _engine.py into _transport.py and _listener.py (#1222) --- src/zeroconf/_core.py | 3 +- src/zeroconf/_engine.py | 243 ++----------------------------------- src/zeroconf/_listener.py | 216 +++++++++++++++++++++++++++++++++ src/zeroconf/_transport.py | 67 ++++++++++ tests/conftest.py | 4 +- tests/test_engine.py | 209 +------------------------------ tests/test_listener.py | 219 +++++++++++++++++++++++++++++++++ 7 files changed, 518 insertions(+), 443 deletions(-) create mode 100644 src/zeroconf/_listener.py create mode 100644 src/zeroconf/_transport.py create mode 100644 tests/test_listener.py diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 1548ec5b..0f9b45df 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -29,7 +29,7 @@ from ._cache import DNSCache from ._dns import DNSQuestion, DNSQuestionType -from ._engine import AsyncEngine, _WrappedTransport +from ._engine import AsyncEngine from ._exceptions import NonUniqueNameException, NotRunningException from ._handlers import ( MulticastOutgoingQueue, @@ -46,6 +46,7 @@ from ._services.browser import ServiceBrowser from ._services.info import ServiceInfo, instance_name_from_service_info from ._services.registry import ServiceRegistry +from ._transport 
import _WrappedTransport from ._updates import RecordUpdateListener from ._utils.asyncio import ( await_awaitable, diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 00ecf51a..44435750 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -22,71 +22,23 @@ import asyncio import itertools -import logging -import random import socket import threading -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, List, Optional, cast -from ._logger import QuietLogger, log -from ._protocol.incoming import DNSIncoming from ._updates import RecordUpdate from ._utils.asyncio import get_running_loop, run_coro_with_timeout -from ._utils.time import current_time_millis, millis_to_seconds -from .const import ( - _CACHE_CLEANUP_INTERVAL, - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL, - _MAX_MSG_ABSOLUTE, -) +from ._utils.time import current_time_millis +from .const import _CACHE_CLEANUP_INTERVAL if TYPE_CHECKING: from ._core import Zeroconf -_TC_DELAY_RANDOM_INTERVAL = (400, 500) - -_CLOSE_TIMEOUT = 3000 # ms - - -class _WrappedTransport: - """A wrapper for transports.""" - - __slots__ = ( - 'transport', - 'is_ipv6', - 'sock', - 'fileno', - 'sock_name', - ) - - def __init__( - self, - transport: asyncio.DatagramTransport, - is_ipv6: bool, - sock: socket.socket, - fileno: int, - sock_name: Any, - ) -> None: - """Initialize the wrapped transport. - - These attributes are used when sending packets. 
- """ - self.transport = transport - self.is_ipv6 = is_ipv6 - self.sock = sock - self.fileno = fileno - self.sock_name = sock_name +from ._listener import AsyncListener +from ._transport import _WrappedTransport, make_wrapped_transport -def _make_wrapped_transport(transport: asyncio.DatagramTransport) -> _WrappedTransport: - """Make a wrapped transport.""" - sock: socket.socket = transport.get_extra_info('socket') - return _WrappedTransport( - transport=transport, - is_ipv6=sock.family == socket.AF_INET6, - sock=sock, - fileno=sock.fileno(), - sock_name=sock.getsockname(), - ) +_CLOSE_TIMEOUT = 3000 # ms class AsyncEngine: @@ -154,9 +106,9 @@ async def _async_create_endpoints(self) -> None: lambda: AsyncListener(self.zc), sock=s # type: ignore[arg-type, return-value] ) self.protocols.append(cast(AsyncListener, protocol)) - self.readers.append(_make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) + self.readers.append(make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) if s in sender_sockets: - self.senders.append(_make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) + self.senders.append(make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) def _async_cache_cleanup(self) -> None: """Periodic cache cleanup.""" @@ -198,182 +150,3 @@ def close(self) -> None: if not self.loop.is_running(): return run_coro_with_timeout(self._async_close(), self.loop, _CLOSE_TIMEOUT) - - -class AsyncListener: - - """A Listener is used by this module to listen on the multicast - group to which DNS messages are sent, allowing the implementation - to cache information as it arrives. 
- - It requires registration with an Engine object in order to have - the read() method called when a socket is available for reading.""" - - __slots__ = ( - 'zc', - 'data', - 'last_time', - 'last_message', - 'transport', - 'sock_description', - '_deferred', - '_timers', - ) - - def __init__(self, zc: 'Zeroconf') -> None: - self.zc = zc - self.data: Optional[bytes] = None - self.last_time: float = 0 - self.last_message: Optional[DNSIncoming] = None - self.transport: Optional[_WrappedTransport] = None - self.sock_description: Optional[str] = None - self._deferred: Dict[str, List[DNSIncoming]] = {} - self._timers: Dict[str, asyncio.TimerHandle] = {} - super().__init__() - - def datagram_received( - self, data: bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] - ) -> None: - assert self.transport is not None - data_len = len(data) - debug = log.isEnabledFor(logging.DEBUG) - - if data_len > _MAX_MSG_ABSOLUTE: - # Guard against oversized packets to ensure bad implementations cannot overwhelm - # the system. 
- if debug: - log.debug( - "Discarding incoming packet with length %s, which is larger " - "than the absolute maximum size of %s", - data_len, - _MAX_MSG_ABSOLUTE, - ) - return - - now = current_time_millis() - if ( - self.data == data - and (now - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL) < self.last_time - and self.last_message is not None - and not self.last_message.has_qu_question() - ): - # Guard against duplicate packets - if debug: - log.debug( - 'Ignoring duplicate message with no unicast questions received from %s [socket %s] (%d bytes) as [%r]', - addrs, - self.sock_description, - data_len, - data, - ) - return - - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = () - if len(addrs) == 2: - # https://github.com/python/mypy/issues/1178 - addr, port = addrs # type: ignore - scope = None - else: - # https://github.com/python/mypy/issues/1178 - addr, port, flow, scope = addrs # type: ignore - if debug: # pragma: no branch - log.debug('IPv6 scope_id %d associated to the receiving interface', scope) - v6_flow_scope = (flow, scope) - - msg = DNSIncoming(data, (addr, port), scope, now) - self.data = data - self.last_time = now - self.last_message = msg - if msg.valid: - if debug: - log.debug( - 'Received from %r:%r [socket %s]: %r (%d bytes) as [%r]', - addr, - port, - self.sock_description, - msg, - data_len, - data, - ) - else: - if debug: - log.debug( - 'Received from %r:%r [socket %s]: (%d bytes) [%r]', - addr, - port, - self.sock_description, - data_len, - data, - ) - return - - if not msg.is_query(): - self.zc.handle_response(msg) - return - - self.handle_query_or_defer(msg, addr, port, self.transport, v6_flow_scope) - - def handle_query_or_defer( - self, - msg: DNSIncoming, - addr: str, - port: int, - transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), - ) -> None: - """Deal with incoming query packets. 
Provides a response if - possible.""" - if not msg.truncated: - self._respond_query(msg, addr, port, transport, v6_flow_scope) - return - - deferred = self._deferred.setdefault(addr, []) - # If we get the same packet we ignore it - for incoming in reversed(deferred): - if incoming.data == msg.data: - return - deferred.append(msg) - delay = millis_to_seconds(random.randint(*_TC_DELAY_RANDOM_INTERVAL)) - assert self.zc.loop is not None - self._cancel_any_timers_for_addr(addr) - self._timers[addr] = self.zc.loop.call_later( - delay, self._respond_query, None, addr, port, transport, v6_flow_scope - ) - - def _cancel_any_timers_for_addr(self, addr: str) -> None: - """Cancel any future truncated packet timers for the address.""" - if addr in self._timers: - self._timers.pop(addr).cancel() - - def _respond_query( - self, - msg: Optional[DNSIncoming], - addr: str, - port: int, - transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), - ) -> None: - """Respond to a query and reassemble any truncated deferred packets.""" - self._cancel_any_timers_for_addr(addr) - packets = self._deferred.pop(addr, []) - if msg: - packets.append(msg) - - self.zc.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) - - def error_received(self, exc: Exception) -> None: - """Likely socket closed or IPv6.""" - # We preformat the message string with the socket as we want - # log_exception_once to log a warrning message once PER EACH - # different socket in case there are problems with multiple - # sockets - msg_str = f"Error with socket {self.sock_description}): %s" - QuietLogger.log_exception_once(exc, msg_str, exc) - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - wrapped_transport = _make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) - self.transport = wrapped_transport - self.sock_description = f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" - - def connection_lost(self, exc: 
Optional[Exception]) -> None: - """Handle connection lost.""" diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py new file mode 100644 index 00000000..97bcf007 --- /dev/null +++ b/src/zeroconf/_listener.py @@ -0,0 +1,216 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +import asyncio +import logging +import random +from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union, cast + +from ._logger import QuietLogger, log +from ._protocol.incoming import DNSIncoming +from ._transport import _WrappedTransport, make_wrapped_transport +from ._utils.time import current_time_millis, millis_to_seconds +from .const import _DUPLICATE_PACKET_SUPPRESSION_INTERVAL, _MAX_MSG_ABSOLUTE + +if TYPE_CHECKING: + from ._core import Zeroconf + +_TC_DELAY_RANDOM_INTERVAL = (400, 500) + + +class AsyncListener: + + """A Listener is used by this module to listen on the multicast + group to which DNS messages are sent, allowing the implementation + to cache information as it arrives. 
+ + It requires registration with an Engine object in order to have + the read() method called when a socket is available for reading.""" + + __slots__ = ( + 'zc', + 'data', + 'last_time', + 'last_message', + 'transport', + 'sock_description', + '_deferred', + '_timers', + ) + + def __init__(self, zc: 'Zeroconf') -> None: + self.zc = zc + self.data: Optional[bytes] = None + self.last_time: float = 0 + self.last_message: Optional[DNSIncoming] = None + self.transport: Optional[_WrappedTransport] = None + self.sock_description: Optional[str] = None + self._deferred: Dict[str, List[DNSIncoming]] = {} + self._timers: Dict[str, asyncio.TimerHandle] = {} + super().__init__() + + def datagram_received( + self, data: bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] + ) -> None: + assert self.transport is not None + data_len = len(data) + debug = log.isEnabledFor(logging.DEBUG) + + if data_len > _MAX_MSG_ABSOLUTE: + # Guard against oversized packets to ensure bad implementations cannot overwhelm + # the system. 
+ if debug: + log.debug( + "Discarding incoming packet with length %s, which is larger " + "than the absolute maximum size of %s", + data_len, + _MAX_MSG_ABSOLUTE, + ) + return + + now = current_time_millis() + if ( + self.data == data + and (now - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL) < self.last_time + and self.last_message is not None + and not self.last_message.has_qu_question() + ): + # Guard against duplicate packets + if debug: + log.debug( + 'Ignoring duplicate message with no unicast questions received from %s [socket %s] (%d bytes) as [%r]', + addrs, + self.sock_description, + data_len, + data, + ) + return + + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = () + if len(addrs) == 2: + # https://github.com/python/mypy/issues/1178 + addr, port = addrs # type: ignore + scope = None + else: + # https://github.com/python/mypy/issues/1178 + addr, port, flow, scope = addrs # type: ignore + if debug: # pragma: no branch + log.debug('IPv6 scope_id %d associated to the receiving interface', scope) + v6_flow_scope = (flow, scope) + + msg = DNSIncoming(data, (addr, port), scope, now) + self.data = data + self.last_time = now + self.last_message = msg + if msg.valid: + if debug: + log.debug( + 'Received from %r:%r [socket %s]: %r (%d bytes) as [%r]', + addr, + port, + self.sock_description, + msg, + data_len, + data, + ) + else: + if debug: + log.debug( + 'Received from %r:%r [socket %s]: (%d bytes) [%r]', + addr, + port, + self.sock_description, + data_len, + data, + ) + return + + if not msg.is_query(): + self.zc.handle_response(msg) + return + + self.handle_query_or_defer(msg, addr, port, self.transport, v6_flow_scope) + + def handle_query_or_defer( + self, + msg: DNSIncoming, + addr: str, + port: int, + transport: _WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + ) -> None: + """Deal with incoming query packets. 
Provides a response if + possible.""" + if not msg.truncated: + self._respond_query(msg, addr, port, transport, v6_flow_scope) + return + + deferred = self._deferred.setdefault(addr, []) + # If we get the same packet we ignore it + for incoming in reversed(deferred): + if incoming.data == msg.data: + return + deferred.append(msg) + delay = millis_to_seconds(random.randint(*_TC_DELAY_RANDOM_INTERVAL)) + assert self.zc.loop is not None + self._cancel_any_timers_for_addr(addr) + self._timers[addr] = self.zc.loop.call_later( + delay, self._respond_query, None, addr, port, transport, v6_flow_scope + ) + + def _cancel_any_timers_for_addr(self, addr: str) -> None: + """Cancel any future truncated packet timers for the address.""" + if addr in self._timers: + self._timers.pop(addr).cancel() + + def _respond_query( + self, + msg: Optional[DNSIncoming], + addr: str, + port: int, + transport: _WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + ) -> None: + """Respond to a query and reassemble any truncated deferred packets.""" + self._cancel_any_timers_for_addr(addr) + packets = self._deferred.pop(addr, []) + if msg: + packets.append(msg) + + self.zc.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) + + def error_received(self, exc: Exception) -> None: + """Likely socket closed or IPv6.""" + # We preformat the message string with the socket as we want + # log_exception_once to log a warrning message once PER EACH + # different socket in case there are problems with multiple + # sockets + msg_str = f"Error with socket {self.sock_description}): %s" + QuietLogger.log_exception_once(exc, msg_str, exc) + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + wrapped_transport = make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) + self.transport = wrapped_transport + self.sock_description = f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" + + def connection_lost(self, exc: 
Optional[Exception]) -> None: + """Handle connection lost.""" diff --git a/src/zeroconf/_transport.py b/src/zeroconf/_transport.py new file mode 100644 index 00000000..7f6d7ac8 --- /dev/null +++ b/src/zeroconf/_transport.py @@ -0,0 +1,67 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +import asyncio +import socket +from typing import Any + + +class _WrappedTransport: + """A wrapper for transports.""" + + __slots__ = ( + 'transport', + 'is_ipv6', + 'sock', + 'fileno', + 'sock_name', + ) + + def __init__( + self, + transport: asyncio.DatagramTransport, + is_ipv6: bool, + sock: socket.socket, + fileno: int, + sock_name: Any, + ) -> None: + """Initialize the wrapped transport. + + These attributes are used when sending packets. 
+ """ + self.transport = transport + self.is_ipv6 = is_ipv6 + self.sock = sock + self.fileno = fileno + self.sock_name = sock_name + + +def make_wrapped_transport(transport: asyncio.DatagramTransport) -> _WrappedTransport: + """Make a wrapped transport.""" + sock: socket.socket = transport.get_extra_info('socket') + return _WrappedTransport( + transport=transport, + is_ipv6=sock.family == socket.AF_INET6, + sock=sock, + fileno=sock.fileno(), + sock_name=sock.getsockname(), + ) diff --git a/tests/conftest.py b/tests/conftest.py index 34fdeb72..5cdff18e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,7 @@ import pytest -from zeroconf import _core, _engine, const +from zeroconf import _core, _listener, const @pytest.fixture(autouse=True) @@ -34,7 +34,7 @@ def disable_duplicate_packet_suppression(): Some tests run too slowly because of the duplicate packet suppression. """ - with patch.object(_engine, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0), patch.object( + with patch.object(_listener, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0), patch.object( const, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0 ): yield diff --git a/tests/test_engine.py b/tests/test_engine.py index 2c7e14be..dc6674dd 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -1,22 +1,18 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._core """ +""" Unit tests for zeroconf._engine """ import asyncio import itertools import logging -import unittest -import unittest.mock -from typing import Set, Tuple, Union -from unittest.mock import MagicMock, patch +from typing import Set +from unittest.mock import patch import pytest import zeroconf as r -from zeroconf import Zeroconf, _engine, const, current_time_millis -from zeroconf._protocol import outgoing -from zeroconf._protocol.incoming import DNSIncoming +from zeroconf import _engine, const from zeroconf.asyncio import AsyncZeroconf log = logging.getLogger('zeroconf') @@ -34,13 +30,6 @@ def teardown_module(): 
log.setLevel(original_logging_level) -def threadsafe_query(zc, protocol, *args): - async def make_query(): - protocol.handle_query_or_defer(*args) - - asyncio.run_coroutine_threadsafe(make_query(), zc.loop).result() - - # This test uses asyncio because it needs to access the cache directly # which is not threadsafe @pytest.mark.asyncio @@ -93,193 +82,3 @@ async def test_reaper_aborts_when_done(): await asyncio.sleep(1.2) assert zeroconf.cache.get(record_with_10s_ttl) is not None assert zeroconf.cache.get(record_with_1s_ttl) is not None - - -def test_guard_against_oversized_packets(): - """Ensure we do not process oversized packets. - - These packets can quickly overwhelm the system. - """ - zc = Zeroconf(interfaces=['127.0.0.1']) - - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - - for i in range(5000): - generated.add_answer_at_time( - r.DNSText( - "packet{i}.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ), - 0, - ) - - try: - # We are patching to generate an oversized packet - with patch.object(outgoing, "_MAX_MSG_ABSOLUTE", 100000), patch.object( - outgoing, "_MAX_MSG_TYPICAL", 100000 - ): - over_sized_packet = generated.packets()[0] - assert len(over_sized_packet) > const._MAX_MSG_ABSOLUTE - except AttributeError: - # cannot patch with cython - zc.close() - return - - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - okpacket_record = r.DNSText( - "okpacket.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ) - - generated.add_answer_at_time( - okpacket_record, - 0, - ) - ok_packet = generated.packets()[0] - - # We cannot test though the network interface as some operating systems - # will guard against the oversized packet and we won't see it. 
- listener = _engine.AsyncListener(zc) - listener.transport = unittest.mock.MagicMock() - - listener.datagram_received(ok_packet, ('127.0.0.1', const._MDNS_PORT)) - assert zc.cache.async_get_unique(okpacket_record) is not None - - listener.datagram_received(over_sized_packet, ('127.0.0.1', const._MDNS_PORT)) - assert ( - zc.cache.async_get_unique( - r.DNSText( - "packet0.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ) - ) - is None - ) - - logging.getLogger('zeroconf').setLevel(logging.INFO) - - listener.datagram_received(over_sized_packet, ('::1', const._MDNS_PORT, 1, 1)) - assert ( - zc.cache.async_get_unique( - r.DNSText( - "packet0.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - 500, - b'path=/~paulsm/', - ) - ) - is None - ) - - zc.close() - - -def test_guard_against_duplicate_packets(): - """Ensure we do not process duplicate packets. - These packets can quickly overwhelm the system. - """ - zc = Zeroconf(interfaces=['127.0.0.1']) - - class SubListener(_engine.AsyncListener): - def handle_query_or_defer( - self, - msg: DNSIncoming, - addr: str, - port: int, - transport: _engine._WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), - ) -> None: - """Handle a query or defer it for later processing.""" - super().handle_query_or_defer(msg, addr, port, transport, v6_flow_scope) - - listener = SubListener(zc) - listener.transport = MagicMock() - - query = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) - question = r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) - query.add_question(question) - packet_with_qm_question = query.packets()[0] - - query3 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) - question3 = r.DNSQuestion("x._ay._tcp.local.", const._TYPE_PTR, const._CLASS_IN) - query3.add_question(question3) - packet_with_qm_question2 = query3.packets()[0] - - query2 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) - question2 
= r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) - question2.unicast = True - query2.add_question(question2) - packet_with_qu_question = query2.packets()[0] - - addrs = ("1.2.3.4", 43) - - with patch.object(_engine, "current_time_millis") as _current_time_millis, patch.object( - listener, "handle_query_or_defer" - ) as _handle_query_or_defer: - start_time = current_time_millis() - - _current_time_millis.return_value = start_time - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the same packet again and handle_query_or_defer should not fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_not_called() - _handle_query_or_defer.reset_mock() - - # Now walk time forward 1000 seconds - _current_time_millis.return_value = start_time + 1000 - # Now call with the same packet again and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the different packet and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question2, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the different packet and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the different packet with qu question and handle_query_or_defer should fire - listener.datagram_received(packet_with_qu_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call again with the same packet that has a qu question and handle_query_or_defer should fire - listener.datagram_received(packet_with_qu_question, 
addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - log.setLevel(logging.WARNING) - - # Call with the QM packet again - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_called_once() - _handle_query_or_defer.reset_mock() - - # Now call with the same packet again and handle_query_or_defer should not fire - listener.datagram_received(packet_with_qm_question, addrs) - _handle_query_or_defer.assert_not_called() - _handle_query_or_defer.reset_mock() - - # Now call with garbage - listener.datagram_received(b'garbage', addrs) - _handle_query_or_defer.assert_not_called() - _handle_query_or_defer.reset_mock() - - zc.close() diff --git a/tests/test_listener.py b/tests/test_listener.py new file mode 100644 index 00000000..737b8111 --- /dev/null +++ b/tests/test_listener.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python + + +""" Unit tests for zeroconf._listener """ + +import logging +import unittest +import unittest.mock +from typing import Tuple, Union +from unittest.mock import MagicMock, patch + +import zeroconf as r +from zeroconf import Zeroconf, _engine, _listener, const, current_time_millis +from zeroconf._protocol import outgoing +from zeroconf._protocol.incoming import DNSIncoming + +log = logging.getLogger('zeroconf') +original_logging_level = logging.NOTSET + + +def setup_module(): + global original_logging_level + original_logging_level = log.level + log.setLevel(logging.DEBUG) + + +def teardown_module(): + if original_logging_level != logging.NOTSET: + log.setLevel(original_logging_level) + + +def test_guard_against_oversized_packets(): + """Ensure we do not process oversized packets. + + These packets can quickly overwhelm the system. 
+ """ + zc = Zeroconf(interfaces=['127.0.0.1']) + + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + + for i in range(5000): + generated.add_answer_at_time( + r.DNSText( + "packet{i}.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ), + 0, + ) + + try: + # We are patching to generate an oversized packet + with patch.object(outgoing, "_MAX_MSG_ABSOLUTE", 100000), patch.object( + outgoing, "_MAX_MSG_TYPICAL", 100000 + ): + over_sized_packet = generated.packets()[0] + assert len(over_sized_packet) > const._MAX_MSG_ABSOLUTE + except AttributeError: + # cannot patch with cython + zc.close() + return + + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + okpacket_record = r.DNSText( + "okpacket.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ) + + generated.add_answer_at_time( + okpacket_record, + 0, + ) + ok_packet = generated.packets()[0] + + # We cannot test though the network interface as some operating systems + # will guard against the oversized packet and we won't see it. 
+ listener = _listener.AsyncListener(zc) + listener.transport = unittest.mock.MagicMock() + + listener.datagram_received(ok_packet, ('127.0.0.1', const._MDNS_PORT)) + assert zc.cache.async_get_unique(okpacket_record) is not None + + listener.datagram_received(over_sized_packet, ('127.0.0.1', const._MDNS_PORT)) + assert ( + zc.cache.async_get_unique( + r.DNSText( + "packet0.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ) + ) + is None + ) + + logging.getLogger('zeroconf').setLevel(logging.INFO) + + listener.datagram_received(over_sized_packet, ('::1', const._MDNS_PORT, 1, 1)) + assert ( + zc.cache.async_get_unique( + r.DNSText( + "packet0.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 500, + b'path=/~paulsm/', + ) + ) + is None + ) + + zc.close() + + +def test_guard_against_duplicate_packets(): + """Ensure we do not process duplicate packets. + These packets can quickly overwhelm the system. + """ + zc = Zeroconf(interfaces=['127.0.0.1']) + + class SubListener(_listener.AsyncListener): + def handle_query_or_defer( + self, + msg: DNSIncoming, + addr: str, + port: int, + transport: _engine._WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + ) -> None: + """Handle a query or defer it for later processing.""" + super().handle_query_or_defer(msg, addr, port, transport, v6_flow_scope) + + listener = SubListener(zc) + listener.transport = MagicMock() + + query = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) + question = r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) + query.add_question(question) + packet_with_qm_question = query.packets()[0] + + query3 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) + question3 = r.DNSQuestion("x._ay._tcp.local.", const._TYPE_PTR, const._CLASS_IN) + query3.add_question(question3) + packet_with_qm_question2 = query3.packets()[0] + + query2 = r.DNSOutgoing(const._FLAGS_QR_QUERY, multicast=True) + 
question2 = r.DNSQuestion("x._http._tcp.local.", const._TYPE_PTR, const._CLASS_IN) + question2.unicast = True + query2.add_question(question2) + packet_with_qu_question = query2.packets()[0] + + addrs = ("1.2.3.4", 43) + + with patch.object(_listener, "current_time_millis") as _current_time_millis, patch.object( + listener, "handle_query_or_defer" + ) as _handle_query_or_defer: + start_time = current_time_millis() + + _current_time_millis.return_value = start_time + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the same packet again and handle_query_or_defer should not fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_not_called() + _handle_query_or_defer.reset_mock() + + # Now walk time forward 1000 seconds + _current_time_millis.return_value = start_time + 1000 + # Now call with the same packet again and handle_query_or_defer should fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the different packet and handle_query_or_defer should fire + listener.datagram_received(packet_with_qm_question2, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the different packet and handle_query_or_defer should fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the different packet with qu question and handle_query_or_defer should fire + listener.datagram_received(packet_with_qu_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call again with the same packet that has a qu question and handle_query_or_defer should fire + 
listener.datagram_received(packet_with_qu_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + log.setLevel(logging.WARNING) + + # Call with the QM packet again + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_called_once() + _handle_query_or_defer.reset_mock() + + # Now call with the same packet again and handle_query_or_defer should not fire + listener.datagram_received(packet_with_qm_question, addrs) + _handle_query_or_defer.assert_not_called() + _handle_query_or_defer.reset_mock() + + # Now call with garbage + listener.datagram_received(b'garbage', addrs) + _handle_query_or_defer.assert_not_called() + _handle_query_or_defer.reset_mock() + + zc.close() From 9efde8c8c1ed14c5d3c162f185b49212fcfcb5c9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 14 Aug 2023 12:02:42 -0500 Subject: [PATCH 031/434] feat: cythonize _listener.py to improve incoming message processing performance (#1220) --- build_ext.py | 1 + tests/test_core.py | 9 ++++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/build_ext.py b/build_ext.py index 0020f9fe..c0042df2 100644 --- a/build_ext.py +++ b/build_ext.py @@ -25,6 +25,7 @@ def build(setup_kwargs: Any) -> None: [ "src/zeroconf/_dns.py", "src/zeroconf/_cache.py", + "src/zeroconf/_listener.py", "src/zeroconf/_protocol/incoming.py", "src/zeroconf/_protocol/outgoing.py", ], diff --git a/tests/test_core.py b/tests/test_core.py index 8f5322bd..303e28ef 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -15,6 +15,13 @@ from typing import cast from unittest.mock import patch +if sys.version_info[:3][1] < 8: + from unittest.mock import Mock + + AsyncMock = Mock +else: + from unittest.mock import AsyncMock + import pytest import zeroconf as r @@ -669,7 +676,7 @@ def _background_register(): @pytest.mark.asyncio @unittest.skipIf(sys.version_info[:3][1] < 8, 'Requires Python 3.8 or later to patch _async_setup') 
@patch("zeroconf._core._STARTUP_TIMEOUT", 0) -@patch("zeroconf._core.AsyncEngine._async_setup") +@patch("zeroconf._core.AsyncEngine._async_setup", new_callable=AsyncMock) async def test_event_loop_blocked(mock_start): """Test we raise NotRunningException when waiting for startup that times out.""" aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) From 1901fb45b06ad2534e9455e50a44cd6608629ad9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 14 Aug 2023 17:11:08 +0000 Subject: [PATCH 032/434] 0.77.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ff7ead6e..c1ebcb0c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.77.0 (2023-08-14) + +### Feature + +* Cythonize _listener.py to improve incoming message processing performance ([#1220](https://github.com/python-zeroconf/python-zeroconf/issues/1220)) ([`9efde8c`](https://github.com/python-zeroconf/python-zeroconf/commit/9efde8c8c1ed14c5d3c162f185b49212fcfcb5c9)) + ## v0.76.0 (2023-08-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 568f4623..f9d95b07 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.76.0" +version = "0.77.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 39a6b3fc..1c4807b1 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.76.0' +__version__ = '0.77.0' __license__ = 'LGPL' From f459856a0a61b8afa8a541926d7e15d51f8e4aea Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 14 Aug 2023 12:24:47 -0500 Subject: [PATCH 033/434] feat: add cython pxd file for _listener.py to improve incoming message processing performance (#1221) --- src/zeroconf/_listener.pxd | 24 ++++++++++++++++++++++++ src/zeroconf/_listener.py | 9 +++++++-- 2 files changed, 31 insertions(+), 2 deletions(-) create mode 100644 src/zeroconf/_listener.pxd diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd new file mode 100644 index 00000000..0f32a44a --- /dev/null +++ b/src/zeroconf/_listener.pxd @@ -0,0 +1,24 @@ + +import cython + + +cdef object millis_to_seconds +cdef object log +cdef object logging_DEBUG + +from ._protocol.incoming cimport DNSIncoming + + +cdef class AsyncListener: + + cdef public object zc + cdef public cython.bytes data + cdef public cython.float last_time + cdef public DNSIncoming last_message + cdef public object transport + cdef public object sock_description + cdef public cython.dict _deferred + cdef public cython.dict _timers + + @cython.locals(now=cython.float, msg=DNSIncoming) + cpdef datagram_received(self, cython.bytes bytes, cython.tuple addrs) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 97bcf007..bc0af296 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -37,6 +37,11 @@ _TC_DELAY_RANDOM_INTERVAL = (400, 500) +_bytes = bytes + +logging_DEBUG = logging.DEBUG + + class AsyncListener: """A Listener is used by this module to listen on the multicast @@ -69,11 +74,11 @@ def __init__(self, zc: 'Zeroconf') -> None: super().__init__() def datagram_received( - self, data: bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] + self, data: _bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] ) -> None: assert self.transport is not None data_len = len(data) - debug = log.isEnabledFor(logging.DEBUG) + debug = log.isEnabledFor(logging_DEBUG) if data_len > _MAX_MSG_ABSOLUTE: # Guard against oversized packets to ensure bad implementations 
cannot overwhelm From 13d9aa5815b1b5a03000de2aaa62d106fe5e26a0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 14 Aug 2023 12:27:11 -0500 Subject: [PATCH 034/434] chore: empty commit to re-run release (#1223) From 0e962201facea2f022bb21d292d17c700c4dbf92 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 14 Aug 2023 17:42:15 +0000 Subject: [PATCH 035/434] 0.78.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c1ebcb0c..3fbe7d4c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.78.0 (2023-08-14) + +### Feature + +* Add cython pxd file for _listener.py to improve incoming message processing performance ([#1221](https://github.com/python-zeroconf/python-zeroconf/issues/1221)) ([`f459856`](https://github.com/python-zeroconf/python-zeroconf/commit/f459856a0a61b8afa8a541926d7e15d51f8e4aea)) + ## v0.77.0 (2023-08-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index f9d95b07..958a6e18 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.77.0" +version = "0.78.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 1c4807b1..f6393b28 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.77.0' +__version__ = '0.78.0' __license__ = 'LGPL' From ceb92cfe42d885dbb38cee7aaeebf685d97627a9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 14 Aug 2023 14:49:54 -0500 Subject: [PATCH 036/434] feat: refactor notify implementation to reduce overhead of adding and removing listeners (#1224) --- src/zeroconf/_core.py | 19 +++++++++--------- src/zeroconf/_services/info.py | 36 +++++++++++++--------------------- src/zeroconf/_utils/asyncio.py | 27 +++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 31 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 0f9b45df..173a29d0 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -25,7 +25,7 @@ import sys import threading from types import TracebackType -from typing import Awaitable, Dict, List, Optional, Tuple, Type, Union +from typing import Awaitable, Dict, List, Optional, Set, Tuple, Type, Union from ._cache import DNSCache from ._dns import DNSQuestion, DNSQuestionType @@ -49,11 +49,13 @@ from ._transport import _WrappedTransport from ._updates import RecordUpdateListener from ._utils.asyncio import ( + _resolve_all_futures_to_none, await_awaitable, get_running_loop, run_coro_with_timeout, shutdown_loop, wait_event_or_timeout, + wait_for_future_set_or_timeout, ) from ._utils.name import service_type_name from ._utils.net import ( @@ -188,7 +190,7 @@ def __init__( self.query_handler = QueryHandler(self.registry, self.cache, self.question_history) self.record_manager = RecordManager(self) - self.notify_event: Optional[asyncio.Event] = None + self._notify_futures: Set[asyncio.Future] = set() self.loop: Optional[asyncio.AbstractEventLoop] = None self._loop_thread: Optional[threading.Thread] = None @@ -206,7 +208,6 @@ def start(self) -> None: """Start Zeroconf.""" self.loop = get_running_loop() if self.loop: - self.notify_event = asyncio.Event() self.engine.setup(self.loop, None) return self._start_thread() @@ -218,7 +219,6 @@ def _start_thread(self) -> None: def _run_loop() -> None: self.loop = asyncio.new_event_loop() asyncio.set_event_loop(self.loop) - self.notify_event = asyncio.Event() 
self.engine.setup(self.loop, loop_thread_ready) self.loop.run_forever() @@ -245,8 +245,9 @@ def listeners(self) -> List[RecordUpdateListener]: async def async_wait(self, timeout: float) -> None: """Calling task waits for a given number of milliseconds or until notified.""" - assert self.notify_event is not None - await wait_event_or_timeout(self.notify_event, timeout=millis_to_seconds(timeout)) + loop = self.loop + assert loop is not None + await wait_for_future_set_or_timeout(loop, self._notify_futures, timeout) def notify_all(self) -> None: """Notifies all waiting threads and notify listeners.""" @@ -255,9 +256,9 @@ def notify_all(self) -> None: def async_notify_all(self) -> None: """Schedule an async_notify_all.""" - assert self.notify_event is not None - self.notify_event.set() - self.notify_event.clear() + notify_futures = self._notify_futures + if notify_futures: + _resolve_all_futures_to_none(notify_futures) def get_service_info( self, type_: str, name: str, timeout: int = 3000, question_type: Optional[DNSQuestionType] = None diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 2f4ae59e..9b986404 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -39,10 +39,15 @@ from .._logger import log from .._protocol.outgoing import DNSOutgoing from .._updates import RecordUpdate, RecordUpdateListener -from .._utils.asyncio import get_running_loop, run_coro_with_timeout +from .._utils.asyncio import ( + _resolve_all_futures_to_none, + get_running_loop, + run_coro_with_timeout, + wait_for_future_set_or_timeout, +) from .._utils.name import service_type_name from .._utils.net import IPVersion, _encode_address -from .._utils.time import current_time_millis, millis_to_seconds +from .._utils.time import current_time_millis from ..const import ( _ADDRESS_RECORD_TYPES, _CLASS_IN, @@ -89,12 +94,6 @@ def instance_name_from_service_info(info: "ServiceInfo", strict: bool = True) -> _cached_ip_addresses = 
lru_cache(maxsize=256)(ip_address) -def _set_future_none_if_not_done(fut: asyncio.Future) -> None: - """Set a future to None if it is not done.""" - if not fut.done(): # pragma: no branch - fut.set_result(None) - - class ServiceInfo(RecordUpdateListener): """Service information. @@ -180,7 +179,7 @@ def __init__( self.host_ttl = host_ttl self.other_ttl = other_ttl self.interface_index = interface_index - self._new_records_futures: List[asyncio.Future] = [] + self._new_records_futures: Set[asyncio.Future] = set() @property def name(self) -> str: @@ -242,14 +241,9 @@ def properties(self) -> Dict[Union[str, bytes], Optional[Union[str, bytes]]]: async def async_wait(self, timeout: float) -> None: """Calling task waits for a given number of milliseconds or until notified.""" - loop = asyncio.get_running_loop() - future = loop.create_future() - self._new_records_futures.append(future) - handle = loop.call_later(millis_to_seconds(timeout), _set_future_none_if_not_done, future) - try: - await future - finally: - handle.cancel() + loop = get_running_loop() + assert loop is not None + await wait_for_future_set_or_timeout(loop, self._new_records_futures, timeout) def addresses_by_version(self, version: IPVersion) -> List[bytes]: """List addresses matching IP version. @@ -441,11 +435,9 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordU This method will be run in the event loop. """ - if self._process_records_threadsafe(zc, now, records) and self._new_records_futures: - for future in self._new_records_futures: - if not future.done(): - future.set_result(None) - self._new_records_futures.clear() + new_records_futures = self._new_records_futures + if self._process_records_threadsafe(zc, now, records) and new_records_futures: + _resolve_all_futures_to_none(new_records_futures) def _process_records_threadsafe(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> bool: """Thread safe record updating. 
diff --git a/src/zeroconf/_utils/asyncio.py b/src/zeroconf/_utils/asyncio.py index 3a5beb5a..358ef37e 100644 --- a/src/zeroconf/_utils/asyncio.py +++ b/src/zeroconf/_utils/asyncio.py @@ -41,6 +41,33 @@ _WAIT_FOR_LOOP_TASKS_TIMEOUT = 3 # Must be larger than _TASK_AWAIT_TIMEOUT +def _set_future_none_if_not_done(fut: asyncio.Future) -> None: + """Set a future to None if it is not done.""" + if not fut.done(): # pragma: no branch + fut.set_result(None) + + +def _resolve_all_futures_to_none(futures: Set[asyncio.Future]) -> None: + """Resolve all futures to None.""" + for fut in futures: + _set_future_none_if_not_done(fut) + futures.clear() + + +async def wait_for_future_set_or_timeout( + loop: asyncio.AbstractEventLoop, future_set: Set[asyncio.Future], timeout: float +) -> None: + """Wait for a future or timeout (in milliseconds).""" + future = loop.create_future() + future_set.add(future) + handle = loop.call_later(millis_to_seconds(timeout), _set_future_none_if_not_done, future) + try: + await future + finally: + handle.cancel() + future_set.discard(future) + + async def wait_event_or_timeout(event: asyncio.Event, timeout: float) -> None: """Wait for an event or timeout.""" with contextlib.suppress(asyncio.TimeoutError): From 5406f30a32f8efc8de15da70f9e61be8bb893163 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 14 Aug 2023 20:04:43 +0000 Subject: [PATCH 037/434] 0.79.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3fbe7d4c..54c19900 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.79.0 (2023-08-14) + +### Feature + +* Refactor notify implementation to reduce overhead of adding and removing listeners ([#1224](https://github.com/python-zeroconf/python-zeroconf/issues/1224)) 
([`ceb92cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb92cfe42d885dbb38cee7aaeebf685d97627a9)) + ## v0.78.0 (2023-08-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 958a6e18..1f985c98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.78.0" +version = "0.79.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index f6393b28..09dd2b2a 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.78.0' +__version__ = '0.79.0' __license__ = 'LGPL' From 1492e41b3d5cba5598cc9dd6bd2bc7d238f13555 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 15 Aug 2023 15:07:20 -0500 Subject: [PATCH 038/434] feat: optimize unpacking properties in ServiceInfo (#1225) --- src/zeroconf/_services/info.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 9b986404..f8be5b38 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -361,20 +361,11 @@ def _unpack_text_into_properties(self) -> None: strs.append(text[index : index + length]) index += length - key: bytes - value: Optional[bytes] for s in strs: - key_value = s.split(b'=', 1) - if len(key_value) == 2: - key, value = key_value - else: - # No equals sign at all - key = s - value = None - + key, _, value = s.partition(b'=') # Only update non-existent properties if key and key not in result: - result[key] = value + result[key] = value or None # Properties should be set atomically # in case another thread is reading them From 0c5e5cf363ae3a2dabd8da6e193c9e6726725b61 Mon Sep 17 00:00:00 
2001 From: github-actions Date: Tue, 15 Aug 2023 20:18:24 +0000 Subject: [PATCH 039/434] 0.80.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 54c19900..b061cc26 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.80.0 (2023-08-15) + +### Feature + +* Optimize unpacking properties in ServiceInfo ([#1225](https://github.com/python-zeroconf/python-zeroconf/issues/1225)) ([`1492e41`](https://github.com/python-zeroconf/python-zeroconf/commit/1492e41b3d5cba5598cc9dd6bd2bc7d238f13555)) + ## v0.79.0 (2023-08-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 1f985c98..82d16c5f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.79.0" +version = "0.80.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 09dd2b2a..8cb7ec11 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.79.0' +__version__ = '0.80.0' __license__ = 'LGPL' From 7b00b261839bad6f57854a0f709a53165a8f7c2f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 22 Aug 2023 09:46:10 -0500 Subject: [PATCH 040/434] chore: add missing typing to handler deque (#1228) --- src/zeroconf/_handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zeroconf/_handlers.py b/src/zeroconf/_handlers.py index be0d619f..bd33cc68 100644 --- a/src/zeroconf/_handlers.py +++ b/src/zeroconf/_handlers.py @@ -543,7 +543,7 @@ class MulticastOutgoingQueue: def __init__(self, zeroconf: 'Zeroconf', additional_delay: int, max_aggregation_delay: int) -> None: self.zc = zeroconf - self.queue: deque = deque() + self.queue: deque[AnswerGroup] = deque() # Additional delay is used to implement # Protect the network against excessive packet flooding # https://datatracker.ietf.org/doc/html/rfc6762#section-14 From a0e754c6a599e585f37943c158a589827ac58421 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 22 Aug 2023 09:46:18 -0500 Subject: [PATCH 041/434] chore: remove default calls to .keys() (#1229) --- src/zeroconf/_handlers.py | 2 +- src/zeroconf/_services/registry.py | 2 +- tests/test_handlers.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/zeroconf/_handlers.py b/src/zeroconf/_handlers.py index bd33cc68..e1aba973 100644 --- a/src/zeroconf/_handlers.py +++ b/src/zeroconf/_handlers.py @@ -166,7 +166,7 @@ def add_qu_question_response(self, answers: _AnswerWithAdditionalsType) -> None: def add_ucast_question_response(self, answers: _AnswerWithAdditionalsType) -> None: """Generate a response to a unicast query.""" self._additionals.update(answers) - self._ucast.update(answers.keys()) + self._ucast.update(answers) def add_mcast_question_response(self, answers: _AnswerWithAdditionalsType) -> None: """Generate a response to a multicast query.""" diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index fd2ad5ce..64f13512 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -66,7 +66,7 @@ def 
async_get_info_name(self, name: str) -> Optional[ServiceInfo]: def async_get_types(self) -> List[str]: """Return all types.""" - return list(self.types.keys()) + return list(self.types) def async_get_infos_type(self, type_: str) -> List[ServiceInfo]: """Return all ServiceInfo matching type.""" diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 2aa5caa1..66ed811a 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -577,7 +577,7 @@ def test_qu_response(): def _validate_complete_response(answers): has_srv = has_txt = has_a = has_aaaa = has_nsec = False - nbr_answers = len(answers.keys()) + nbr_answers = len(answers) additionals = set().union(*answers.values()) nbr_additionals = len(additionals) From cd7b56b2aa0c8ee429da430e9a36abd515512011 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 22 Aug 2023 09:46:26 -0500 Subject: [PATCH 042/434] feat: optimizing sending answers to questions (#1227) --- src/zeroconf/_handlers.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/zeroconf/_handlers.py b/src/zeroconf/_handlers.py index e1aba973..02f0d141 100644 --- a/src/zeroconf/_handlers.py +++ b/src/zeroconf/_handlers.py @@ -23,6 +23,7 @@ import itertools import random from collections import deque +from operator import attrgetter from typing import ( TYPE_CHECKING, Dict, @@ -71,6 +72,8 @@ _MULTICAST_DELAY_RANDOM_INTERVAL = (20, 120) _RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} +NAME_GETTER = attrgetter('name') + class QuestionAnswers(NamedTuple): ucast: _AnswerWithAdditionalsType @@ -109,13 +112,13 @@ def construct_outgoing_unicast_answers( def _add_answers_additionals(out: DNSOutgoing, answers: _AnswerWithAdditionalsType) -> None: # Find additionals and suppress any additionals that are already in answers - sending: Set[DNSRecord] = set(answers.keys()) + sending: Set[DNSRecord] = set(answers) # Answers are sorted to group names together to increase the chance # that similar 
names will end up in the same packet and can reduce the # overall size of the outgoing response via name compression - for answer, additionals in sorted(answers.items(), key=lambda kv: kv[0].name): + for answer in sorted(answers, key=NAME_GETTER): out.add_answer_at_time(answer, 0) - for additional in additionals: + for additional in answers[answer]: if additional not in sending: out.add_additional_answer(additional) sending.add(additional) From 47d3c7ad4bc5f2247631c3ad5e6b6156d45a0a4e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 22 Aug 2023 09:46:33 -0500 Subject: [PATCH 043/434] feat: speed up the service registry with a cython pxd (#1226) --- build_ext.py | 1 + src/zeroconf/_services/registry.pxd | 18 ++++++++++++++++++ src/zeroconf/_services/registry.py | 5 ++++- 3 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 src/zeroconf/_services/registry.pxd diff --git a/build_ext.py b/build_ext.py index c0042df2..38c8127a 100644 --- a/build_ext.py +++ b/build_ext.py @@ -28,6 +28,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_listener.py", "src/zeroconf/_protocol/incoming.py", "src/zeroconf/_protocol/outgoing.py", + "src/zeroconf/_services/registry.py", ], compiler_directives={"language_level": "3"}, # Python 3 ), diff --git a/src/zeroconf/_services/registry.pxd b/src/zeroconf/_services/registry.pxd new file mode 100644 index 00000000..722ef0ec --- /dev/null +++ b/src/zeroconf/_services/registry.pxd @@ -0,0 +1,18 @@ + +import cython + + +cdef class ServiceRegistry: + + cdef cython.dict _services + cdef public cython.dict types + cdef public cython.dict servers + + @cython.locals( + record_list=cython.list, + ) + cdef _async_get_by_index(self, cython.dict records, str key) + + cdef _add(self, object info) + + cdef _remove(self, cython.list infos) diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index 64f13512..1f2f1d52 100644 --- a/src/zeroconf/_services/registry.py +++ 
b/src/zeroconf/_services/registry.py @@ -78,7 +78,10 @@ def async_get_infos_server(self, server: str) -> List[ServiceInfo]: def _async_get_by_index(self, records: Dict[str, List], key: str) -> List[ServiceInfo]: """Return all ServiceInfo matching the index.""" - return [self._services[name] for name in records.get(key, [])] + record_list = records.get(key) + if record_list is None: + return [] + return [self._services[name] for name in record_list] def _add(self, info: ServiceInfo) -> None: """Add a new service under the lock.""" From b492eb4204e433dec7b9f9a1c79525649ef33b5c Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 22 Aug 2023 14:58:49 +0000 Subject: [PATCH 044/434] 0.81.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b061cc26..2e4d245a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.81.0 (2023-08-22) + +### Feature + +* Speed up the service registry with a cython pxd ([#1226](https://github.com/python-zeroconf/python-zeroconf/issues/1226)) ([`47d3c7a`](https://github.com/python-zeroconf/python-zeroconf/commit/47d3c7ad4bc5f2247631c3ad5e6b6156d45a0a4e)) +* Optimizing sending answers to questions ([#1227](https://github.com/python-zeroconf/python-zeroconf/issues/1227)) ([`cd7b56b`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7b56b2aa0c8ee429da430e9a36abd515512011)) + ## v0.80.0 (2023-08-15) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 82d16c5f..7ccfb43c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.80.0" +version = "0.81.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 8cb7ec11..316da1a2 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.80.0' +__version__ = '0.81.0' __license__ = 'LGPL' From 3e89294ea0ecee1122e1c1ffdc78925add8ca40e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 22 Aug 2023 13:23:55 -0500 Subject: [PATCH 045/434] feat: optimize processing of records in RecordUpdateListener subclasses (#1231) --- src/zeroconf/_services/browser.py | 70 ++++++------- src/zeroconf/_services/info.py | 22 +--- src/zeroconf/_updates.py | 2 +- tests/services/test_info.py | 165 ++++++++++++++++++------------ 4 files changed, 136 insertions(+), 123 deletions(-) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 84185f15..60c0439e 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -39,7 +39,7 @@ cast, ) -from .._dns import DNSPointer, DNSQuestion, DNSQuestionType, DNSRecord +from .._dns import DNSPointer, DNSQuestion, DNSQuestionType from .._logger import log from .._protocol.outgoing import DNSOutgoing from .._services import ( @@ -383,50 +383,46 @@ def _enqueue_callback( ): self._pending_handlers[key] = state_change - def _async_process_record_update( - self, now: float, record: DNSRecord, old_record: Optional[DNSRecord] - ) -> None: - """Process a single record update from a batch of updates.""" - record_type = record.type - - if record_type is _TYPE_PTR: - if TYPE_CHECKING: - record = cast(DNSPointer, record) - for type_ in self.types.intersection(cached_possible_types(record.name)): - if old_record is None: - self._enqueue_callback(ServiceStateChange.Added, type_, record.alias) - elif record.is_expired(now): - self._enqueue_callback(ServiceStateChange.Removed, type_, record.alias) - else: - 
self.reschedule_type(type_, now, record.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT)) - return - - # If its expired or already exists in the cache it cannot be updated. - if old_record or record.is_expired(now): - return - - if record_type in _ADDRESS_RECORD_TYPES: - # Iterate through the DNSCache and callback any services that use this address - for type_, name in self._names_matching_types( - {service.name for service in self.zc.cache.async_entries_with_server(record.name)} - ): - self._enqueue_callback(ServiceStateChange.Updated, type_, name) - return - - for type_, name in self._names_matching_types((record.name,)): - self._enqueue_callback(ServiceStateChange.Updated, type_, name) - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> None: """Callback invoked by Zeroconf when new information arrives. Updates information required by browser in the Zeroconf cache. - Ensures that there is are no unecessary duplicates in the list. + Ensures that there is are no unnecessary duplicates in the list. This method will be run in the event loop. """ - for record in records: - self._async_process_record_update(now, record[0], record[1]) + for record_update in records: + record, old_record = record_update + record_type = record.type + + if record_type is _TYPE_PTR: + if TYPE_CHECKING: + record = cast(DNSPointer, record) + for type_ in self.types.intersection(cached_possible_types(record.name)): + if old_record is None: + self._enqueue_callback(ServiceStateChange.Added, type_, record.alias) + elif record.is_expired(now): + self._enqueue_callback(ServiceStateChange.Removed, type_, record.alias) + else: + expire_time = record.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) + self.reschedule_type(type_, now, expire_time) + continue + + # If its expired or already exists in the cache it cannot be updated. 
+ if old_record or record.is_expired(now): + continue + + if record_type in _ADDRESS_RECORD_TYPES: + # Iterate through the DNSCache and callback any services that use this address + for type_, name in self._names_matching_types( + {service.name for service in self.zc.cache.async_entries_with_server(record.name)} + ): + self._enqueue_callback(ServiceStateChange.Updated, type_, name) + continue + + for type_, name in self._names_matching_types((record.name,)): + self._enqueue_callback(ServiceStateChange.Updated, type_, name) @abstractmethod def async_update_records_complete(self) -> None: diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index f8be5b38..705f3723 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -410,35 +410,17 @@ def _set_ipv4_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: else: self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) - def update_record(self, zc: 'Zeroconf', now: float, record: Optional[DNSRecord]) -> None: - """Updates service information from a DNS record. - - This method is deprecated and will be removed in a future version. - update_records should be implemented instead. - - This method will be run in the event loop. - """ - if record is not None: - self._process_record_threadsafe(zc, record, now) - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> None: """Updates service information from a DNS record. This method will be run in the event loop. """ new_records_futures = self._new_records_futures - if self._process_records_threadsafe(zc, now, records) and new_records_futures: - _resolve_all_futures_to_none(new_records_futures) - - def _process_records_threadsafe(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> bool: - """Thread safe record updating. - - Returns True if new records were added. 
- """ updated: bool = False for record_update in records: updated |= self._process_record_threadsafe(zc, record_update.new, now) - return updated + if updated and new_records_futures: + _resolve_all_futures_to_none(new_records_futures) def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: float) -> bool: """Thread safe record updating. diff --git a/src/zeroconf/_updates.py b/src/zeroconf/_updates.py index 1a1e028d..b760daf9 100644 --- a/src/zeroconf/_updates.py +++ b/src/zeroconf/_updates.py @@ -56,7 +56,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordU All records that are received in a single packet are passed to update_records. - This implementation is a compatiblity shim to ensure older code + This implementation is a compatibility shim to ensure older code that uses RecordUpdateListener as a base class will continue to get calls to update_record. This method will raise NotImplementedError in a future version. diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 64a51bd1..1fc3bd01 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -17,7 +17,7 @@ import pytest import zeroconf as r -from zeroconf import DNSAddress, const +from zeroconf import DNSAddress, RecordUpdate, const from zeroconf._services import info from zeroconf._services.info import ServiceInfo from zeroconf._utils.net import IPVersion @@ -68,89 +68,119 @@ def test_service_info_rejects_non_matching_updates(self): service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] ) # Verify backwards compatiblity with calling with None - info.update_record(zc, now, None) + info.async_update_records(zc, now, []) # Matching updates - info.update_record( + info.async_update_records( zc, now, - r.DNSText( - service_name, - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - ttl, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', - ), + [ + RecordUpdate( + r.DNSText( + 
service_name, + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + ), + None, + ) + ], ) assert info.properties[b"ci"] == b"2" - info.update_record( + info.async_update_records( zc, now, - r.DNSService( - service_name, - const._TYPE_SRV, - const._CLASS_IN | const._CLASS_UNIQUE, - ttl, - 0, - 0, - 80, - 'ASH-2.local.', - ), + [ + RecordUpdate( + r.DNSService( + service_name, + const._TYPE_SRV, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + 0, + 0, + 80, + 'ASH-2.local.', + ), + None, + ) + ], ) assert info.server_key == 'ash-2.local.' assert info.server == 'ASH-2.local.' new_address = socket.inet_aton("10.0.1.3") - info.update_record( + info.async_update_records( zc, now, - r.DNSAddress( - 'ASH-2.local.', - const._TYPE_A, - const._CLASS_IN | const._CLASS_UNIQUE, - ttl, - new_address, - ), + [ + RecordUpdate( + r.DNSAddress( + 'ASH-2.local.', + const._TYPE_A, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + new_address, + ), + None, + ) + ], ) assert new_address in info.addresses # Non-matching updates - info.update_record( + info.async_update_records( zc, now, - r.DNSText( - "incorrect.name.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - ttl, - b'\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==', - ), + [ + RecordUpdate( + r.DNSText( + "incorrect.name.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + b'\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==', + ), + None, + ) + ], ) assert info.properties[b"ci"] == b"2" - info.update_record( + info.async_update_records( zc, now, - r.DNSService( - "incorrect.name.", - const._TYPE_SRV, - const._CLASS_IN | const._CLASS_UNIQUE, - ttl, - 0, - 0, - 80, - 'ASH-2.local.', - ), + [ + RecordUpdate( + r.DNSService( + "incorrect.name.", + const._TYPE_SRV, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + 0, + 0, + 80, + 'ASH-2.local.', + ), + None, + ) + ], ) assert info.server_key == 'ash-2.local.' assert info.server == 'ASH-2.local.' 
new_address = socket.inet_aton("10.0.1.4") - info.update_record( + info.async_update_records( zc, now, - r.DNSAddress( - "incorrect.name.", - const._TYPE_A, - const._CLASS_IN | const._CLASS_UNIQUE, - ttl, - new_address, - ), + [ + RecordUpdate( + r.DNSAddress( + "incorrect.name.", + const._TYPE_A, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + new_address, + ), + None, + ) + ], ) assert new_address not in info.addresses zc.close() @@ -169,16 +199,21 @@ def test_service_info_rejects_expired_records(self): service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] ) # Matching updates - info.update_record( + info.async_update_records( zc, now, - r.DNSText( - service_name, - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - ttl, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', - ), + [ + RecordUpdate( + r.DNSText( + service_name, + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + ), + None, + ) + ], ) assert info.properties[b"ci"] == b"2" # Expired record @@ -190,7 +225,7 @@ def test_service_info_rejects_expired_records(self): b'\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==', ) expired_record.set_created_ttl(1000, 1) - info.update_record(zc, now, expired_record) + info.async_update_records(zc, now, [RecordUpdate(expired_record, None)]) assert info.properties[b"ci"] == b"2" zc.close() From 8644173c3ba576e95dc90879f4d94da59d464702 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 22 Aug 2023 18:35:47 +0000 Subject: [PATCH 046/434] 0.82.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e4d245a..43632cba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.82.0 (2023-08-22) + +### Feature + +* Optimize processing of records in RecordUpdateListener subclasses 
([#1231](https://github.com/python-zeroconf/python-zeroconf/issues/1231)) ([`3e89294`](https://github.com/python-zeroconf/python-zeroconf/commit/3e89294ea0ecee1122e1c1ffdc78925add8ca40e)) + ## v0.81.0 (2023-08-22) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 7ccfb43c..873dff4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.81.0" +version = "0.82.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 316da1a2..2928f1d1 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.81.0' +__version__ = '0.82.0' __license__ = 'LGPL' From 30c3ad9d1bc6b589e1ca6675fea21907ebcd1ced Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 22 Aug 2023 14:06:49 -0500 Subject: [PATCH 047/434] fix: build failures with older cython 0.29 series (#1232) --- src/zeroconf/_services/registry.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index 1f2f1d52..12051275 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -25,6 +25,8 @@ from .._exceptions import ServiceNameAlreadyRegistered from .info import ServiceInfo +_str = str + class ServiceRegistry: """A registry to keep track of services. 
@@ -76,7 +78,7 @@ def async_get_infos_server(self, server: str) -> List[ServiceInfo]: """Return all ServiceInfo matching server.""" return self._async_get_by_index(self.servers, server) - def _async_get_by_index(self, records: Dict[str, List], key: str) -> List[ServiceInfo]: + def _async_get_by_index(self, records: Dict[str, List], key: _str) -> List[ServiceInfo]: """Return all ServiceInfo matching the index.""" record_list = records.get(key) if record_list is None: From 84054cea08c3947381e869d89e2b1a073f47eb79 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 22 Aug 2023 19:15:40 +0000 Subject: [PATCH 048/434] 0.82.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 43632cba..99e769b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.82.1 (2023-08-22) + +### Fix + +* Build failures with older cython 0.29 series ([#1232](https://github.com/python-zeroconf/python-zeroconf/issues/1232)) ([`30c3ad9`](https://github.com/python-zeroconf/python-zeroconf/commit/30c3ad9d1bc6b589e1ca6675fea21907ebcd1ced)) + ## v0.82.0 (2023-08-22) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 873dff4e..bdae72d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.82.0" +version = "0.82.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 2928f1d1..07442718 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.82.0' +__version__ = '0.82.1' __license__ = 'LGPL' From 703ecb2901b2150fb72fac3deed61d7302561298 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 26 Aug 2023 08:59:17 -0500 Subject: [PATCH 049/434] feat: speed up question and answer history with a cython pxd (#1234) --- build_ext.py | 1 + src/zeroconf/_history.pxd | 16 ++++++++++++++++ src/zeroconf/_history.py | 27 ++++++++++++++++++--------- tests/__init__.py | 12 +++++++++--- tests/services/test_browser.py | 14 +++++++++----- tests/test_asyncio.py | 11 +++++------ tests/test_handlers.py | 4 ++-- tests/test_listener.py | 3 +++ 8 files changed, 63 insertions(+), 25 deletions(-) create mode 100644 src/zeroconf/_history.pxd diff --git a/build_ext.py b/build_ext.py index 38c8127a..f2c59288 100644 --- a/build_ext.py +++ b/build_ext.py @@ -25,6 +25,7 @@ def build(setup_kwargs: Any) -> None: [ "src/zeroconf/_dns.py", "src/zeroconf/_cache.py", + "src/zeroconf/_history.py", "src/zeroconf/_listener.py", "src/zeroconf/_protocol/incoming.py", "src/zeroconf/_protocol/outgoing.py", diff --git a/src/zeroconf/_history.pxd b/src/zeroconf/_history.pxd new file mode 100644 index 00000000..6e4e374f --- /dev/null +++ b/src/zeroconf/_history.pxd @@ -0,0 +1,16 @@ +import cython + + +cdef cython.double _DUPLICATE_QUESTION_INTERVAL + +cdef class QuestionHistory: + + cdef cython.dict _history + + + @cython.locals(than=cython.double, previous_question=cython.tuple, previous_known_answers=cython.set) + cpdef suppresses(self, object question, cython.double now, cython.set known_answers) + + + @cython.locals(than=cython.double, now_known_answers=cython.tuple) + cpdef async_expire(self, cython.double now) diff --git 
a/src/zeroconf/_history.py b/src/zeroconf/_history.py index cbb36144..db6a394d 100644 --- a/src/zeroconf/_history.py +++ b/src/zeroconf/_history.py @@ -20,7 +20,7 @@ USA """ -from typing import Dict, Set, Tuple +from typing import Dict, List, Set, Tuple from ._dns import DNSQuestion, DNSRecord from .const import _DUPLICATE_QUESTION_INTERVAL @@ -28,16 +28,21 @@ # The QuestionHistory is used to implement Duplicate Question Suppression # https://datatracker.ietf.org/doc/html/rfc6762#section-7.3 +_float = float + class QuestionHistory: + """Remember questions and known answers.""" + def __init__(self) -> None: + """Init a new QuestionHistory.""" self._history: Dict[DNSQuestion, Tuple[float, Set[DNSRecord]]] = {} - def add_question_at_time(self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord]) -> None: + def add_question_at_time(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> None: """Remember a question with known answers.""" self._history[question] = (now, known_answers) - def suppresses(self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord]) -> bool: + def suppresses(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> bool: """Check to see if a question should be suppressed. 
https://datatracker.ietf.org/doc/html/rfc6762#section-7.3 @@ -59,12 +64,16 @@ def suppresses(self, question: DNSQuestion, now: float, known_answers: Set[DNSRe return False return True - def async_expire(self, now: float) -> None: + def async_expire(self, now: _float) -> None: """Expire the history of old questions.""" - removes = [ - question - for question, now_known_answers in self._history.items() - if now - now_known_answers[0] > _DUPLICATE_QUESTION_INTERVAL - ] + removes: List[DNSQuestion] = [] + for question, now_known_answers in self._history.items(): + than, _ = now_known_answers + if now - than > _DUPLICATE_QUESTION_INTERVAL: + removes.append(question) for question in removes: del self._history[question] + + def clear(self) -> None: + """Clear the history.""" + self._history.clear() diff --git a/tests/__init__.py b/tests/__init__.py index 8f216c99..959cc3f3 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -23,11 +23,17 @@ import asyncio import socket from functools import lru_cache -from typing import List +from typing import List, Set import ifaddr -from zeroconf import DNSIncoming, Zeroconf +from zeroconf import DNSIncoming, DNSQuestion, DNSRecord, Zeroconf +from zeroconf._history import QuestionHistory + + +class QuestionHistoryWithoutSuppression(QuestionHistory): + def suppresses(self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord]) -> bool: + return False def _inject_responses(zc: Zeroconf, msgs: List[DNSIncoming]) -> None: @@ -77,4 +83,4 @@ def has_working_ipv6(): def _clear_cache(zc): zc.cache.cache.clear() - zc.question_history._history.clear() + zc.question_history.clear() diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index d13701ec..215fcc0c 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -31,7 +31,12 @@ from zeroconf._services.info import ServiceInfo from zeroconf.asyncio import AsyncZeroconf -from .. 
import _inject_response, _wait_for_start, has_working_ipv6 +from .. import ( + QuestionHistoryWithoutSuppression, + _inject_response, + _wait_for_start, + has_working_ipv6, +) log = logging.getLogger('zeroconf') original_logging_level = logging.NOTSET @@ -444,6 +449,7 @@ def test_backoff(): type_ = "_http._tcp.local." zeroconf_browser = Zeroconf(interfaces=['127.0.0.1']) _wait_for_start(zeroconf_browser) + zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() # we are going to patch the zeroconf send to check query transmission old_send = zeroconf_browser.async_send @@ -465,10 +471,8 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): # patch the zeroconf current_time_millis # patch the backoff limit to prevent test running forever with patch.object(zeroconf_browser, "async_send", send), patch.object( - zeroconf_browser.question_history, "suppresses", return_value=False - ), patch.object(_services_browser, "current_time_millis", current_time_millis), patch.object( - _services_browser, "_BROWSER_BACKOFF_LIMIT", 10 - ), patch.object( + _services_browser, "current_time_millis", current_time_millis + ), patch.object(_services_browser, "_BROWSER_BACKOFF_LIMIT", 10), patch.object( _services_browser, "_FIRST_QUERY_DELAY_RANDOM_INTERVAL", (0, 0) ): # dummy service callback diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index cd067ae1..88ea9fce 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -43,7 +43,7 @@ ) from zeroconf.const import _LISTENER_TIME -from . import _clear_cache, has_working_ipv6 +from . 
import QuestionHistoryWithoutSuppression, _clear_cache, has_working_ipv6 log = logging.getLogger('zeroconf') original_logging_level = logging.NOTSET @@ -951,6 +951,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) zeroconf_browser = aiozc.zeroconf + zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() await zeroconf_browser.async_wait_for_start() # we are going to patch the zeroconf send to check packet sizes @@ -990,11 +991,9 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): # patch the backoff limit to ensure we always get one query every 1/4 of the DNS TTL # Disable duplicate question suppression and duplicate packet suppression for this test as it works # by asking the same question over and over - with patch.object(zeroconf_browser.question_history, "suppresses", return_value=False), patch.object( - zeroconf_browser, "async_send", send - ), patch("zeroconf._services.browser.current_time_millis", _new_current_time_millis), patch.object( - _services_browser, "_BROWSER_BACKOFF_LIMIT", int(expected_ttl / 4) - ): + with patch.object(zeroconf_browser, "async_send", send), patch( + "zeroconf._services.browser.current_time_millis", _new_current_time_millis + ), patch.object(_services_browser, "_BROWSER_BACKOFF_LIMIT", int(expected_ttl / 4)): service_added = asyncio.Event() service_removed = asyncio.Event() diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 66ed811a..607f6819 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1131,7 +1131,7 @@ async def test_cache_flush_bit(): for record in new_records: assert zc.cache.async_get_unique(record) is not None - original_a_record.created = current_time_millis() - 1001 + original_a_record.created = current_time_millis() - 1500 # Do the run within 1s to verify the original record is not going to be expired out = r.DNSOutgoing(const._FLAGS_QR_RESPONSE | 
const._FLAGS_AA, multicast=True) @@ -1146,7 +1146,7 @@ async def test_cache_flush_bit(): cached_records = [zc.cache.async_get_unique(record) for record in new_records] for cached_record in cached_records: assert cached_record is not None - cached_record.created = current_time_millis() - 1001 + cached_record.created = current_time_millis() - 1500 fresh_address = socket.inet_aton("4.4.4.4") info.addresses = [fresh_address] diff --git a/tests/test_listener.py b/tests/test_listener.py index 737b8111..914b4a13 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -14,6 +14,8 @@ from zeroconf._protocol import outgoing from zeroconf._protocol.incoming import DNSIncoming +from . import QuestionHistoryWithoutSuppression + log = logging.getLogger('zeroconf') original_logging_level = logging.NOTSET @@ -123,6 +125,7 @@ def test_guard_against_duplicate_packets(): These packets can quickly overwhelm the system. """ zc = Zeroconf(interfaces=['127.0.0.1']) + zc.question_history = QuestionHistoryWithoutSuppression() class SubListener(_listener.AsyncListener): def handle_query_or_defer( From bfb3fe2bc36262fe2922028d9ce44c6d2f76f829 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 26 Aug 2023 14:22:49 +0000 Subject: [PATCH 050/434] 0.83.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 99e769b7..1e71654c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.83.0 (2023-08-26) + +### Feature + +* Speed up question and answer history with a cython pxd ([#1234](https://github.com/python-zeroconf/python-zeroconf/issues/1234)) ([`703ecb2`](https://github.com/python-zeroconf/python-zeroconf/commit/703ecb2901b2150fb72fac3deed61d7302561298)) + ## v0.82.1 (2023-08-22) ### Fix diff --git a/pyproject.toml b/pyproject.toml index bdae72d6..643f9953 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.82.1" +version = "0.83.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 07442718..66fe5cd8 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.82.1' +__version__ = '0.83.0' __license__ = 'LGPL' From dd637fb2e5a87ba283750e69d116e124bef54e7c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 27 Aug 2023 08:29:12 -0500 Subject: [PATCH 051/434] fix: rebuild wheels with cython 3.0.2 (#1236) From 041549c7f55503259e30b2f4725bee3ef2c6921e Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 27 Aug 2023 13:37:59 +0000 Subject: [PATCH 052/434] 0.83.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1e71654c..76dda68f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.83.1 (2023-08-27) + +### Fix + +* Rebuild wheels with cython 3.0.2 ([#1236](https://github.com/python-zeroconf/python-zeroconf/issues/1236)) ([`dd637fb`](https://github.com/python-zeroconf/python-zeroconf/commit/dd637fb2e5a87ba283750e69d116e124bef54e7c)) + ## v0.83.0 (2023-08-26) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 643f9953..0681f64b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.83.0" +version = "0.83.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 66fe5cd8..657c150a 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.83.0' +__version__ = '0.83.1' __license__ = 'LGPL' From bd8d8467dec2a39a0b525043ea1051259100fded Mon Sep 17 00:00:00 2001 From: Tenebrosus3141 <105437363+Tenebrosus3141@users.noreply.github.com> Date: Sun, 27 Aug 2023 10:26:34 -0400 Subject: [PATCH 053/434] feat: context managers in ServiceBrowser and AsyncServiceBrowser (#1233) Co-authored-by: J. Nick Koston --- src/zeroconf/_services/browser.py | 14 ++++++++++++++ src/zeroconf/asyncio.py | 12 ++++++++++++ tests/services/test_browser.py | 32 ++++++++++++++++++++++++++++++- tests/test_asyncio.py | 27 ++++++++++++++++++++++++++ 4 files changed, 84 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 60c0439e..17307c99 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -26,6 +26,7 @@ import threading import warnings from abc import abstractmethod +from types import TracebackType # noqa # used in type hints from typing import ( TYPE_CHECKING, Callable, @@ -35,6 +36,7 @@ Optional, Set, Tuple, + Type, Union, cast, ) @@ -576,3 +578,15 @@ def async_update_records_complete(self) -> None: for pending in self._pending_handlers.items(): self.queue.put(pending) self._pending_handlers.clear() + + def __enter__(self) -> 'ServiceBrowser': + return self + + def __exit__( # pylint: disable=useless-return + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + self.cancel() + return None diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index 755757d7..5aaee35f 100644 --- a/src/zeroconf/asyncio.py +++ 
b/src/zeroconf/asyncio.py @@ -93,6 +93,18 @@ def async_update_records_complete(self) -> None: self._fire_service_state_changed_event(pending) self._pending_handlers.clear() + async def __aenter__(self) -> 'AsyncServiceBrowser': + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + await self.async_cancel() + return None + class AsyncZeroconfServiceTypes(ZeroconfServiceTypes): """An async version of ZeroconfServiceTypes.""" diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 215fcc0c..d49295fa 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -3,13 +3,14 @@ """ Unit tests for zeroconf._services.browser. """ +import asyncio import logging import os import socket import time import unittest from threading import Event -from typing import Iterable, Set +from typing import Iterable, Set, cast from unittest.mock import patch import pytest @@ -75,6 +76,35 @@ class MyServiceListener(r.ServiceListener): zc.close() +def test_service_browser_cancel_context_manager(): + """Test we can cancel a ServiceBrowser with it being used as a context manager.""" + + # instantiate a zeroconf instance + zc = Zeroconf(interfaces=['127.0.0.1']) + # start a browser + type_ = "_hap._tcp.local." 
+ + class MyServiceListener(r.ServiceListener): + pass + + listener = MyServiceListener() + + browser = r.ServiceBrowser(zc, type_, None, listener) + + assert cast(bool, browser.done) is False + + with browser: + pass + + # ensure call_soon_threadsafe in ServiceBrowser.cancel is run + assert zc.loop is not None + asyncio.run_coroutine_threadsafe(asyncio.sleep(0), zc.loop).result() + + assert cast(bool, browser.done) is True + + zc.close() + + def test_service_browser_cancel_multiple_times_after_close(): """Test we can cancel a ServiceBrowser multiple times after close.""" diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 88ea9fce..395a16ea 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -9,6 +9,7 @@ import socket import threading import time +from typing import cast from unittest.mock import ANY, call, patch import pytest @@ -779,6 +780,32 @@ async def test_async_context_manager() -> None: assert aiosinfo is not None +@pytest.mark.asyncio +async def test_service_browser_cancel_async_context_manager(): + """Test we can cancel an AsyncServiceBrowser with it being used as an async context manager.""" + + # instantiate a zeroconf instance + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zc = aiozc.zeroconf + type_ = "_hap._tcp.local." 
+ + class MyServiceListener(ServiceListener): + pass + + listener = MyServiceListener() + + browser = AsyncServiceBrowser(zc, type_, None, listener) + + assert cast(bool, browser.done) is False + + async with browser: + pass + + assert cast(bool, browser.done) is True + + await aiozc.async_close() + + @pytest.mark.asyncio async def test_async_unregister_all_services() -> None: """Test unregistering all services.""" From a78ea54fe6ccf8e4941facc85168496f66922533 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 27 Aug 2023 14:35:45 +0000 Subject: [PATCH 054/434] 0.84.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 76dda68f..065fb6b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.84.0 (2023-08-27) + +### Feature + +* Context managers in ServiceBrowser and AsyncServiceBrowser ([#1233](https://github.com/python-zeroconf/python-zeroconf/issues/1233)) ([`bd8d846`](https://github.com/python-zeroconf/python-zeroconf/commit/bd8d8467dec2a39a0b525043ea1051259100fded)) + ## v0.83.1 (2023-08-27) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 0681f64b..4e50bc1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.83.1" +version = "0.84.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 657c150a..1233dc41 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.83.1' +__version__ = '0.84.0' __license__ = 'LGPL' From 68d99985a0e9d2c72ff670b2e2af92271a6fe934 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 27 Aug 2023 14:22:57 -0500 Subject: [PATCH 055/434] feat: simplify code to unpack properties (#1237) --- src/zeroconf/_services/info.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 705f3723..440a4b9d 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -345,31 +345,27 @@ def _set_text(self, text: bytes) -> None: def _unpack_text_into_properties(self) -> None: """Unpacks the text field into properties""" text = self.text - end = len(text) - if end == 0: + if not text: # Properties should be set atomically # in case another thread is reading them self._properties = {} return - result: Dict[Union[str, bytes], Optional[Union[str, bytes]]] = {} index = 0 - strs: List[bytes] = [] + pairs: List[bytes] = [] + end = len(text) while index < end: length = text[index] index += 1 - strs.append(text[index : index + length]) + pairs.append(text[index : index + length]) index += length - for s in strs: - key, _, value = s.partition(b'=') - # Only update non-existent properties - if key and key not in result: - result[key] = value or None - - # Properties should be set atomically - # in case another thread is reading them - self._properties = result + # Reverse the list so that the first item in the list + # is the last item in the text field. This is important + # to preserve backwards compatibility where the first + # key always wins if the key is seen multiple times. 
+ pairs.reverse() + self._properties = {key: value or None for key, _, value in (pair.partition(b'=') for pair in pairs)} def get_name(self) -> str: """Name accessor""" From 55f719dbf9288c5b809e78560e468e1cf686cb11 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 27 Aug 2023 19:31:12 +0000 Subject: [PATCH 056/434] 0.85.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 065fb6b8..9395aea6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.85.0 (2023-08-27) + +### Feature + +* Simplify code to unpack properties ([#1237](https://github.com/python-zeroconf/python-zeroconf/issues/1237)) ([`68d9998`](https://github.com/python-zeroconf/python-zeroconf/commit/68d99985a0e9d2c72ff670b2e2af92271a6fe934)) + ## v0.84.0 (2023-08-27) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 4e50bc1e..b2f87330 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.84.0" +version = "0.85.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 1233dc41..6b2bd87e 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.84.0' +__version__ = '0.85.0' __license__ = 'LGPL' From cc8feb110fefc3fb714fd482a52f16e2b620e8c4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 27 Aug 2023 21:14:12 -0500 Subject: [PATCH 057/434] feat: use server_key when processing DNSService records (#1238) --- src/zeroconf/_core.py | 4 ++-- src/zeroconf/_services/info.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 173a29d0..67212286 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -440,8 +440,8 @@ async def async_unregister_service(self, info: ServiceInfo) -> Awaitable: # If another server uses the same addresses, we do not want to send # goodbye packets for the address records - assert info.server is not None - entries = self.registry.async_get_infos_server(info.server.lower()) + assert info.server_key is not None + entries = self.registry.async_get_infos_server(info.server_key) broadcast_addresses = not bool(entries) return asyncio.ensure_future( self._async_broadcast_service(info, _UNREGISTER_TIME, 0, broadcast_addresses) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 440a4b9d..19e4ce29 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -472,7 +472,7 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo old_server_key = self.server_key self.name = record.name self.server = record.server - self.server_key = record.server.lower() + self.server_key = record.server_key self.port = record.port self.weight = record.weight self.priority = record.priority @@ -586,7 +586,7 @@ def set_server_if_missing(self) -> None: """ if self.server is None: self.server = self._name - self.server_key = self.server.lower() + self.server_key = self.key def load_from_cache(self, zc: 'Zeroconf', now: Optional[float] = None) -> bool: """Populate the service info from the cache. From 58bc154f55b06b4ddfc4a141592488abe76f062a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 27 Aug 2023 21:14:23 -0500 Subject: [PATCH 058/434] feat: build wheels for cpython 3.12 (#1239) --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 677dfad1..ff79b70b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,7 +41,7 @@ jobs: - "3.9" - "3.10" - "3.11" - - "3.12.0-beta.4" + - "3.12.0-rc.1" - "pypy-3.7" os: - ubuntu-latest @@ -145,7 +145,7 @@ jobs: fetch-depth: 0 - name: Build wheels - uses: pypa/cibuildwheel@v2.11.3 + uses: pypa/cibuildwheel@v2.15.0 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* From b88c8dd51784c93ba928f522b14ec53ec5c57f1c Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 28 Aug 2023 02:23:45 +0000 Subject: [PATCH 059/434] 0.86.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9395aea6..87aebdc3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.86.0 (2023-08-28) + +### Feature + +* Build wheels for cpython 3.12 ([#1239](https://github.com/python-zeroconf/python-zeroconf/issues/1239)) ([`58bc154`](https://github.com/python-zeroconf/python-zeroconf/commit/58bc154f55b06b4ddfc4a141592488abe76f062a)) +* Use server_key when processing DNSService records ([#1238](https://github.com/python-zeroconf/python-zeroconf/issues/1238)) ([`cc8feb1`](https://github.com/python-zeroconf/python-zeroconf/commit/cc8feb110fefc3fb714fd482a52f16e2b620e8c4)) + ## v0.85.0 (2023-08-27) ### Feature diff --git a/pyproject.toml b/pyproject.toml index b2f87330..f3c7e7b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.85.0" +version = "0.86.0" description = "A pure python implementation of multicast DNS service discovery" authors = 
["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 6b2bd87e..cbd3dce5 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.85.0' +__version__ = '0.86.0' __license__ = 'LGPL' From 9da99d706d1c8b97e19856e8d83784c6cf8211d7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 28 Aug 2023 16:49:25 -0500 Subject: [PATCH 060/434] chore: split up handlers into seperate modules (#1240) --- src/zeroconf/_core.py | 8 +- src/zeroconf/_handlers.py | 605 ------------------ src/zeroconf/_handlers/__init__.py | 21 + src/zeroconf/_handlers/answers.py | 84 +++ .../_handlers/multicast_outgoing_queue.py | 102 +++ src/zeroconf/_handlers/query_handler.py | 286 +++++++++ src/zeroconf/_handlers/record_manager.py | 211 ++++++ tests/__init__.py | 2 +- tests/services/test_browser.py | 4 +- tests/test_handlers.py | 21 +- 10 files changed, 723 insertions(+), 621 deletions(-) delete mode 100644 src/zeroconf/_handlers.py create mode 100644 src/zeroconf/_handlers/__init__.py create mode 100644 src/zeroconf/_handlers/answers.py create mode 100644 src/zeroconf/_handlers/multicast_outgoing_queue.py create mode 100644 src/zeroconf/_handlers/query_handler.py create mode 100644 src/zeroconf/_handlers/record_manager.py diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 67212286..4960f1e0 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -31,13 +31,13 @@ from ._dns import DNSQuestion, DNSQuestionType from ._engine import AsyncEngine from ._exceptions import NonUniqueNameException, NotRunningException -from ._handlers import ( - MulticastOutgoingQueue, - QueryHandler, - RecordManager, +from ._handlers.answers import ( construct_outgoing_multicast_answers, construct_outgoing_unicast_answers, ) +from 
._handlers.multicast_outgoing_queue import MulticastOutgoingQueue +from ._handlers.query_handler import QueryHandler +from ._handlers.record_manager import RecordManager from ._history import QuestionHistory from ._logger import QuietLogger, log from ._protocol.incoming import DNSIncoming diff --git a/src/zeroconf/_handlers.py b/src/zeroconf/_handlers.py deleted file mode 100644 index 02f0d141..00000000 --- a/src/zeroconf/_handlers.py +++ /dev/null @@ -1,605 +0,0 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA -""" - -import itertools -import random -from collections import deque -from operator import attrgetter -from typing import ( - TYPE_CHECKING, - Dict, - List, - NamedTuple, - Optional, - Set, - Tuple, - Union, - cast, -) - -from ._cache import DNSCache, _UniqueRecordsType -from ._dns import DNSAddress, DNSPointer, DNSQuestion, DNSRecord, DNSRRSet -from ._history import QuestionHistory -from ._logger import log -from ._protocol.incoming import DNSIncoming -from ._protocol.outgoing import DNSOutgoing -from ._services.registry import ServiceRegistry -from ._updates import RecordUpdate, RecordUpdateListener -from ._utils.time import current_time_millis, millis_to_seconds -from .const import ( - _ADDRESS_RECORD_TYPES, - _CLASS_IN, - _DNS_OTHER_TTL, - _DNS_PTR_MIN_TTL, - _FLAGS_AA, - _FLAGS_QR_RESPONSE, - _ONE_SECOND, - _SERVICE_TYPE_ENUMERATION_NAME, - _TYPE_A, - _TYPE_AAAA, - _TYPE_ANY, - _TYPE_NSEC, - _TYPE_PTR, - _TYPE_SRV, - _TYPE_TXT, -) - -if TYPE_CHECKING: - from ._core import Zeroconf - - -_AnswerWithAdditionalsType = Dict[DNSRecord, Set[DNSRecord]] - -_MULTICAST_DELAY_RANDOM_INTERVAL = (20, 120) -_RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} - -NAME_GETTER = attrgetter('name') - - -class QuestionAnswers(NamedTuple): - ucast: _AnswerWithAdditionalsType - mcast_now: _AnswerWithAdditionalsType - mcast_aggregate: _AnswerWithAdditionalsType - mcast_aggregate_last_second: _AnswerWithAdditionalsType - - -class AnswerGroup(NamedTuple): - """A group of answers scheduled to be sent at the same time.""" - - send_after: float # Must be sent after this time - send_before: float # Must be sent before this time - answers: _AnswerWithAdditionalsType - - -def construct_outgoing_multicast_answers(answers: _AnswerWithAdditionalsType) -> 
DNSOutgoing: - """Add answers and additionals to a DNSOutgoing.""" - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, multicast=True) - _add_answers_additionals(out, answers) - return out - - -def construct_outgoing_unicast_answers( - answers: _AnswerWithAdditionalsType, ucast_source: bool, questions: List[DNSQuestion], id_: int -) -> DNSOutgoing: - """Add answers and additionals to a DNSOutgoing.""" - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, multicast=False, id_=id_) - # Adding the questions back when the source is legacy unicast behavior - if ucast_source: - for question in questions: - out.add_question(question) - _add_answers_additionals(out, answers) - return out - - -def _add_answers_additionals(out: DNSOutgoing, answers: _AnswerWithAdditionalsType) -> None: - # Find additionals and suppress any additionals that are already in answers - sending: Set[DNSRecord] = set(answers) - # Answers are sorted to group names together to increase the chance - # that similar names will end up in the same packet and can reduce the - # overall size of the outgoing response via name compression - for answer in sorted(answers, key=NAME_GETTER): - out.add_answer_at_time(answer, 0) - for additional in answers[answer]: - if additional not in sending: - out.add_additional_answer(additional) - sending.add(additional) - - -class _QueryResponse: - """A pair for unicast and multicast DNSOutgoing responses.""" - - __slots__ = ( - "_is_probe", - "_msg", - "_now", - "_cache", - "_additionals", - "_ucast", - "_mcast_now", - "_mcast_aggregate", - "_mcast_aggregate_last_second", - ) - - def __init__(self, cache: DNSCache, msgs: List[DNSIncoming]) -> None: - """Build a query response.""" - self._is_probe = False - for msg in msgs: - if msg.is_probe: - self._is_probe = True - break - self._msg = msgs[0] - self._now = self._msg.now - self._cache = cache - self._additionals: _AnswerWithAdditionalsType = {} - self._ucast: Set[DNSRecord] = set() - self._mcast_now: Set[DNSRecord] = set() 
- self._mcast_aggregate: Set[DNSRecord] = set() - self._mcast_aggregate_last_second: Set[DNSRecord] = set() - - def add_qu_question_response(self, answers: _AnswerWithAdditionalsType) -> None: - """Generate a response to a multicast QU query.""" - for record, additionals in answers.items(): - self._additionals[record] = additionals - if self._is_probe: - self._ucast.add(record) - if not self._has_mcast_within_one_quarter_ttl(record): - self._mcast_now.add(record) - elif not self._is_probe: - self._ucast.add(record) - - def add_ucast_question_response(self, answers: _AnswerWithAdditionalsType) -> None: - """Generate a response to a unicast query.""" - self._additionals.update(answers) - self._ucast.update(answers) - - def add_mcast_question_response(self, answers: _AnswerWithAdditionalsType) -> None: - """Generate a response to a multicast query.""" - self._additionals.update(answers) - for answer in answers: - if self._is_probe: - self._mcast_now.add(answer) - continue - - if self._has_mcast_record_in_last_second(answer): - self._mcast_aggregate_last_second.add(answer) - elif len(self._msg.questions) == 1 and self._msg.questions[0].type in _RESPOND_IMMEDIATE_TYPES: - self._mcast_now.add(answer) - else: - self._mcast_aggregate.add(answer) - - def _generate_answers_with_additionals(self, rrset: Set[DNSRecord]) -> _AnswerWithAdditionalsType: - """Create answers with additionals from an rrset.""" - return {record: self._additionals[record] for record in rrset} - - def answers( - self, - ) -> QuestionAnswers: - """Return answer sets that will be queued.""" - return QuestionAnswers( - self._generate_answers_with_additionals(self._ucast), - self._generate_answers_with_additionals(self._mcast_now), - self._generate_answers_with_additionals(self._mcast_aggregate), - self._generate_answers_with_additionals(self._mcast_aggregate_last_second), - ) - - def _has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: - """Check to see if a record has been mcasted 
recently. - - https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 - When receiving a question with the unicast-response bit set, a - responder SHOULD usually respond with a unicast packet directed back - to the querier. However, if the responder has not multicast that - record recently (within one quarter of its TTL), then the responder - SHOULD instead multicast the response so as to keep all the peer - caches up to date - """ - if TYPE_CHECKING: - record = cast(_UniqueRecordsType, record) - maybe_entry = self._cache.async_get_unique(record) - return bool(maybe_entry and maybe_entry.is_recent(self._now)) - - def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: - """Check if an answer was seen in the last second. - Protect the network against excessive packet flooding - https://datatracker.ietf.org/doc/html/rfc6762#section-14 - """ - if TYPE_CHECKING: - record = cast(_UniqueRecordsType, record) - maybe_entry = self._cache.async_get_unique(record) - return bool(maybe_entry and self._now - maybe_entry.created < _ONE_SECOND) - - -class QueryHandler: - """Query the ServiceRegistry.""" - - __slots__ = ("registry", "cache", "question_history") - - def __init__(self, registry: ServiceRegistry, cache: DNSCache, question_history: QuestionHistory) -> None: - """Init the query handler.""" - self.registry = registry - self.cache = cache - self.question_history = question_history - - def _add_service_type_enumeration_query_answers( - self, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, now: float - ) -> None: - """Provide an answer to a service type enumeration query. 
- - https://datatracker.ietf.org/doc/html/rfc6763#section-9 - """ - for stype in self.registry.async_get_types(): - dns_pointer = DNSPointer( - _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, now - ) - if not known_answers.suppresses(dns_pointer): - answer_set[dns_pointer] = set() - - def _add_pointer_answers( - self, lower_name: str, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, now: float - ) -> None: - """Answer PTR/ANY question.""" - for service in self.registry.async_get_infos_type(lower_name): - # Add recommended additional answers according to - # https://tools.ietf.org/html/rfc6763#section-12.1. - dns_pointer = service.dns_pointer(created=now) - if known_answers.suppresses(dns_pointer): - continue - answer_set[dns_pointer] = { - service.dns_service(created=now), - service.dns_text(created=now), - } | service.get_address_and_nsec_records(created=now) - - def _add_address_answers( - self, - lower_name: str, - answer_set: _AnswerWithAdditionalsType, - known_answers: DNSRRSet, - now: float, - type_: int, - ) -> None: - """Answer A/AAAA/ANY question.""" - for service in self.registry.async_get_infos_server(lower_name): - answers: List[DNSAddress] = [] - additionals: Set[DNSRecord] = set() - seen_types: Set[int] = set() - for dns_address in service.dns_addresses(created=now): - seen_types.add(dns_address.type) - if dns_address.type != type_: - additionals.add(dns_address) - elif not known_answers.suppresses(dns_address): - answers.append(dns_address) - missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types - if answers: - if missing_types: - assert service.server is not None, "Service server must be set for NSEC record." - additionals.add(service.dns_nsec(list(missing_types), created=now)) - for answer in answers: - answer_set[answer] = additionals - elif type_ in missing_types: - assert service.server is not None, "Service server must be set for NSEC record." 
- answer_set[service.dns_nsec(list(missing_types), created=now)] = set() - - def _answer_question( - self, - question: DNSQuestion, - known_answers: DNSRRSet, - now: float, - ) -> _AnswerWithAdditionalsType: - answer_set: _AnswerWithAdditionalsType = {} - question_lower_name = question.name.lower() - - if question.type == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: - self._add_service_type_enumeration_query_answers(answer_set, known_answers, now) - return answer_set - - type_ = question.type - - if type_ in (_TYPE_PTR, _TYPE_ANY): - self._add_pointer_answers(question_lower_name, answer_set, known_answers, now) - - if type_ in (_TYPE_A, _TYPE_AAAA, _TYPE_ANY): - self._add_address_answers(question_lower_name, answer_set, known_answers, now, type_) - - if type_ in (_TYPE_SRV, _TYPE_TXT, _TYPE_ANY): - service = self.registry.async_get_info_name(question_lower_name) - if service is not None: - if type_ in (_TYPE_SRV, _TYPE_ANY): - # Add recommended additional answers according to - # https://tools.ietf.org/html/rfc6763#section-12.2. - dns_service = service.dns_service(created=now) - if not known_answers.suppresses(dns_service): - answer_set[dns_service] = service.get_address_and_nsec_records(created=now) - if type_ in (_TYPE_TXT, _TYPE_ANY): - dns_text = service.dns_text(created=now) - if not known_answers.suppresses(dns_text): - answer_set[dns_text] = set() - - return answer_set - - def async_response( # pylint: disable=unused-argument - self, msgs: List[DNSIncoming], ucast_source: bool - ) -> QuestionAnswers: - """Deal with incoming query packets. Provides a response if possible. - - This function must be run in the event loop as it is not - threadsafe. 
- """ - known_answers = DNSRRSet([msg.answers for msg in msgs if not msg.is_probe]) - query_res = _QueryResponse(self.cache, msgs) - - for msg in msgs: - for question in msg.questions: - if not question.unicast: - self.question_history.add_question_at_time(question, msg.now, set(known_answers.lookup)) - answer_set = self._answer_question(question, known_answers, msg.now) - if not ucast_source and question.unicast: - query_res.add_qu_question_response(answer_set) - continue - if ucast_source: - query_res.add_ucast_question_response(answer_set) - # We always multicast as well even if its a unicast - # source as long as we haven't done it recently (75% of ttl) - query_res.add_mcast_question_response(answer_set) - - return query_res.answers() - - -class RecordManager: - """Process records into the cache and notify listeners.""" - - __slots__ = ("zc", "cache", "listeners") - - def __init__(self, zeroconf: 'Zeroconf') -> None: - """Init the record manager.""" - self.zc = zeroconf - self.cache = zeroconf.cache - self.listeners: List[RecordUpdateListener] = [] - - def async_updates(self, now: float, records: List[RecordUpdate]) -> None: - """Used to notify listeners of new information that has updated - a record. - - This method must be called before the cache is updated. - - This method will be run in the event loop. - """ - for listener in self.listeners: - listener.async_update_records(self.zc, now, records) - - def async_updates_complete(self, notify: bool) -> None: - """Used to notify listeners of new information that has updated - a record. - - This method must be called after the cache is updated. - - This method will be run in the event loop. - """ - for listener in self.listeners: - listener.async_update_records_complete() - if notify: - self.zc.async_notify_all() - - def async_updates_from_response(self, msg: DNSIncoming) -> None: - """Deal with incoming response packets. All answers - are held in the cache, and listeners are notified. 
- - This function must be run in the event loop as it is not - threadsafe. - """ - updates: List[RecordUpdate] = [] - address_adds: List[DNSRecord] = [] - other_adds: List[DNSRecord] = [] - removes: Set[DNSRecord] = set() - now = msg.now - unique_types: Set[Tuple[str, int, int]] = set() - cache = self.cache - - for record in msg.answers: - # Protect zeroconf from records that can cause denial of service. - # - # We enforce a minimum TTL for PTR records to avoid - # ServiceBrowsers generating excessive queries refresh queries. - # Apple uses a 15s minimum TTL, however we do not have the same - # level of rate limit and safe guards so we use 1/4 of the recommended value. - record_type = record.type - record_ttl = record.ttl - if record_ttl and record_type == _TYPE_PTR and record_ttl < _DNS_PTR_MIN_TTL: - log.debug( - "Increasing effective ttl of %s to minimum of %s to protect against excessive refreshes.", - record, - _DNS_PTR_MIN_TTL, - ) - record.set_created_ttl(record.created, _DNS_PTR_MIN_TTL) - - if record.unique: # https://tools.ietf.org/html/rfc6762#section-10.2 - unique_types.add((record.name, record_type, record.class_)) - - if TYPE_CHECKING: - record = cast(_UniqueRecordsType, record) - - maybe_entry = cache.async_get_unique(record) - if not record.is_expired(now): - if maybe_entry is not None: - maybe_entry.reset_ttl(record) - else: - if record.type in _ADDRESS_RECORD_TYPES: - address_adds.append(record) - else: - other_adds.append(record) - updates.append(RecordUpdate(record, maybe_entry)) - # This is likely a goodbye since the record is - # expired and exists in the cache - elif maybe_entry is not None: - updates.append(RecordUpdate(record, maybe_entry)) - removes.add(record) - - if unique_types: - cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, msg.answers, now) - - if updates: - self.async_updates(now, updates) - # The cache adds must be processed AFTER we trigger - # the updates since we compare existing data - # with the new 
data and updating the cache - # ahead of update_record will cause listeners - # to miss changes - # - # We must process address adds before non-addresses - # otherwise a fetch of ServiceInfo may miss an address - # because it thinks the cache is complete - # - # The cache is processed under the context manager to ensure - # that any ServiceBrowser that is going to call - # zc.get_service_info will see the cached value - # but ONLY after all the record updates have been - # processsed. - new = False - if other_adds or address_adds: - new = cache.async_add_records(itertools.chain(address_adds, other_adds)) - # Removes are processed last since - # ServiceInfo could generate an un-needed query - # because the data was not yet populated. - if removes: - cache.async_remove_records(removes) - if updates: - self.async_updates_complete(new) - - def async_add_listener( - self, listener: RecordUpdateListener, question: Optional[Union[DNSQuestion, List[DNSQuestion]]] - ) -> None: - """Adds a listener for a given question. The listener will have - its update_record method called when information is available to - answer the question(s). - - This function is not thread-safe and must be called in the eventloop. - """ - if not isinstance(listener, RecordUpdateListener): - log.error( # type: ignore[unreachable] - "listeners passed to async_add_listener must inherit from RecordUpdateListener;" - " In the future this will fail" - ) - - self.listeners.append(listener) - - if question is None: - return - - questions = [question] if isinstance(question, DNSQuestion) else question - assert self.zc.loop is not None - self._async_update_matching_records(listener, questions) - - def _async_update_matching_records( - self, listener: RecordUpdateListener, questions: List[DNSQuestion] - ) -> None: - """Calls back any existing entries in the cache that answer the question. - - This function must be run from the event loop. 
- """ - now = current_time_millis() - records: List[RecordUpdate] = [ - RecordUpdate(record, None) - for question in questions - for record in self.cache.async_entries_with_name(question.name) - if not record.is_expired(now) and question.answered_by(record) - ] - if not records: - return - listener.async_update_records(self.zc, now, records) - listener.async_update_records_complete() - self.zc.async_notify_all() - - def async_remove_listener(self, listener: RecordUpdateListener) -> None: - """Removes a listener. - - This function is not threadsafe and must be called in the eventloop. - """ - try: - self.listeners.remove(listener) - self.zc.async_notify_all() - except ValueError as e: - log.exception('Failed to remove listener: %r', e) - - -class MulticastOutgoingQueue: - """An outgoing queue used to aggregate multicast responses.""" - - __slots__ = ("zc", "queue", "additional_delay", "aggregation_delay") - - def __init__(self, zeroconf: 'Zeroconf', additional_delay: int, max_aggregation_delay: int) -> None: - self.zc = zeroconf - self.queue: deque[AnswerGroup] = deque() - # Additional delay is used to implement - # Protect the network against excessive packet flooding - # https://datatracker.ietf.org/doc/html/rfc6762#section-14 - self.additional_delay = additional_delay - self.aggregation_delay = max_aggregation_delay - - def async_add(self, now: float, answers: _AnswerWithAdditionalsType) -> None: - """Add a group of answers with additionals to the outgoing queue.""" - assert self.zc.loop is not None - random_delay = random.randint(*_MULTICAST_DELAY_RANDOM_INTERVAL) + self.additional_delay - send_after = now + random_delay - send_before = now + self.aggregation_delay + self.additional_delay - if len(self.queue): - # If we calculate a random delay for the send after time - # that is less than the last group scheduled to go out, - # we instead add the answers to the last group as this - # allows aggregating additonal responses - last_group = self.queue[-1] - if 
send_after <= last_group.send_after: - last_group.answers.update(answers) - return - else: - self.zc.loop.call_later(millis_to_seconds(random_delay), self.async_ready) - self.queue.append(AnswerGroup(send_after, send_before, answers)) - - def _remove_answers_from_queue(self, answers: _AnswerWithAdditionalsType) -> None: - """Remove a set of answers from the outgoing queue.""" - for pending in self.queue: - for record in answers: - pending.answers.pop(record, None) - - def async_ready(self) -> None: - """Process anything in the queue that is ready.""" - assert self.zc.loop is not None - now = current_time_millis() - - if len(self.queue) > 1 and self.queue[0].send_before > now: - # There is more than one answer in the queue, - # delay until we have to send it (first answer group reaches send_before) - self.zc.loop.call_later(millis_to_seconds(self.queue[0].send_before - now), self.async_ready) - return - - answers: _AnswerWithAdditionalsType = {} - # Add all groups that can be sent now - while len(self.queue) and self.queue[0].send_after <= now: - answers.update(self.queue.popleft().answers) - - if len(self.queue): - # If there are still groups in the queue that are not ready to send - # be sure we schedule them to go out later - self.zc.loop.call_later(millis_to_seconds(self.queue[0].send_after - now), self.async_ready) - - if answers: - # If we have the same answer scheduled to go out, remove them - self._remove_answers_from_queue(answers) - self.zc.async_send(construct_outgoing_multicast_answers(answers)) diff --git a/src/zeroconf/_handlers/__init__.py b/src/zeroconf/_handlers/__init__.py new file mode 100644 index 00000000..2ef4b15b --- /dev/null +++ b/src/zeroconf/_handlers/__init__.py @@ -0,0 +1,21 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. 
+ + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py new file mode 100644 index 00000000..a80d2367 --- /dev/null +++ b/src/zeroconf/_handlers/answers.py @@ -0,0 +1,84 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +from operator import attrgetter +from typing import Dict, List, NamedTuple, Set + +from .._dns import DNSQuestion, DNSRecord +from .._protocol.outgoing import DNSOutgoing +from ..const import _FLAGS_AA, _FLAGS_QR_RESPONSE + +_AnswerWithAdditionalsType = Dict[DNSRecord, Set[DNSRecord]] + + +MULTICAST_DELAY_RANDOM_INTERVAL = (20, 120) + +NAME_GETTER = attrgetter('name') + + +class QuestionAnswers(NamedTuple): + ucast: _AnswerWithAdditionalsType + mcast_now: _AnswerWithAdditionalsType + mcast_aggregate: _AnswerWithAdditionalsType + mcast_aggregate_last_second: _AnswerWithAdditionalsType + + +class AnswerGroup(NamedTuple): + """A group of answers scheduled to be sent at the same time.""" + + send_after: float # Must be sent after this time + send_before: float # Must be sent before this time + answers: _AnswerWithAdditionalsType + + +def construct_outgoing_multicast_answers(answers: _AnswerWithAdditionalsType) -> DNSOutgoing: + """Add answers and additionals to a DNSOutgoing.""" + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, multicast=True) + _add_answers_additionals(out, answers) + return out + + +def construct_outgoing_unicast_answers( + answers: _AnswerWithAdditionalsType, ucast_source: bool, questions: List[DNSQuestion], id_: int +) -> DNSOutgoing: + """Add answers and additionals to a DNSOutgoing.""" + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, multicast=False, id_=id_) + # Adding the questions back when the source is legacy unicast behavior + if ucast_source: + for question in questions: + out.add_question(question) + _add_answers_additionals(out, answers) + return out + + +def _add_answers_additionals(out: DNSOutgoing, answers: _AnswerWithAdditionalsType) -> None: + # Find additionals and suppress any additionals that are already 
in answers + sending: Set[DNSRecord] = set(answers) + # Answers are sorted to group names together to increase the chance + # that similar names will end up in the same packet and can reduce the + # overall size of the outgoing response via name compression + for answer in sorted(answers, key=NAME_GETTER): + out.add_answer_at_time(answer, 0) + for additional in answers[answer]: + if additional not in sending: + out.add_additional_answer(additional) + sending.add(additional) diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py b/src/zeroconf/_handlers/multicast_outgoing_queue.py new file mode 100644 index 00000000..0e469d28 --- /dev/null +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -0,0 +1,102 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +import random +from collections import deque +from typing import TYPE_CHECKING + +from .._utils.time import current_time_millis, millis_to_seconds +from .answers import ( + MULTICAST_DELAY_RANDOM_INTERVAL, + AnswerGroup, + _AnswerWithAdditionalsType, + construct_outgoing_multicast_answers, +) + +if TYPE_CHECKING: + from .._core import Zeroconf + + +class MulticastOutgoingQueue: + """An outgoing queue used to aggregate multicast responses.""" + + __slots__ = ("zc", "queue", "additional_delay", "aggregation_delay") + + def __init__(self, zeroconf: 'Zeroconf', additional_delay: int, max_aggregation_delay: int) -> None: + self.zc = zeroconf + self.queue: deque[AnswerGroup] = deque() + # Additional delay is used to implement + # Protect the network against excessive packet flooding + # https://datatracker.ietf.org/doc/html/rfc6762#section-14 + self.additional_delay = additional_delay + self.aggregation_delay = max_aggregation_delay + + def async_add(self, now: float, answers: _AnswerWithAdditionalsType) -> None: + """Add a group of answers with additionals to the outgoing queue.""" + assert self.zc.loop is not None + random_delay = random.randint(*MULTICAST_DELAY_RANDOM_INTERVAL) + self.additional_delay + send_after = now + random_delay + send_before = now + self.aggregation_delay + self.additional_delay + if len(self.queue): + # If we calculate a random delay for the send after time + # that is less than the last group scheduled to go out, + # we instead add the answers to the last group as this + # allows aggregating additonal responses + last_group = self.queue[-1] + if send_after <= last_group.send_after: + last_group.answers.update(answers) + return + else: + self.zc.loop.call_later(millis_to_seconds(random_delay), self.async_ready) + 
self.queue.append(AnswerGroup(send_after, send_before, answers)) + + def _remove_answers_from_queue(self, answers: _AnswerWithAdditionalsType) -> None: + """Remove a set of answers from the outgoing queue.""" + for pending in self.queue: + for record in answers: + pending.answers.pop(record, None) + + def async_ready(self) -> None: + """Process anything in the queue that is ready.""" + assert self.zc.loop is not None + now = current_time_millis() + + if len(self.queue) > 1 and self.queue[0].send_before > now: + # There is more than one answer in the queue, + # delay until we have to send it (first answer group reaches send_before) + self.zc.loop.call_later(millis_to_seconds(self.queue[0].send_before - now), self.async_ready) + return + + answers: _AnswerWithAdditionalsType = {} + # Add all groups that can be sent now + while len(self.queue) and self.queue[0].send_after <= now: + answers.update(self.queue.popleft().answers) + + if len(self.queue): + # If there are still groups in the queue that are not ready to send + # be sure we schedule them to go out later + self.zc.loop.call_later(millis_to_seconds(self.queue[0].send_after - now), self.async_ready) + + if answers: + # If we have the same answer scheduled to go out, remove them + self._remove_answers_from_queue(answers) + self.zc.async_send(construct_outgoing_multicast_answers(answers)) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py new file mode 100644 index 00000000..e4b63508 --- /dev/null +++ b/src/zeroconf/_handlers/query_handler.py @@ -0,0 +1,286 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. 
+ + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + + +from typing import TYPE_CHECKING, List, Set, cast + +from .._cache import DNSCache, _UniqueRecordsType +from .._dns import DNSAddress, DNSPointer, DNSQuestion, DNSRecord, DNSRRSet +from .._history import QuestionHistory +from .._protocol.incoming import DNSIncoming +from .._services.registry import ServiceRegistry +from ..const import ( + _ADDRESS_RECORD_TYPES, + _CLASS_IN, + _DNS_OTHER_TTL, + _ONE_SECOND, + _SERVICE_TYPE_ENUMERATION_NAME, + _TYPE_A, + _TYPE_AAAA, + _TYPE_ANY, + _TYPE_NSEC, + _TYPE_PTR, + _TYPE_SRV, + _TYPE_TXT, +) +from .answers import QuestionAnswers, _AnswerWithAdditionalsType + +_RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} + + +class _QueryResponse: + """A pair for unicast and multicast DNSOutgoing responses.""" + + __slots__ = ( + "_is_probe", + "_msg", + "_now", + "_cache", + "_additionals", + "_ucast", + "_mcast_now", + "_mcast_aggregate", + "_mcast_aggregate_last_second", + ) + + def __init__(self, cache: DNSCache, msgs: List[DNSIncoming]) -> None: + """Build a query response.""" + self._is_probe = False + for msg in msgs: + if msg.is_probe: + self._is_probe = True + break + self._msg = msgs[0] + self._now = self._msg.now + self._cache = cache + self._additionals: _AnswerWithAdditionalsType 
= {} + self._ucast: Set[DNSRecord] = set() + self._mcast_now: Set[DNSRecord] = set() + self._mcast_aggregate: Set[DNSRecord] = set() + self._mcast_aggregate_last_second: Set[DNSRecord] = set() + + def add_qu_question_response(self, answers: _AnswerWithAdditionalsType) -> None: + """Generate a response to a multicast QU query.""" + for record, additionals in answers.items(): + self._additionals[record] = additionals + if self._is_probe: + self._ucast.add(record) + if not self._has_mcast_within_one_quarter_ttl(record): + self._mcast_now.add(record) + elif not self._is_probe: + self._ucast.add(record) + + def add_ucast_question_response(self, answers: _AnswerWithAdditionalsType) -> None: + """Generate a response to a unicast query.""" + self._additionals.update(answers) + self._ucast.update(answers) + + def add_mcast_question_response(self, answers: _AnswerWithAdditionalsType) -> None: + """Generate a response to a multicast query.""" + self._additionals.update(answers) + for answer in answers: + if self._is_probe: + self._mcast_now.add(answer) + continue + + if self._has_mcast_record_in_last_second(answer): + self._mcast_aggregate_last_second.add(answer) + elif len(self._msg.questions) == 1 and self._msg.questions[0].type in _RESPOND_IMMEDIATE_TYPES: + self._mcast_now.add(answer) + else: + self._mcast_aggregate.add(answer) + + def _generate_answers_with_additionals(self, rrset: Set[DNSRecord]) -> _AnswerWithAdditionalsType: + """Create answers with additionals from an rrset.""" + return {record: self._additionals[record] for record in rrset} + + def answers( + self, + ) -> QuestionAnswers: + """Return answer sets that will be queued.""" + return QuestionAnswers( + self._generate_answers_with_additionals(self._ucast), + self._generate_answers_with_additionals(self._mcast_now), + self._generate_answers_with_additionals(self._mcast_aggregate), + self._generate_answers_with_additionals(self._mcast_aggregate_last_second), + ) + + def 
_has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: + """Check to see if a record has been mcasted recently. + + https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 + When receiving a question with the unicast-response bit set, a + responder SHOULD usually respond with a unicast packet directed back + to the querier. However, if the responder has not multicast that + record recently (within one quarter of its TTL), then the responder + SHOULD instead multicast the response so as to keep all the peer + caches up to date + """ + if TYPE_CHECKING: + record = cast(_UniqueRecordsType, record) + maybe_entry = self._cache.async_get_unique(record) + return bool(maybe_entry and maybe_entry.is_recent(self._now)) + + def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: + """Check if an answer was seen in the last second. + Protect the network against excessive packet flooding + https://datatracker.ietf.org/doc/html/rfc6762#section-14 + """ + if TYPE_CHECKING: + record = cast(_UniqueRecordsType, record) + maybe_entry = self._cache.async_get_unique(record) + return bool(maybe_entry and self._now - maybe_entry.created < _ONE_SECOND) + + +class QueryHandler: + """Query the ServiceRegistry.""" + + __slots__ = ("registry", "cache", "question_history") + + def __init__(self, registry: ServiceRegistry, cache: DNSCache, question_history: QuestionHistory) -> None: + """Init the query handler.""" + self.registry = registry + self.cache = cache + self.question_history = question_history + + def _add_service_type_enumeration_query_answers( + self, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, now: float + ) -> None: + """Provide an answer to a service type enumeration query. 
+ + https://datatracker.ietf.org/doc/html/rfc6763#section-9 + """ + for stype in self.registry.async_get_types(): + dns_pointer = DNSPointer( + _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, now + ) + if not known_answers.suppresses(dns_pointer): + answer_set[dns_pointer] = set() + + def _add_pointer_answers( + self, lower_name: str, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, now: float + ) -> None: + """Answer PTR/ANY question.""" + for service in self.registry.async_get_infos_type(lower_name): + # Add recommended additional answers according to + # https://tools.ietf.org/html/rfc6763#section-12.1. + dns_pointer = service.dns_pointer(created=now) + if known_answers.suppresses(dns_pointer): + continue + answer_set[dns_pointer] = { + service.dns_service(created=now), + service.dns_text(created=now), + } | service.get_address_and_nsec_records(created=now) + + def _add_address_answers( + self, + lower_name: str, + answer_set: _AnswerWithAdditionalsType, + known_answers: DNSRRSet, + now: float, + type_: int, + ) -> None: + """Answer A/AAAA/ANY question.""" + for service in self.registry.async_get_infos_server(lower_name): + answers: List[DNSAddress] = [] + additionals: Set[DNSRecord] = set() + seen_types: Set[int] = set() + for dns_address in service.dns_addresses(created=now): + seen_types.add(dns_address.type) + if dns_address.type != type_: + additionals.add(dns_address) + elif not known_answers.suppresses(dns_address): + answers.append(dns_address) + missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types + if answers: + if missing_types: + assert service.server is not None, "Service server must be set for NSEC record." + additionals.add(service.dns_nsec(list(missing_types), created=now)) + for answer in answers: + answer_set[answer] = additionals + elif type_ in missing_types: + assert service.server is not None, "Service server must be set for NSEC record." 
+ answer_set[service.dns_nsec(list(missing_types), created=now)] = set() + + def _answer_question( + self, + question: DNSQuestion, + known_answers: DNSRRSet, + now: float, + ) -> _AnswerWithAdditionalsType: + answer_set: _AnswerWithAdditionalsType = {} + question_lower_name = question.name.lower() + + if question.type == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: + self._add_service_type_enumeration_query_answers(answer_set, known_answers, now) + return answer_set + + type_ = question.type + + if type_ in (_TYPE_PTR, _TYPE_ANY): + self._add_pointer_answers(question_lower_name, answer_set, known_answers, now) + + if type_ in (_TYPE_A, _TYPE_AAAA, _TYPE_ANY): + self._add_address_answers(question_lower_name, answer_set, known_answers, now, type_) + + if type_ in (_TYPE_SRV, _TYPE_TXT, _TYPE_ANY): + service = self.registry.async_get_info_name(question_lower_name) + if service is not None: + if type_ in (_TYPE_SRV, _TYPE_ANY): + # Add recommended additional answers according to + # https://tools.ietf.org/html/rfc6763#section-12.2. + dns_service = service.dns_service(created=now) + if not known_answers.suppresses(dns_service): + answer_set[dns_service] = service.get_address_and_nsec_records(created=now) + if type_ in (_TYPE_TXT, _TYPE_ANY): + dns_text = service.dns_text(created=now) + if not known_answers.suppresses(dns_text): + answer_set[dns_text] = set() + + return answer_set + + def async_response( # pylint: disable=unused-argument + self, msgs: List[DNSIncoming], ucast_source: bool + ) -> QuestionAnswers: + """Deal with incoming query packets. Provides a response if possible. + + This function must be run in the event loop as it is not + threadsafe. 
+ """ + known_answers = DNSRRSet([msg.answers for msg in msgs if not msg.is_probe]) + query_res = _QueryResponse(self.cache, msgs) + + for msg in msgs: + for question in msg.questions: + if not question.unicast: + self.question_history.add_question_at_time(question, msg.now, set(known_answers.lookup)) + answer_set = self._answer_question(question, known_answers, msg.now) + if not ucast_source and question.unicast: + query_res.add_qu_question_response(answer_set) + continue + if ucast_source: + query_res.add_ucast_question_response(answer_set) + # We always multicast as well even if its a unicast + # source as long as we haven't done it recently (75% of ttl) + query_res.add_mcast_question_response(answer_set) + + return query_res.answers() diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py new file mode 100644 index 00000000..9f0f4787 --- /dev/null +++ b/src/zeroconf/_handlers/record_manager.py @@ -0,0 +1,211 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +import itertools +from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Union, cast + +from .._cache import _UniqueRecordsType +from .._dns import DNSQuestion, DNSRecord +from .._logger import log +from .._protocol.incoming import DNSIncoming +from .._updates import RecordUpdate, RecordUpdateListener +from .._utils.time import current_time_millis +from ..const import _ADDRESS_RECORD_TYPES, _DNS_PTR_MIN_TTL, _TYPE_PTR + +if TYPE_CHECKING: + from .._core import Zeroconf + + +class RecordManager: + """Process records into the cache and notify listeners.""" + + __slots__ = ("zc", "cache", "listeners") + + def __init__(self, zeroconf: 'Zeroconf') -> None: + """Init the record manager.""" + self.zc = zeroconf + self.cache = zeroconf.cache + self.listeners: List[RecordUpdateListener] = [] + + def async_updates(self, now: float, records: List[RecordUpdate]) -> None: + """Used to notify listeners of new information that has updated + a record. + + This method must be called before the cache is updated. + + This method will be run in the event loop. + """ + for listener in self.listeners: + listener.async_update_records(self.zc, now, records) + + def async_updates_complete(self, notify: bool) -> None: + """Used to notify listeners of new information that has updated + a record. + + This method must be called after the cache is updated. + + This method will be run in the event loop. + """ + for listener in self.listeners: + listener.async_update_records_complete() + if notify: + self.zc.async_notify_all() + + def async_updates_from_response(self, msg: DNSIncoming) -> None: + """Deal with incoming response packets. All answers + are held in the cache, and listeners are notified. 
+ + This function must be run in the event loop as it is not + threadsafe. + """ + updates: List[RecordUpdate] = [] + address_adds: List[DNSRecord] = [] + other_adds: List[DNSRecord] = [] + removes: Set[DNSRecord] = set() + now = msg.now + unique_types: Set[Tuple[str, int, int]] = set() + cache = self.cache + + for record in msg.answers: + # Protect zeroconf from records that can cause denial of service. + # + # We enforce a minimum TTL for PTR records to avoid + # ServiceBrowsers generating excessive queries refresh queries. + # Apple uses a 15s minimum TTL, however we do not have the same + # level of rate limit and safe guards so we use 1/4 of the recommended value. + record_type = record.type + record_ttl = record.ttl + if record_ttl and record_type == _TYPE_PTR and record_ttl < _DNS_PTR_MIN_TTL: + log.debug( + "Increasing effective ttl of %s to minimum of %s to protect against excessive refreshes.", + record, + _DNS_PTR_MIN_TTL, + ) + record.set_created_ttl(record.created, _DNS_PTR_MIN_TTL) + + if record.unique: # https://tools.ietf.org/html/rfc6762#section-10.2 + unique_types.add((record.name, record_type, record.class_)) + + if TYPE_CHECKING: + record = cast(_UniqueRecordsType, record) + + maybe_entry = cache.async_get_unique(record) + if not record.is_expired(now): + if maybe_entry is not None: + maybe_entry.reset_ttl(record) + else: + if record.type in _ADDRESS_RECORD_TYPES: + address_adds.append(record) + else: + other_adds.append(record) + updates.append(RecordUpdate(record, maybe_entry)) + # This is likely a goodbye since the record is + # expired and exists in the cache + elif maybe_entry is not None: + updates.append(RecordUpdate(record, maybe_entry)) + removes.add(record) + + if unique_types: + cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, msg.answers, now) + + if updates: + self.async_updates(now, updates) + # The cache adds must be processed AFTER we trigger + # the updates since we compare existing data + # with the new 
data and updating the cache + # ahead of update_record will cause listeners + # to miss changes + # + # We must process address adds before non-addresses + # otherwise a fetch of ServiceInfo may miss an address + # because it thinks the cache is complete + # + # The cache is processed under the context manager to ensure + # that any ServiceBrowser that is going to call + # zc.get_service_info will see the cached value + # but ONLY after all the record updates have been + # processsed. + new = False + if other_adds or address_adds: + new = cache.async_add_records(itertools.chain(address_adds, other_adds)) + # Removes are processed last since + # ServiceInfo could generate an un-needed query + # because the data was not yet populated. + if removes: + cache.async_remove_records(removes) + if updates: + self.async_updates_complete(new) + + def async_add_listener( + self, listener: RecordUpdateListener, question: Optional[Union[DNSQuestion, List[DNSQuestion]]] + ) -> None: + """Adds a listener for a given question. The listener will have + its update_record method called when information is available to + answer the question(s). + + This function is not thread-safe and must be called in the eventloop. + """ + if not isinstance(listener, RecordUpdateListener): + log.error( # type: ignore[unreachable] + "listeners passed to async_add_listener must inherit from RecordUpdateListener;" + " In the future this will fail" + ) + + self.listeners.append(listener) + + if question is None: + return + + questions = [question] if isinstance(question, DNSQuestion) else question + assert self.zc.loop is not None + self._async_update_matching_records(listener, questions) + + def _async_update_matching_records( + self, listener: RecordUpdateListener, questions: List[DNSQuestion] + ) -> None: + """Calls back any existing entries in the cache that answer the question. + + This function must be run from the event loop. 
+ """ + now = current_time_millis() + records: List[RecordUpdate] = [ + RecordUpdate(record, None) + for question in questions + for record in self.cache.async_entries_with_name(question.name) + if not record.is_expired(now) and question.answered_by(record) + ] + if not records: + return + listener.async_update_records(self.zc, now, records) + listener.async_update_records_complete() + self.zc.async_notify_all() + + def async_remove_listener(self, listener: RecordUpdateListener) -> None: + """Removes a listener. + + This function is not threadsafe and must be called in the eventloop. + """ + try: + self.listeners.remove(listener) + self.zc.async_notify_all() + except ValueError as e: + log.exception('Failed to remove listener: %r', e) diff --git a/tests/__init__.py b/tests/__init__.py index 959cc3f3..f203ff07 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -81,6 +81,6 @@ def has_working_ipv6(): return False -def _clear_cache(zc): +def _clear_cache(zc: Zeroconf) -> None: zc.cache.cache.clear() zc.question_history.clear() diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index d49295fa..d269f85c 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -22,11 +22,11 @@ DNSQuestion, Zeroconf, _engine, - _handlers, const, current_time_millis, millis_to_seconds, ) +from zeroconf._handlers import record_manager from zeroconf._services import ServiceStateChange from zeroconf._services.browser import ServiceBrowser from zeroconf._services.info import ServiceInfo @@ -1151,7 +1151,7 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: zc.close() -@patch.object(_handlers, '_DNS_PTR_MIN_TTL', 1) +@patch.object(record_manager, '_DNS_PTR_MIN_TTL', 1) @patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01) def test_service_browser_expire_callbacks(): """Test that the ServiceBrowser matching does not match partial names.""" diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 
607f6819..4cfdd8e9 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -15,8 +15,9 @@ import pytest import zeroconf as r -from zeroconf import ServiceInfo, Zeroconf, _handlers, const, current_time_millis -from zeroconf._handlers import ( +from zeroconf import ServiceInfo, Zeroconf, const, current_time_millis +from zeroconf._handlers import multicast_outgoing_queue +from zeroconf._handlers.multicast_outgoing_queue import ( MulticastOutgoingQueue, construct_outgoing_multicast_answers, ) @@ -1575,15 +1576,15 @@ async def test_response_aggregation_random_delay(): outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 500) now = current_time_millis() - with unittest.mock.patch.object(_handlers, "_MULTICAST_DELAY_RANDOM_INTERVAL", (500, 600)): + with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (500, 600)): outgoing_queue.async_add(now, {info.dns_pointer(): set()}) # The second group should always be coalesced into first group since it will always come before - with unittest.mock.patch.object(_handlers, "_MULTICAST_DELAY_RANDOM_INTERVAL", (300, 400)): + with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (300, 400)): outgoing_queue.async_add(now, {info2.dns_pointer(): set()}) # The third group should always be coalesced into first group since it will always come before - with unittest.mock.patch.object(_handlers, "_MULTICAST_DELAY_RANDOM_INTERVAL", (100, 200)): + with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (100, 200)): outgoing_queue.async_add(now, {info3.dns_pointer(): set(), info4.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 1 @@ -1593,7 +1594,7 @@ async def test_response_aggregation_random_delay(): assert info4.dns_pointer() in outgoing_queue.queue[0].answers # The forth group should not be coalesced because its scheduled after the last group in the queue - with unittest.mock.patch.object(_handlers, 
"_MULTICAST_DELAY_RANDOM_INTERVAL", (700, 800)): + with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (700, 800)): outgoing_queue.async_add(now, {info5.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 2 @@ -1624,17 +1625,19 @@ async def test_future_answers_are_removed_on_send(): outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 0) now = current_time_millis() - with unittest.mock.patch.object(_handlers, "_MULTICAST_DELAY_RANDOM_INTERVAL", (1, 1)): + with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (1, 1)): outgoing_queue.async_add(now, {info.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 1 - with unittest.mock.patch.object(_handlers, "_MULTICAST_DELAY_RANDOM_INTERVAL", (2, 2)): + with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (2, 2)): outgoing_queue.async_add(now, {info.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 2 - with unittest.mock.patch.object(_handlers, "_MULTICAST_DELAY_RANDOM_INTERVAL", (1000, 1000)): + with unittest.mock.patch.object( + multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (1000, 1000) + ): outgoing_queue.async_add(now, {info2.dns_pointer(): set()}) outgoing_queue.async_add(now, {info.dns_pointer(): set()}) From a7dad3d9743586f352e21eea1e129c6875f9a713 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 28 Aug 2023 20:58:19 -0500 Subject: [PATCH 061/434] feat: improve performance by adding cython pxd for RecordManager (#1241) --- build_ext.py | 1 + src/zeroconf/_handlers/record_manager.pxd | 23 +++++++++++++++++++++++ src/zeroconf/_handlers/record_manager.py | 4 ++-- 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 src/zeroconf/_handlers/record_manager.pxd diff --git a/build_ext.py b/build_ext.py index f2c59288..1b27457d 100644 --- a/build_ext.py +++ b/build_ext.py @@ -29,6 +29,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_listener.py", "src/zeroconf/_protocol/incoming.py", "src/zeroconf/_protocol/outgoing.py", + "src/zeroconf/_handlers/record_manager.py", "src/zeroconf/_services/registry.py", ], compiler_directives={"language_level": "3"}, # Python 3 diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd new file mode 100644 index 00000000..7616bead --- /dev/null +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -0,0 +1,23 @@ + +import cython + +from .._cache cimport DNSCache +from .._dns cimport DNSRecord +from .._protocol.incoming cimport DNSIncoming + + +cdef cython.float _DNS_PTR_MIN_TTL +cdef object _ADDRESS_RECORD_TYPES +cdef object RecordUpdate + +cdef class RecordManager: + + cdef object zc + cdef DNSCache cache + cdef cython.list listeners + + @cython.locals( + cache=DNSCache, + record=DNSRecord + ) + cpdef async_updates_from_response(self, DNSIncoming msg) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 9f0f4787..94a37b78 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -20,7 +20,6 @@ USA """ -import itertools from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Union, cast from .._cache import _UniqueRecordsType @@ -146,7 +145,8 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: # processsed. 
new = False if other_adds or address_adds: - new = cache.async_add_records(itertools.chain(address_adds, other_adds)) + new = cache.async_add_records(address_adds) + new |= cache.async_add_records(other_adds) # Removes are processed last since # ServiceInfo could generate an un-needed query # because the data was not yet populated. From f8ad5a2df2914ab310ac3fd34343e7e09e66ebf6 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 29 Aug 2023 02:18:59 +0000 Subject: [PATCH 062/434] 0.87.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 87aebdc3..d8bbc85c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.87.0 (2023-08-29) + +### Feature + +* Improve performance by adding cython pxd for RecordManager ([#1241](https://github.com/python-zeroconf/python-zeroconf/issues/1241)) ([`a7dad3d`](https://github.com/python-zeroconf/python-zeroconf/commit/a7dad3d9743586f352e21eea1e129c6875f9a713)) + ## v0.86.0 (2023-08-28) ### Feature diff --git a/pyproject.toml b/pyproject.toml index f3c7e7b7..34ad7e69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.86.0" +version = "0.87.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index cbd3dce5..3d63213b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.86.0' +__version__ = '0.87.0' __license__ = 'LGPL' From 5a76fc5ff74f2941ffbf7570e45390f35e0b7e01 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 28 Aug 2023 21:47:13 -0500 Subject: [PATCH 063/434] feat: speed up RecordManager with additional cython defs (#1242) --- src/zeroconf/_cache.pxd | 8 ++++++++ src/zeroconf/_handlers/record_manager.pxd | 16 ++++++++++++---- src/zeroconf/_handlers/record_manager.py | 12 ++++++++---- 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index 07eeb807..6bc9ea5d 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -23,6 +23,12 @@ cdef class DNSCache: cdef public cython.dict cache cdef public cython.dict service_cache + cpdef async_add_records(self, object entries) + + cpdef async_remove_records(self, object entries) + + cpdef async_get_unique(self, DNSRecord entry) + @cython.locals( records=cython.dict, record=DNSRecord, @@ -33,6 +39,8 @@ cdef class DNSCache: cdef _async_remove(self, DNSRecord record) + cpdef async_mark_unique_records_older_than_1s_to_expire(self, object unique_types, object answers, object now) + @cython.locals( record=DNSRecord, ) diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index 7616bead..7a55e64f 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -9,15 +9,23 @@ from .._protocol.incoming cimport DNSIncoming cdef cython.float _DNS_PTR_MIN_TTL cdef object _ADDRESS_RECORD_TYPES cdef object RecordUpdate +cdef object TYPE_CHECKING +cdef object _TYPE_PTR cdef class RecordManager: - cdef object zc - cdef DNSCache cache - cdef cython.list listeners + cdef public object zc + cdef public DNSCache cache + cdef public cython.list listeners + + cpdef async_updates(self, object now, object records) + + cpdef async_updates_complete(self, object notify) @cython.locals( cache=DNSCache, - record=DNSRecord + record=DNSRecord, + maybe_entry=DNSRecord, + now_float=cython.float ) cpdef async_updates_from_response(self, DNSIncoming msg) diff --git 
a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 94a37b78..5e4f7c9b 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -33,6 +33,8 @@ if TYPE_CHECKING: from .._core import Zeroconf +_float = float + class RecordManager: """Process records into the cache and notify listeners.""" @@ -45,7 +47,7 @@ def __init__(self, zeroconf: 'Zeroconf') -> None: self.cache = zeroconf.cache self.listeners: List[RecordUpdateListener] = [] - def async_updates(self, now: float, records: List[RecordUpdate]) -> None: + def async_updates(self, now: _float, records: List[RecordUpdate]) -> None: """Used to notify listeners of new information that has updated a record. @@ -81,6 +83,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: other_adds: List[DNSRecord] = [] removes: Set[DNSRecord] = set() now = msg.now + now_float = now unique_types: Set[Tuple[str, int, int]] = set() cache = self.cache @@ -108,11 +111,11 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: record = cast(_UniqueRecordsType, record) maybe_entry = cache.async_get_unique(record) - if not record.is_expired(now): + if not record.is_expired(now_float): if maybe_entry is not None: maybe_entry.reset_ttl(record) else: - if record.type in _ADDRESS_RECORD_TYPES: + if record_type in _ADDRESS_RECORD_TYPES: address_adds.append(record) else: other_adds.append(record) @@ -146,7 +149,8 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: new = False if other_adds or address_adds: new = cache.async_add_records(address_adds) - new |= cache.async_add_records(other_adds) + if cache.async_add_records(other_adds): + new = True # Removes are processed last since # ServiceInfo could generate an un-needed query # because the data was not yet populated. 
From a3e98cb77baeddb5098e669b852a9ad951fd2506 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 29 Aug 2023 02:55:15 +0000 Subject: [PATCH 064/434] 0.88.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d8bbc85c..2ff20c2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.88.0 (2023-08-29) + +### Feature + +* Speed up RecordManager with additional cython defs ([#1242](https://github.com/python-zeroconf/python-zeroconf/issues/1242)) ([`5a76fc5`](https://github.com/python-zeroconf/python-zeroconf/commit/5a76fc5ff74f2941ffbf7570e45390f35e0b7e01)) + ## v0.87.0 (2023-08-29) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 34ad7e69..594ee6bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.87.0" +version = "0.88.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 3d63213b..de54784b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.87.0' +__version__ = '0.88.0' __license__ = 'LGPL' From 18b65d1c75622869b0c29258215d3db3ae520d6c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 1 Sep 2023 19:09:42 -0500 Subject: [PATCH 065/434] feat: reduce overhead to process incoming questions (#1244) --- build_ext.py | 1 + src/zeroconf/_listener.pxd | 6 +++--- src/zeroconf/_utils/time.pxd | 4 ++++ src/zeroconf/_utils/time.py | 4 +++- 4 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 src/zeroconf/_utils/time.pxd diff --git a/build_ext.py b/build_ext.py index 1b27457d..8e4e7b99 100644 --- a/build_ext.py +++ b/build_ext.py @@ -31,6 +31,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_protocol/outgoing.py", "src/zeroconf/_handlers/record_manager.py", "src/zeroconf/_services/registry.py", + "src/zeroconf/_utils/time.py", ], compiler_directives={"language_level": "3"}, # Python 3 ), diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 0f32a44a..87ed8b5f 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -1,13 +1,13 @@ import cython +from ._protocol.incoming cimport DNSIncoming +from ._utils.time cimport current_time_millis, millis_to_seconds + -cdef object millis_to_seconds cdef object log cdef object logging_DEBUG -from ._protocol.incoming cimport DNSIncoming - cdef class AsyncListener: diff --git a/src/zeroconf/_utils/time.pxd b/src/zeroconf/_utils/time.pxd new file mode 100644 index 00000000..367f39b6 --- /dev/null +++ b/src/zeroconf/_utils/time.pxd @@ -0,0 +1,4 @@ + +cpdef current_time_millis() + +cpdef millis_to_seconds(object millis) diff --git a/src/zeroconf/_utils/time.py b/src/zeroconf/_utils/time.py index 59362c55..c6811585 100644 --- a/src/zeroconf/_utils/time.py +++ b/src/zeroconf/_utils/time.py @@ -23,6 +23,8 @@ import time +_float = float + def current_time_millis() -> float: """Current time in milliseconds. 
@@ -33,6 +35,6 @@ def current_time_millis() -> float: return time.monotonic() * 1000 -def millis_to_seconds(millis: float) -> float: +def millis_to_seconds(millis: _float) -> float: """Convert milliseconds to seconds.""" return millis / 1000.0 From a0b6266fa4d1500421df1696f7e7dc723ea54672 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 2 Sep 2023 00:18:06 +0000 Subject: [PATCH 066/434] 0.89.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2ff20c2d..bb68ad09 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.89.0 (2023-09-02) + +### Feature + +* Reduce overhead to process incoming questions ([#1244](https://github.com/python-zeroconf/python-zeroconf/issues/1244)) ([`18b65d1`](https://github.com/python-zeroconf/python-zeroconf/commit/18b65d1c75622869b0c29258215d3db3ae520d6c)) + ## v0.88.0 (2023-08-29) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 594ee6bc..6b228f13 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.88.0" +version = "0.89.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index de54784b..d69d6864 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.88.0' +__version__ = '0.89.0' __license__ = 'LGPL' From 36ae505dc9f95b59fdfb632960845a45ba8575b8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 1 Sep 2023 20:01:55 -0500 Subject: [PATCH 067/434] refactor: reduce duplicate code in engine.py (#1246) --- src/zeroconf/_engine.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 44435750..a74c091c 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -80,8 +80,7 @@ def setup(self, loop: asyncio.AbstractEventLoop, loop_thread_ready: Optional[thr async def _async_setup(self, loop_thread_ready: Optional[threading.Event]) -> None: """Set up the instance.""" - assert self.loop is not None - self._cleanup_timer = self.loop.call_later(_CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) + self._async_schedule_next_cache_cleanup() await self._async_create_endpoints() assert self.running_event is not None self.running_event.set() @@ -118,8 +117,13 @@ def _async_cache_cleanup(self) -> None: now, [RecordUpdate(record, record) for record in self.zc.cache.async_expire(now)] ) self.zc.record_manager.async_updates_complete(False) - assert self.loop is not None - self._cleanup_timer = self.loop.call_later(_CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) + self._async_schedule_next_cache_cleanup() + + def _async_schedule_next_cache_cleanup(self) -> None: + """Schedule the next cache cleanup.""" + loop = self.loop + assert loop is not None + self._cleanup_timer = loop.call_at(loop.time() + _CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) async def _async_close(self) -> None: """Cancel and wait for the cleanup task to finish.""" From 816ad4dceb3859bad4bb136bdb1d1ee2daa0bf5a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 1 Sep 2023 20:02:01 -0500 Subject: [PATCH 068/434] feat: avoid python float conversion in listener hot path (#1245) --- src/zeroconf/_listener.pxd | 5 +++++ src/zeroconf/_listener.py | 23 +++++++++++++++-------- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 87ed8b5f..75114d56 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -7,7 +7,10 @@ from ._utils.time cimport current_time_millis, millis_to_seconds cdef object log cdef object logging_DEBUG +cdef object TYPE_CHECKING +cdef cython.uint _MAX_MSG_ABSOLUTE +cdef cython.uint _DUPLICATE_PACKET_SUPPRESSION_INTERVAL cdef class AsyncListener: @@ -22,3 +25,5 @@ cdef class AsyncListener: @cython.locals(now=cython.float, msg=DNSIncoming) cpdef datagram_received(self, cython.bytes bytes, cython.tuple addrs) + + cdef _cancel_any_timers_for_addr(self, object addr) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index bc0af296..8da9381c 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -38,6 +38,8 @@ _bytes = bytes +_str = str +_int = int logging_DEBUG = logging.DEBUG @@ -110,10 +112,13 @@ def datagram_received( ) return - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = () if len(addrs) == 2: + v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = () # https://github.com/python/mypy/issues/1178 addr, port = addrs # type: ignore + addr_port = addrs + if TYPE_CHECKING: + addr_port = cast(Tuple[str, int], addr_port) scope = None else: # https://github.com/python/mypy/issues/1178 @@ -121,8 +126,9 @@ def datagram_received( if debug: # pragma: no branch log.debug('IPv6 scope_id %d associated to the receiving interface', scope) v6_flow_scope = (flow, scope) + addr_port = (addr, port) - msg = DNSIncoming(data, (addr, port), scope, now) + msg = DNSIncoming(data, addr_port, scope, now) self.data = data self.last_time = now self.last_message = msg @@ -176,13 +182,14 @@ 
def handle_query_or_defer( return deferred.append(msg) delay = millis_to_seconds(random.randint(*_TC_DELAY_RANDOM_INTERVAL)) - assert self.zc.loop is not None + loop = self.zc.loop + assert loop is not None self._cancel_any_timers_for_addr(addr) - self._timers[addr] = self.zc.loop.call_later( - delay, self._respond_query, None, addr, port, transport, v6_flow_scope + self._timers[addr] = loop.call_at( + loop.time() + delay, self._respond_query, None, addr, port, transport, v6_flow_scope ) - def _cancel_any_timers_for_addr(self, addr: str) -> None: + def _cancel_any_timers_for_addr(self, addr: _str) -> None: """Cancel any future truncated packet timers for the address.""" if addr in self._timers: self._timers.pop(addr).cancel() @@ -190,8 +197,8 @@ def _cancel_any_timers_for_addr(self, addr: str) -> None: def _respond_query( self, msg: Optional[DNSIncoming], - addr: str, - port: int, + addr: _str, + port: _int, transport: _WrappedTransport, v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), ) -> None: From f26218da633da0d57a6892ba6ac0a7847bc0b6a6 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 2 Sep 2023 01:10:40 +0000 Subject: [PATCH 069/434] 0.90.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bb68ad09..82f2aa00 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.90.0 (2023-09-02) + +### Feature + +* Avoid python float conversion in listener hot path ([#1245](https://github.com/python-zeroconf/python-zeroconf/issues/1245)) ([`816ad4d`](https://github.com/python-zeroconf/python-zeroconf/commit/816ad4dceb3859bad4bb136bdb1d1ee2daa0bf5a)) + ## v0.89.0 (2023-09-02) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 6b228f13..88046786 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.89.0" 
+version = "0.90.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index d69d6864..07779981 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.89.0' +__version__ = '0.90.0' __license__ = 'LGPL' From 5e31f0afe4c341fbdbbbe50348a829ea553cbda0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 1 Sep 2023 22:23:11 -0500 Subject: [PATCH 070/434] feat: reduce overhead to process incoming updates by avoiding the handle_response shim (#1247) --- src/zeroconf/_core.py | 1 + src/zeroconf/_listener.pxd | 4 ++++ src/zeroconf/_listener.py | 4 +++- src/zeroconf/_protocol/incoming.pxd | 4 ++++ tests/__init__.py | 2 +- tests/services/test_info.py | 30 ++++++++++++++--------------- tests/test_asyncio.py | 1 + tests/test_core.py | 2 +- tests/test_handlers.py | 10 +++++----- 9 files changed, 35 insertions(+), 23 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 4960f1e0..aebcee34 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -564,6 +564,7 @@ def async_remove_listener(self, listener: RecordUpdateListener) -> None: def handle_response(self, msg: DNSIncoming) -> None: """Deal with incoming response packets. 
All answers are held in the cache, and listeners are notified.""" + self.log_warning_once("handle_response is deprecated, use record_manager.async_updates_from_response") self.record_manager.async_updates_from_response(msg) def handle_assembled_query( diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 75114d56..4e4144c7 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -1,6 +1,7 @@ import cython +from ._handlers.record_manager cimport RecordManager from ._protocol.incoming cimport DNSIncoming from ._utils.time cimport current_time_millis, millis_to_seconds @@ -12,9 +13,12 @@ cdef object TYPE_CHECKING cdef cython.uint _MAX_MSG_ABSOLUTE cdef cython.uint _DUPLICATE_PACKET_SUPPRESSION_INTERVAL + + cdef class AsyncListener: cdef public object zc + cdef RecordManager _record_manager cdef public cython.bytes data cdef public cython.float last_time cdef public DNSIncoming last_message diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 8da9381c..913c169f 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -55,6 +55,7 @@ class AsyncListener: __slots__ = ( 'zc', + '_record_manager', 'data', 'last_time', 'last_message', @@ -66,6 +67,7 @@ class AsyncListener: def __init__(self, zc: 'Zeroconf') -> None: self.zc = zc + self._record_manager = zc.record_manager self.data: Optional[bytes] = None self.last_time: float = 0 self.last_message: Optional[DNSIncoming] = None @@ -156,7 +158,7 @@ def datagram_received( return if not msg.is_query(): - self.zc.handle_response(msg) + self._record_manager.async_updates_from_response(msg) return self.handle_query_or_defer(msg, addr, port, self.transport, v6_flow_scope) diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index a7130b66..604b1e30 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -70,6 +70,10 @@ cdef class DNSIncoming: ) cpdef has_qu_question(self) + cpdef 
is_query(self) + + cpdef is_response(self) + @cython.locals( off=cython.uint, label_idx=cython.uint, diff --git a/tests/__init__.py b/tests/__init__.py index f203ff07..98cd901c 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -42,7 +42,7 @@ def _inject_responses(zc: Zeroconf, msgs: List[DNSIncoming]) -> None: async def _wait_for_response(): for msg in msgs: - zc.handle_response(msg) + zc.record_manager.async_updates_from_response(msg) asyncio.run_coroutine_threadsafe(_wait_for_response(), zc.loop).result() diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 1fc3bd01..c0a4e661 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -903,7 +903,7 @@ async def test_release_wait_when_new_recorded_added(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) assert await asyncio.wait_for(task, timeout=2) assert info.addresses == [b'\x7f\x00\x00\x01'] await aiozc.async_close() @@ -966,7 +966,7 @@ async def test_port_changes_are_seen(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -982,7 +982,7 @@ async def test_port_changes_are_seen(): ), 0, ) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name, 80, 10, 10, desc, host) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1049,7 +1049,7 @@ async def test_port_changes_are_seen_with_directed_request(): ) await 
aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -1065,7 +1065,7 @@ async def test_port_changes_are_seen_with_directed_request(): ), 0, ) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name, 80, 10, 10, desc, host) await info.async_request(aiozc.zeroconf, timeout=200, addr="127.0.0.1", port=5353) @@ -1131,7 +1131,7 @@ async def test_ipv4_changes_are_seen(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x01'] @@ -1147,7 +1147,7 @@ async def test_ipv4_changes_are_seen(): ), 0, ) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) @@ -1213,7 +1213,7 @@ async def test_ipv6_changes_are_seen(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V6Only) == [ @@ -1231,7 +1231,7 @@ async def 
test_ipv6_changes_are_seen(): ), 0, ) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) @@ -1295,7 +1295,7 @@ async def test_bad_ip_addresses_ignored_in_cache(): await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x01'] @@ -1354,7 +1354,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1374,7 +1374,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): ), 0, ) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1426,7 +1426,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1456,7 +1456,7 @@ async 
def test_service_name_change_as_seen_ip_not_in_cache(): ), 0, ) - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1530,7 +1530,7 @@ async def test_release_wait_when_new_recorded_added_concurrency(): await asyncio.sleep(0) for task in tasks: assert not task.done() - aiozc.zeroconf.handle_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) _, pending = await asyncio.wait(tasks, timeout=2) assert not pending assert info.addresses == [b'\x7f\x00\x00\x01'] diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 395a16ea..18e8c8e0 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1192,6 +1192,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de 0, ) + zc.record_manager.async_updates_from_response(DNSIncoming(generated.packets()[0])) zc.handle_response(DNSIncoming(generated.packets()[0])) await browser.async_cancel() diff --git a/tests/test_core.py b/tests/test_core.py index 303e28ef..4bce6db9 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -105,7 +105,7 @@ def test_launch_and_close_apple_p2p_on_mac(self): rv = r.Zeroconf(apple_p2p=True) rv.close() - def test_handle_response(self): + def test_async_updates_from_response(self): def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncoming: ttl = 120 generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 4cfdd8e9..bdd16c3c 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1423,7 +1423,7 @@ async def test_response_aggregation_timings(run_isolated): assert len(calls) == 1 outgoing = send_mock.call_args[0][0] incoming = 
r.DNSIncoming(outgoing.packets()[0]) - zc.handle_response(incoming) + zc.record_manager.async_updates_from_response(incoming) assert info.dns_pointer() in incoming.answers assert info2.dns_pointer() in incoming.answers send_mock.reset_mock() @@ -1437,7 +1437,7 @@ async def test_response_aggregation_timings(run_isolated): assert len(calls) == 1 outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) - zc.handle_response(incoming) + zc.record_manager.async_updates_from_response(incoming) assert info3.dns_pointer() in incoming.answers send_mock.reset_mock() @@ -1499,7 +1499,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli assert len(calls) == 1 outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) - zc.handle_response(incoming) + zc.record_manager.async_updates_from_response(incoming) assert info2.dns_pointer() in incoming.answers send_mock.reset_mock() @@ -1509,7 +1509,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli assert len(calls) == 1 outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) - zc.handle_response(incoming) + zc.record_manager.async_updates_from_response(incoming) assert info2.dns_pointer() in incoming.answers send_mock.reset_mock() @@ -1532,7 +1532,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli assert len(calls) == 1 outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) - zc.handle_response(incoming) + zc.record_manager.async_updates_from_response(incoming) assert info2.dns_pointer() in incoming.answers From a7feade6baf5e9c9baffeba66d023fd156be86fe Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 2 Sep 2023 03:31:28 +0000 Subject: [PATCH 071/434] 0.91.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 
insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 82f2aa00..63153755 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.91.0 (2023-09-02) + +### Feature + +* Reduce overhead to process incoming updates by avoiding the handle_response shim ([#1247](https://github.com/python-zeroconf/python-zeroconf/issues/1247)) ([`5e31f0a`](https://github.com/python-zeroconf/python-zeroconf/commit/5e31f0afe4c341fbdbbbe50348a829ea553cbda0)) + ## v0.90.0 (2023-09-02) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 88046786..bb5cf8c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.90.0" +version = "0.91.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 07779981..c41bd89c 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.90.0' +__version__ = '0.91.0' __license__ = 'LGPL' From 4e40fae20bf50b4608e28fad4a360c4ed48ac86b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 1 Sep 2023 23:49:40 -0500 Subject: [PATCH 072/434] fix: remove useless calls in ServiceInfo (#1248) --- src/zeroconf/_services/info.py | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 19e4ce29..a308fddb 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -427,7 +427,10 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo return False record_key = record.key - if record_key == self.server_key and type(record) is DNSAddress: + record_type = type(record) + if record_key == self.server_key and record_type is DNSAddress: + if TYPE_CHECKING: + assert isinstance(record, DNSAddress) try: ip_addr = _cached_ip_addresses(record.address) except ValueError as ex: @@ -435,9 +438,6 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo return False if type(ip_addr) is IPv4Address: - if self._ipv4_addresses: - self._set_ipv4_addresses_from_cache(zc, now) - ipv4_addresses = self._ipv4_addresses if ip_addr not in ipv4_addresses: ipv4_addresses.insert(0, ip_addr) @@ -448,9 +448,6 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo return False - if not self._ipv6_addresses: - self._set_ipv6_addresses_from_cache(zc, now) - ipv6_addresses = self._ipv6_addresses if ip_addr not in self._ipv6_addresses: ipv6_addresses.insert(0, ip_addr) @@ -464,13 +461,18 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo if record_key != self.key: return False - if record.type == _TYPE_TXT and type(record) is DNSText: + if record_type is DNSText: + if TYPE_CHECKING: + assert isinstance(record, DNSText) self._set_text(record.text) return True - if record.type == _TYPE_SRV and type(record) is DNSService: + if record_type is DNSService: + if TYPE_CHECKING: + assert isinstance(record, DNSService) old_server_key = 
self.server_key - self.name = record.name + self._name = record.name + self.key = record.key self.server = record.server self.server_key = record.server_key self.port = record.port @@ -577,7 +579,11 @@ def _get_address_records_from_cache_by_type(self, zc: 'Zeroconf', _type: int) -> """Get the addresses from the cache.""" if self.server_key is None: return [] - return cast("List[DNSAddress]", zc.cache.get_all_by_details(self.server_key, _type, _CLASS_IN)) + if TYPE_CHECKING: + records = cast("List[DNSAddress]", zc.cache.get_all_by_details(self.server_key, _type, _CLASS_IN)) + else: + records = zc.cache.get_all_by_details(self.server_key, _type, _CLASS_IN) + return records def set_server_if_missing(self) -> None: """Set the server if it is missing. From af192d38ea1035e53bc2154aeed493e96a8d08a2 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 2 Sep 2023 04:57:54 +0000 Subject: [PATCH 073/434] 0.91.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 63153755..64ceb710 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.91.1 (2023-09-02) + +### Fix + +* Remove useless calls in ServiceInfo ([#1248](https://github.com/python-zeroconf/python-zeroconf/issues/1248)) ([`4e40fae`](https://github.com/python-zeroconf/python-zeroconf/commit/4e40fae20bf50b4608e28fad4a360c4ed48ac86b)) + ## v0.91.0 (2023-09-02) ### Feature diff --git a/pyproject.toml b/pyproject.toml index bb5cf8c5..0589dc9d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.91.0" +version = "0.91.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index c41bd89c..bd658355 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.91.0' +__version__ = '0.91.1' __license__ = 'LGPL' From 0890f628dbbd577fb77d3e6f2e267052b2b2b515 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 2 Sep 2023 15:27:14 -0500 Subject: [PATCH 074/434] feat: cache construction of records used to answer queries from the service registry (#1243) --- src/zeroconf/_core.py | 16 ++--- src/zeroconf/_handlers/query_handler.py | 33 +++++----- src/zeroconf/_services/info.py | 81 ++++++++++++++++++------- tests/services/test_browser.py | 10 +++ tests/test_handlers.py | 1 + 5 files changed, 95 insertions(+), 46 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index aebcee34..0264f72a 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -400,7 +400,7 @@ def generate_service_query(self, info: ServiceInfo) -> DNSOutgoing: # pylint: d # # _CLASS_UNIQUE is the "QU" bit out.add_question(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN | _CLASS_UNIQUE)) - out.add_authorative_answer(info.dns_pointer(created=current_time_millis())) + out.add_authorative_answer(info.dns_pointer()) return out def _add_broadcast_answer( # pylint: disable=no-self-use @@ -411,14 +411,14 @@ def _add_broadcast_answer( # pylint: disable=no-self-use broadcast_addresses: bool = True, ) -> None: """Add answers to broadcast a service.""" - now = current_time_millis() - other_ttl = info.other_ttl if override_ttl is None else override_ttl - host_ttl = info.host_ttl if override_ttl is None else override_ttl - out.add_answer_at_time(info.dns_pointer(override_ttl=other_ttl, created=now), 0) - out.add_answer_at_time(info.dns_service(override_ttl=host_ttl, created=now), 0) - 
out.add_answer_at_time(info.dns_text(override_ttl=other_ttl, created=now), 0) + current_time_millis() + other_ttl = None if override_ttl is None else override_ttl + host_ttl = None if override_ttl is None else override_ttl + out.add_answer_at_time(info.dns_pointer(override_ttl=other_ttl), 0) + out.add_answer_at_time(info.dns_service(override_ttl=host_ttl), 0) + out.add_answer_at_time(info.dns_text(override_ttl=other_ttl), 0) if broadcast_addresses: - for record in info.get_address_and_nsec_records(override_ttl=host_ttl, created=now): + for record in info.get_address_and_nsec_records(override_ttl=host_ttl): out.add_answer_at_time(record, 0) def unregister_service(self, info: ServiceInfo) -> None: diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index e4b63508..cbb18eee 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -163,7 +163,7 @@ def __init__(self, registry: ServiceRegistry, cache: DNSCache, question_history: self.question_history = question_history def _add_service_type_enumeration_query_answers( - self, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, now: float + self, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet ) -> None: """Provide an answer to a service type enumeration query. 
@@ -171,32 +171,31 @@ def _add_service_type_enumeration_query_answers( """ for stype in self.registry.async_get_types(): dns_pointer = DNSPointer( - _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, now + _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, 0.0 ) if not known_answers.suppresses(dns_pointer): answer_set[dns_pointer] = set() def _add_pointer_answers( - self, lower_name: str, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, now: float + self, lower_name: str, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet ) -> None: """Answer PTR/ANY question.""" for service in self.registry.async_get_infos_type(lower_name): # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.1. - dns_pointer = service.dns_pointer(created=now) + dns_pointer = service.dns_pointer() if known_answers.suppresses(dns_pointer): continue answer_set[dns_pointer] = { - service.dns_service(created=now), - service.dns_text(created=now), - } | service.get_address_and_nsec_records(created=now) + service.dns_service(), + service.dns_text(), + } | service.get_address_and_nsec_records() def _add_address_answers( self, lower_name: str, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, - now: float, type_: int, ) -> None: """Answer A/AAAA/ANY question.""" @@ -204,7 +203,7 @@ def _add_address_answers( answers: List[DNSAddress] = [] additionals: Set[DNSRecord] = set() seen_types: Set[int] = set() - for dns_address in service.dns_addresses(created=now): + for dns_address in service.dns_addresses(): seen_types.add(dns_address.type) if dns_address.type != type_: additionals.add(dns_address) @@ -214,12 +213,12 @@ def _add_address_answers( if answers: if missing_types: assert service.server is not None, "Service server must be set for NSEC record." 
- additionals.add(service.dns_nsec(list(missing_types), created=now)) + additionals.add(service.dns_nsec(list(missing_types))) for answer in answers: answer_set[answer] = additionals elif type_ in missing_types: assert service.server is not None, "Service server must be set for NSEC record." - answer_set[service.dns_nsec(list(missing_types), created=now)] = set() + answer_set[service.dns_nsec(list(missing_types))] = set() def _answer_question( self, @@ -231,16 +230,16 @@ def _answer_question( question_lower_name = question.name.lower() if question.type == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: - self._add_service_type_enumeration_query_answers(answer_set, known_answers, now) + self._add_service_type_enumeration_query_answers(answer_set, known_answers) return answer_set type_ = question.type if type_ in (_TYPE_PTR, _TYPE_ANY): - self._add_pointer_answers(question_lower_name, answer_set, known_answers, now) + self._add_pointer_answers(question_lower_name, answer_set, known_answers) if type_ in (_TYPE_A, _TYPE_AAAA, _TYPE_ANY): - self._add_address_answers(question_lower_name, answer_set, known_answers, now, type_) + self._add_address_answers(question_lower_name, answer_set, known_answers, type_) if type_ in (_TYPE_SRV, _TYPE_TXT, _TYPE_ANY): service = self.registry.async_get_info_name(question_lower_name) @@ -248,11 +247,11 @@ def _answer_question( if type_ in (_TYPE_SRV, _TYPE_ANY): # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.2. 
- dns_service = service.dns_service(created=now) + dns_service = service.dns_service() if not known_answers.suppresses(dns_service): - answer_set[dns_service] = service.get_address_and_nsec_records(created=now) + answer_set[dns_service] = service.get_address_and_nsec_records() if type_ in (_TYPE_TXT, _TYPE_ANY): - dns_text = service.dns_text(created=now) + dns_text = service.dns_text() if not known_answers.suppresses(dns_text): answer_set[dns_text] = set() diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index a308fddb..7ca8d29b 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -133,6 +133,11 @@ class ServiceInfo(RecordUpdateListener): "other_ttl", "interface_index", "_new_records_futures", + "_dns_pointer_cache", + "_dns_service_cache", + "_dns_text_cache", + "_dns_address_cache", + "_get_address_and_nsec_records_cache", ) def __init__( @@ -180,6 +185,11 @@ def __init__( self.other_ttl = other_ttl self.interface_index = interface_index self._new_records_futures: Set[asyncio.Future] = set() + self._dns_address_cache: Optional[List[DNSAddress]] = None + self._dns_pointer_cache: Optional[DNSPointer] = None + self._dns_service_cache: Optional[DNSService] = None + self._dns_text_cache: Optional[DNSText] = None + self._get_address_and_nsec_records_cache: Optional[Set[DNSRecord]] = None @property def name(self) -> str: @@ -191,6 +201,9 @@ def name(self, name: str) -> None: """Replace the the name and reset the key.""" self._name = name self.key = name.lower() + self._dns_service_cache = None + self._dns_pointer_cache = None + self._dns_text_cache = None @property def addresses(self) -> List[bytes]: @@ -210,6 +223,8 @@ def addresses(self, value: List[bytes]) -> None: """ self._ipv4_addresses.clear() self._ipv6_addresses.clear() + self._dns_address_cache = None + self._get_address_and_nsec_records_cache = None for address in value: try: @@ -489,42 +504,56 @@ def dns_addresses( self, override_ttl: Optional[int] = 
None, version: IPVersion = IPVersion.All, - created: Optional[float] = None, ) -> List[DNSAddress]: """Return matching DNSAddress from ServiceInfo.""" + cacheable = version is IPVersion.All and override_ttl is None + if self._dns_address_cache is not None and cacheable: + return self._dns_address_cache name = self.server or self._name ttl = override_ttl if override_ttl is not None else self.host_ttl class_ = _CLASS_IN_UNIQUE version_value = version.value - return [ + records = [ DNSAddress( name, _TYPE_AAAA if type(ip_addr) is IPv6Address else _TYPE_A, class_, ttl, ip_addr.packed, - created=created, + created=0.0, ) for ip_addr in self._ip_addresses_by_version_value(version_value) ] + if cacheable: + self._dns_address_cache = records + return records - def dns_pointer(self, override_ttl: Optional[int] = None, created: Optional[float] = None) -> DNSPointer: + def dns_pointer(self, override_ttl: Optional[int] = None) -> DNSPointer: """Return DNSPointer from ServiceInfo.""" - return DNSPointer( + cacheable = override_ttl is None + if self._dns_pointer_cache is not None and cacheable: + return self._dns_pointer_cache + record = DNSPointer( self.type, _TYPE_PTR, _CLASS_IN, override_ttl if override_ttl is not None else self.other_ttl, self._name, - created, + 0.0, ) + if cacheable: + self._dns_pointer_cache = record + return record - def dns_service(self, override_ttl: Optional[int] = None, created: Optional[float] = None) -> DNSService: + def dns_service(self, override_ttl: Optional[int] = None) -> DNSService: """Return DNSService from ServiceInfo.""" + cacheable = override_ttl is None + if self._dns_service_cache is not None and cacheable: + return self._dns_service_cache port = self.port if TYPE_CHECKING: assert isinstance(port, int) - return DNSService( + record = DNSService( self._name, _TYPE_SRV, _CLASS_IN_UNIQUE, @@ -533,23 +562,30 @@ def dns_service(self, override_ttl: Optional[int] = None, created: Optional[floa self.weight, port, self.server or self._name, - 
created, + 0.0, ) + if cacheable: + self._dns_service_cache = record + return record - def dns_text(self, override_ttl: Optional[int] = None, created: Optional[float] = None) -> DNSText: + def dns_text(self, override_ttl: Optional[int] = None) -> DNSText: """Return DNSText from ServiceInfo.""" - return DNSText( + cacheable = override_ttl is None + if self._dns_text_cache is not None and cacheable: + return self._dns_text_cache + record = DNSText( self._name, _TYPE_TXT, _CLASS_IN_UNIQUE, override_ttl if override_ttl is not None else self.other_ttl, self.text, - created, + 0.0, ) + if cacheable: + self._dns_text_cache = record + return record - def dns_nsec( - self, missing_types: List[int], override_ttl: Optional[int] = None, created: Optional[float] = None - ) -> DNSNsec: + def dns_nsec(self, missing_types: List[int], override_ttl: Optional[int] = None) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return DNSNsec( self._name, @@ -558,21 +594,24 @@ def dns_nsec( override_ttl if override_ttl is not None else self.host_ttl, self._name, missing_types, - created, + 0.0, ) - def get_address_and_nsec_records( - self, override_ttl: Optional[int] = None, created: Optional[float] = None - ) -> Set[DNSRecord]: + def get_address_and_nsec_records(self, override_ttl: Optional[int] = None) -> Set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" + cacheable = override_ttl is None + if self._get_address_and_nsec_records_cache is not None and cacheable: + return self._get_address_and_nsec_records_cache missing_types: Set[int] = _ADDRESS_RECORD_TYPES.copy() records: Set[DNSRecord] = set() - for dns_address in self.dns_addresses(override_ttl, IPVersion.All, created): + for dns_address in self.dns_addresses(override_ttl, IPVersion.All): missing_types.discard(dns_address.type) records.add(dns_address) if missing_types: assert self.server is not None, "Service server must be set for NSEC record." 
- records.add(self.dns_nsec(list(missing_types), override_ttl, created)) + records.add(self.dns_nsec(list(missing_types), override_ttl)) + if cacheable: + self._get_address_and_nsec_records_cache = records return records def _get_address_records_from_cache_by_type(self, zc: 'Zeroconf', _type: int) -> List[DNSAddress]: diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index d269f85c..aa13761d 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -792,6 +792,8 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: assert service_info.port == 80 info.port = 400 + info._dns_service_cache = None # we are mutating the record so clear the cache + _inject_response( zc, mock_incoming_msg([info.dns_service()]), @@ -856,6 +858,8 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), ) time.sleep(0.2) + info._dns_service_cache = None # we are mutating the record so clear the cache + info.port = 400 _inject_response( zc, @@ -914,6 +918,8 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: ) time.sleep(0.2) info.port = 400 + info._dns_service_cache = None # we are mutating the record so clear the cache + _inject_response( zc, mock_incoming_msg([info.dns_service()]), @@ -1131,6 +1137,8 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: ) time.sleep(0.2) info.port = 400 + info._dns_service_cache = None # we are mutating the record so clear the cache + _inject_response( zc, mock_incoming_msg([info.dns_service()]), @@ -1210,6 +1218,8 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: ) time.sleep(0.3) info.port = 400 + info._dns_service_cache = None # we are mutating the record so clear the cache + _inject_response( zc, mock_incoming_msg([info.dns_service()]), diff --git a/tests/test_handlers.py 
b/tests/test_handlers.py index bdd16c3c..6266ad91 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -986,6 +986,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): a_record = info.dns_addresses()[0] a_record.set_created_ttl(current_time_millis() - (a_record.ttl * 1000 / 2), a_record.ttl) assert not a_record.is_recent(current_time_millis()) + info._dns_address_cache = None # we are mutating the record so clear the cache zc.cache.async_add_records([a_record]) # With QU should respond to only unicast when the answer has been recently multicast From 318094c0d6f8333fd5ed8bf6bfe8ecef606ef8c6 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 2 Sep 2023 20:35:18 +0000 Subject: [PATCH 075/434] 0.92.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 64ceb710..847e6acf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.92.0 (2023-09-02) + +### Feature + +* Cache construction of records used to answer queries from the service registry ([#1243](https://github.com/python-zeroconf/python-zeroconf/issues/1243)) ([`0890f62`](https://github.com/python-zeroconf/python-zeroconf/commit/0890f628dbbd577fb77d3e6f2e267052b2b2b515)) + ## v0.91.1 (2023-09-02) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 0589dc9d..5677aab6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.91.1" +version = "0.92.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index bd658355..56fd4bed 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.91.1' +__version__ = '0.92.0' __license__ = 'LGPL' From 7cb8da0c6c5c944588009fe36012c1197c422668 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 2 Sep 2023 17:19:26 -0500 Subject: [PATCH 076/434] feat: reduce overhead to answer questions (#1250) --- src/zeroconf/_handlers/query_handler.py | 33 ++++++++++++++----------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index cbb18eee..b232ea49 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -21,7 +21,7 @@ """ -from typing import TYPE_CHECKING, List, Set, cast +from typing import TYPE_CHECKING, List, Optional, Set, cast from .._cache import DNSCache, _UniqueRecordsType from .._dns import DNSAddress, DNSPointer, DNSQuestion, DNSRecord, DNSRRSet @@ -109,19 +109,20 @@ def add_mcast_question_response(self, answers: _AnswerWithAdditionalsType) -> No else: self._mcast_aggregate.add(answer) - def _generate_answers_with_additionals(self, rrset: Set[DNSRecord]) -> _AnswerWithAdditionalsType: - """Create answers with additionals from an rrset.""" - return {record: self._additionals[record] for record in rrset} - def answers( self, ) -> QuestionAnswers: """Return answer sets that will be queued.""" return QuestionAnswers( - self._generate_answers_with_additionals(self._ucast), - self._generate_answers_with_additionals(self._mcast_now), - self._generate_answers_with_additionals(self._mcast_aggregate), - self._generate_answers_with_additionals(self._mcast_aggregate_last_second), + *( + {record: self._additionals[record] for record in rrset} + for rrset in ( + 
self._ucast, + self._mcast_now, + self._mcast_aggregate, + self._mcast_aggregate_last_second, + ) + ) ) def _has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: @@ -224,17 +225,16 @@ def _answer_question( self, question: DNSQuestion, known_answers: DNSRRSet, - now: float, ) -> _AnswerWithAdditionalsType: + """Answer a question.""" answer_set: _AnswerWithAdditionalsType = {} question_lower_name = question.name.lower() + type_ = question.type - if question.type == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: + if type_ == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: self._add_service_type_enumeration_query_answers(answer_set, known_answers) return answer_set - type_ = question.type - if type_ in (_TYPE_PTR, _TYPE_ANY): self._add_pointer_answers(question_lower_name, answer_set, known_answers) @@ -267,12 +267,15 @@ def async_response( # pylint: disable=unused-argument """ known_answers = DNSRRSet([msg.answers for msg in msgs if not msg.is_probe]) query_res = _QueryResponse(self.cache, msgs) + known_answers_set: Optional[Set[DNSRecord]] = None for msg in msgs: for question in msg.questions: if not question.unicast: - self.question_history.add_question_at_time(question, msg.now, set(known_answers.lookup)) - answer_set = self._answer_question(question, known_answers, msg.now) + if not known_answers_set: # pragma: no branch + known_answers_set = set(known_answers.lookup) + self.question_history.add_question_at_time(question, msg.now, known_answers_set) + answer_set = self._answer_question(question, known_answers) if not ucast_source and question.unicast: query_res.add_qu_question_response(answer_set) continue From 2b6056f199042259fe0e01e038cdd93689dcbc13 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 2 Sep 2023 22:50:28 +0000 Subject: [PATCH 077/434] 0.93.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files 
changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 847e6acf..0ab0348e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.93.0 (2023-09-02) + +### Feature + +* Reduce overhead to answer questions ([#1250](https://github.com/python-zeroconf/python-zeroconf/issues/1250)) ([`7cb8da0`](https://github.com/python-zeroconf/python-zeroconf/commit/7cb8da0c6c5c944588009fe36012c1197c422668)) + ## v0.92.0 (2023-09-02) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 5677aab6..edb50126 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.92.0" +version = "0.93.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 56fd4bed..a4b6db20 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.92.0' +__version__ = '0.93.0' __license__ = 'LGPL' From 730921b155dfb9c62251c8c643b1302e807aff3b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 2 Sep 2023 19:33:10 -0500 Subject: [PATCH 078/434] fix: no change re-release due to unrecoverable failed CI run (#1251) From 235d52877b8d959efb653e46daff684c53fa4e4d Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 3 Sep 2023 00:42:04 +0000 Subject: [PATCH 079/434] 0.93.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ab0348e..5593d1e8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.93.1 (2023-09-03) + +### Fix + +* No change re-release due to unrecoverable failed CI run ([#1251](https://github.com/python-zeroconf/python-zeroconf/issues/1251)) ([`730921b`](https://github.com/python-zeroconf/python-zeroconf/commit/730921b155dfb9c62251c8c643b1302e807aff3b)) + ## v0.93.0 (2023-09-02) ### Feature diff --git a/pyproject.toml b/pyproject.toml index edb50126..0dd86882 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.93.0" +version = "0.93.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index a4b6db20..ce4f7820 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.93.0' +__version__ = '0.93.1' __license__ = 'LGPL' From 8d3ec792277aaf7ef790318b5b35ab00839ca3b3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 3 Sep 2023 11:35:39 -0500 Subject: [PATCH 080/434] feat: optimize cache implementation (#1252) --- src/zeroconf/_cache.pxd | 25 +++++++++++++++++++----- src/zeroconf/_cache.py | 25 ++++++------------------ src/zeroconf/_handlers/record_manager.py | 5 +++-- 3 files changed, 29 insertions(+), 26 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index 6bc9ea5d..3ffe0800 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -4,6 +4,7 @@ from ._dns cimport ( DNSAddress, DNSEntry, DNSHinfo, + DNSNsec, DNSPointer, DNSRecord, DNSService, @@ -13,7 +14,7 @@ from ._dns cimport ( cdef object _UNIQUE_RECORD_TYPES cdef object _TYPE_PTR -cdef object _ONE_SECOND +cdef cython.uint _ONE_SECOND cdef _remove_key(cython.dict cache, object key, DNSRecord record) @@ -27,23 +28,37 @@ cdef class DNSCache: cpdef async_remove_records(self, object entries) + @cython.locals( + store=cython.dict, + ) cpdef async_get_unique(self, DNSRecord entry) + @cython.locals( + record=DNSRecord, + ) + cpdef async_expire(self, float now) + @cython.locals( records=cython.dict, record=DNSRecord, ) - cdef _async_all_by_details(self, object name, object type_, object class_) + cpdef async_all_by_details(self, str name, object type_, object class_) + cpdef async_entries_with_name(self, str name) + + cpdef async_entries_with_server(self, str name) + + @cython.locals( + store=cython.dict, + ) cdef _async_add(self, DNSRecord record) cdef _async_remove(self, DNSRecord record) - cpdef async_mark_unique_records_older_than_1s_to_expire(self, object unique_types, object answers, object now) - @cython.locals( record=DNSRecord, + created_float=cython.float, ) - cdef _async_mark_unique_records_older_than_1s_to_expire(self, object unique_types, object answers, object now) + cpdef async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, float now) cdef _dns_record_matches(DNSRecord record, object key, object type_, object 
class_) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index ad339cd5..b1e6df38 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -20,7 +20,6 @@ USA """ -import itertools from typing import Dict, Iterable, List, Optional, Set, Tuple, Union, cast from ._dns import ( @@ -115,12 +114,12 @@ def async_remove_records(self, entries: Iterable[DNSRecord]) -> None: for entry in entries: self._async_remove(entry) - def async_expire(self, now: float) -> List[DNSRecord]: + def async_expire(self, now: _float) -> List[DNSRecord]: """Purge expired entries from the cache. This function must be run in from event loop. """ - expired = [record for record in itertools.chain(*self.cache.values()) if record.is_expired(now)] + expired = [record for records in self.cache.values() for record in records if record.is_expired(now)] self.async_remove_records(expired) return expired @@ -136,15 +135,7 @@ def async_get_unique(self, entry: _UniqueRecordsType) -> Optional[DNSRecord]: return None return store.get(entry) - def async_all_by_details(self, name: _str, type_: int, class_: int) -> Iterable[DNSRecord]: - """Gets all matching entries by details. - - This function is not thread-safe and must be called from - the event loop. - """ - return self._async_all_by_details(name, type_, class_) - - def _async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DNSRecord]: + def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DNSRecord]: """Gets all matching entries by details. 
This function is not thread-safe and must be called from @@ -240,11 +231,6 @@ def names(self) -> List[str]: def async_mark_unique_records_older_than_1s_to_expire( self, unique_types: Set[Tuple[_str, _int, _int]], answers: Iterable[DNSRecord], now: _float - ) -> None: - self._async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now) - - def _async_mark_unique_records_older_than_1s_to_expire( - self, unique_types: Set[Tuple[_str, _int, _int]], answers: Iterable[DNSRecord], now: _float ) -> None: # rfc6762#section-10.2 para 2 # Since unique is set, all old records with that name, rrtype, @@ -252,8 +238,9 @@ def _async_mark_unique_records_older_than_1s_to_expire( # invalid, and marked to expire from the cache in one second. answers_rrset = set(answers) for name, type_, class_ in unique_types: - for record in self._async_all_by_details(name, type_, class_): - if (now - record.created > _ONE_SECOND) and record not in answers_rrset: + for record in self.async_all_by_details(name, type_, class_): + created_float = record.created + if (now - created_float > _ONE_SECOND) and record not in answers_rrset: # Expire in 1s record.set_created_ttl(now, 1) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 5e4f7c9b..dcbe5e91 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -86,8 +86,9 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: now_float = now unique_types: Set[Tuple[str, int, int]] = set() cache = self.cache + answers = msg.answers - for record in msg.answers: + for record in answers: # Protect zeroconf from records that can cause denial of service. 
# # We enforce a minimum TTL for PTR records to avoid @@ -127,7 +128,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: removes.add(record) if unique_types: - cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, msg.answers, now) + cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now) if updates: self.async_updates(now, updates) From 72d6886642820e1976aba93c57431e1b7b8789bf Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 3 Sep 2023 16:44:32 +0000 Subject: [PATCH 081/434] 0.94.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5593d1e8..d4232448 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.94.0 (2023-09-03) + +### Feature + +* Optimize cache implementation ([#1252](https://github.com/python-zeroconf/python-zeroconf/issues/1252)) ([`8d3ec79`](https://github.com/python-zeroconf/python-zeroconf/commit/8d3ec792277aaf7ef790318b5b35ab00839ca3b3)) + ## v0.93.1 (2023-09-03) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 0dd86882..11ef8c86 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.93.1" +version = "0.94.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ce4f7820..9eee273c 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.93.1' +__version__ = '0.94.0' __license__ = 'LGPL' From 22e4a296d440b3038c0ff5ed6fc8878304ec4937 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 3 Sep 2023 12:26:34 -0500 Subject: [PATCH 082/434] feat: speed up adding and removing RecordUpdateListeners (#1253) --- src/zeroconf/_core.py | 2 +- src/zeroconf/_handlers/record_manager.pxd | 7 ++++++- src/zeroconf/_handlers/record_manager.py | 4 ++-- src/zeroconf/_services/info.py | 4 ---- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 0264f72a..40375484 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -240,7 +240,7 @@ async def async_wait_for_start(self) -> None: raise NotRunningException @property - def listeners(self) -> List[RecordUpdateListener]: + def listeners(self) -> Set[RecordUpdateListener]: return self.record_manager.listeners async def async_wait(self, timeout: float) -> None: diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index 7a55e64f..e0792d72 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -12,11 +12,12 @@ cdef object RecordUpdate cdef object TYPE_CHECKING cdef object _TYPE_PTR + cdef class RecordManager: cdef public object zc cdef public DNSCache cache - cdef public cython.list listeners + cdef public cython.set listeners cpdef async_updates(self, object now, object records) @@ -29,3 +30,7 @@ cdef class RecordManager: now_float=cython.float ) cpdef async_updates_from_response(self, DNSIncoming msg) + + cpdef async_add_listener(self, object listener, object question) + + cpdef async_remove_listener(self, object listener) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index dcbe5e91..586fba0b 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -45,7 +45,7 @@ def __init__(self, zeroconf: 'Zeroconf') -> None: """Init the record manager.""" self.zc = zeroconf self.cache = zeroconf.cache - self.listeners: List[RecordUpdateListener] = [] 
+ self.listeners: Set[RecordUpdateListener] = set() def async_updates(self, now: _float, records: List[RecordUpdate]) -> None: """Used to notify listeners of new information that has updated @@ -175,7 +175,7 @@ def async_add_listener( " In the future this will fail" ) - self.listeners.append(listener) + self.listeners.add(listener) if question is None: return diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 7ca8d29b..1ffd9570 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -758,10 +758,6 @@ def generate_request_query( question.unicast = True return out - def __eq__(self, other: object) -> bool: - """Tests equality of service name""" - return isinstance(other, ServiceInfo) and other._name == self._name - def __repr__(self) -> str: """String representation""" return '{}({})'.format( From 46adba9f432edce96e2850840d9db4ca0dbd0510 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 3 Sep 2023 17:46:38 +0000 Subject: [PATCH 083/434] 0.95.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d4232448..13c6640d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.95.0 (2023-09-03) + +### Feature + +* Speed up adding and removing RecordUpdateListeners ([#1253](https://github.com/python-zeroconf/python-zeroconf/issues/1253)) ([`22e4a29`](https://github.com/python-zeroconf/python-zeroconf/commit/22e4a296d440b3038c0ff5ed6fc8878304ec4937)) + ## v0.94.0 (2023-09-03) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 11ef8c86..3922c9c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.94.0" +version = "0.95.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub 
Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 9eee273c..aa5dae76 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.94.0' +__version__ = '0.95.0' __license__ = 'LGPL' From ce59787a170781ffdaa22425018d288b395ac081 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 3 Sep 2023 13:36:04 -0500 Subject: [PATCH 084/434] feat: optimize DNSCache.get_by_details (#1254) * feat: optimize DNSCache.get_by_details This is one of the most called functions since ServiceInfo.load_from_cache calls it * fix: make get_all_by_details thread-safe * fix: remove unneeded key checks --- src/zeroconf/_cache.pxd | 13 +++++++++++-- src/zeroconf/_cache.py | 24 ++++++++++++------------ 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index 3ffe0800..cdba8176 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -48,6 +48,17 @@ cdef class DNSCache: cpdef async_entries_with_server(self, str name) + @cython.locals( + cached_entry=DNSRecord, + ) + cpdef get_by_details(self, str name, object type_, object class_) + + @cython.locals( + records=cython.dict, + entry=DNSRecord, + ) + cpdef get_all_by_details(self, str name, object type_, object class_) + @cython.locals( store=cython.dict, ) @@ -60,5 +71,3 @@ cdef class DNSCache: created_float=cython.float, ) cpdef async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, float now) - -cdef _dns_record_matches(DNSRecord record, object key, object type_, object class_) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index b1e6df38..83206e79 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -147,7 +147,7 @@ def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DN if records 
is None: return matches for record in records: - if _dns_record_matches(record, key, type_, class_): + if type_ == record.type and class_ == record.class_: matches.append(record) return matches @@ -181,7 +181,7 @@ def get(self, entry: DNSEntry) -> Optional[DNSRecord]: return cached_entry return None - def get_by_details(self, name: str, type_: int, class_: int) -> Optional[DNSRecord]: + def get_by_details(self, name: str, type_: _int, class_: _int) -> Optional[DNSRecord]: """Gets the first matching entry by details. Returns None if no entries match. Calling this function is not recommended as it will only @@ -194,17 +194,21 @@ def get_by_details(self, name: str, type_: int, class_: int) -> Optional[DNSReco Use get_all_by_details instead. """ key = name.lower() - for cached_entry in reversed(list(self.cache.get(key, []))): - if _dns_record_matches(cached_entry, key, type_, class_): + records = self.cache.get(key) + if records is None: + return None + for cached_entry in reversed(list(records)): + if type_ == cached_entry.type and class_ == cached_entry.class_: return cached_entry return None - def get_all_by_details(self, name: str, type_: int, class_: int) -> List[DNSRecord]: + def get_all_by_details(self, name: str, type_: _int, class_: _int) -> List[DNSRecord]: """Gets all matching entries by details.""" key = name.lower() - return [ - entry for entry in list(self.cache.get(key, [])) if _dns_record_matches(entry, key, type_, class_) - ] + records = self.cache.get(key) + if records is None: + return [] + return [entry for entry in list(records) if type_ == entry.type and class_ == entry.class_] def entries_with_server(self, server: str) -> List[DNSRecord]: """Returns a list of entries whose server matches the name.""" @@ -243,7 +247,3 @@ def async_mark_unique_records_older_than_1s_to_expire( if (now - created_float > _ONE_SECOND) and record not in answers_rrset: # Expire in 1s record.set_created_ttl(now, 1) - - -def _dns_record_matches(record: _DNSRecord, key: 
_str, type_: _int, class_: _int) -> bool: - return key == record.key and type_ == record.type and class_ == record.class_ From 5c884b0b93be1afdc7ac363ee4df60da5111d0ff Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 3 Sep 2023 18:43:43 +0000 Subject: [PATCH 085/434] 0.96.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 13c6640d..d4fb7fd1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.96.0 (2023-09-03) + +### Feature + +* Optimize DNSCache.get_by_details ([#1254](https://github.com/python-zeroconf/python-zeroconf/issues/1254)) ([`ce59787`](https://github.com/python-zeroconf/python-zeroconf/commit/ce59787a170781ffdaa22425018d288b395ac081)) + ## v0.95.0 (2023-09-03) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 3922c9c8..f454d3fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.95.0" +version = "0.96.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index aa5dae76..fc47b0f1 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.95.0' +__version__ = '0.96.0' __license__ = 'LGPL' From 2d3aed36e24c73013fcf4acc90803fc1737d0917 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 3 Sep 2023 18:03:18 -0500 Subject: [PATCH 086/434] feat: speed up answering queries (#1255) --- build_ext.py | 1 + src/zeroconf/_handlers/query_handler.pxd | 64 ++++++++++++++++++++++++ src/zeroconf/_handlers/query_handler.py | 20 +++----- src/zeroconf/_history.pxd | 6 ++- src/zeroconf/_services/registry.pxd | 8 +++ 5 files changed, 85 insertions(+), 14 deletions(-) create mode 100644 src/zeroconf/_handlers/query_handler.pxd diff --git a/build_ext.py b/build_ext.py index 8e4e7b99..55d76d3c 100644 --- a/build_ext.py +++ b/build_ext.py @@ -30,6 +30,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_protocol/incoming.py", "src/zeroconf/_protocol/outgoing.py", "src/zeroconf/_handlers/record_manager.py", + "src/zeroconf/_handlers/query_handler.py", "src/zeroconf/_services/registry.py", "src/zeroconf/_utils/time.py", ], diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd new file mode 100644 index 00000000..3457128c --- /dev/null +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -0,0 +1,64 @@ + +import cython + +from .._cache cimport DNSCache +from .._dns cimport DNSPointer, DNSQuestion, DNSRecord, DNSRRSet +from .._history cimport QuestionHistory +from .._protocol.incoming cimport DNSIncoming +from .._services.registry cimport ServiceRegistry + + +cdef object TYPE_CHECKING, QuestionAnswers +cdef cython.uint _ONE_SECOND, _TYPE_PTR, _TYPE_ANY, _TYPE_A, _TYPE_AAAA, _TYPE_SRV, _TYPE_TXT +cdef str _SERVICE_TYPE_ENUMERATION_NAME +cdef cython.set _RESPOND_IMMEDIATE_TYPES + +cdef class _QueryResponse: + + cdef object _is_probe + cdef DNSIncoming _msg + cdef float _now + cdef DNSCache _cache + cdef cython.dict _additionals + cdef cython.set _ucast + cdef cython.set _mcast_now + cdef cython.set _mcast_aggregate + cdef cython.set _mcast_aggregate_last_second + + cpdef add_qu_question_response(self, cython.dict answers) + + cpdef add_ucast_question_response(self, cython.dict answers) + + cpdef 
add_mcast_question_response(self, cython.dict answers) + + @cython.locals(maybe_entry=DNSRecord) + cpdef _has_mcast_within_one_quarter_ttl(self, DNSRecord record) + + @cython.locals(maybe_entry=DNSRecord) + cpdef _has_mcast_record_in_last_second(self, DNSRecord record) + + cpdef answers(self) + +cdef class QueryHandler: + + cdef ServiceRegistry registry + cdef DNSCache cache + cdef QuestionHistory question_history + + cdef _add_service_type_enumeration_query_answers(self, cython.dict answer_set, DNSRRSet known_answers) + + cdef _add_pointer_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers) + + cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) + + @cython.locals(question_lower_name=str, type_=cython.uint) + cdef _answer_question(self, DNSQuestion question, DNSRRSet known_answers) + + @cython.locals( + msg=DNSIncoming, + question=DNSQuestion, + answer_set=cython.dict, + known_answers=DNSRRSet, + known_answers_set=cython.set, + ) + cpdef async_response(self, cython.list msgs, object unicast_source) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index b232ea49..34fde547 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -46,6 +46,8 @@ _RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} +_int = int + class _QueryResponse: """A pair for unicast and multicast DNSOutgoing responses.""" @@ -113,17 +115,11 @@ def answers( self, ) -> QuestionAnswers: """Return answer sets that will be queued.""" - return QuestionAnswers( - *( - {record: self._additionals[record] for record in rrset} - for rrset in ( - self._ucast, - self._mcast_now, - self._mcast_aggregate, - self._mcast_aggregate_last_second, - ) - ) - ) + ucast = {r: self._additionals[r] for r in self._ucast} + mcast_now = {r: self._additionals[r] for r in self._mcast_now} + mcast_aggregate = {r: 
self._additionals[r] for r in self._mcast_aggregate} + mcast_aggregate_last_second = {r: self._additionals[r] for r in self._mcast_aggregate_last_second} + return QuestionAnswers(ucast, mcast_now, mcast_aggregate, mcast_aggregate_last_second) def _has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: """Check to see if a record has been mcasted recently. @@ -197,7 +193,7 @@ def _add_address_answers( lower_name: str, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, - type_: int, + type_: _int, ) -> None: """Answer A/AAAA/ANY question.""" for service in self.registry.async_get_infos_server(lower_name): diff --git a/src/zeroconf/_history.pxd b/src/zeroconf/_history.pxd index 6e4e374f..d4e1c833 100644 --- a/src/zeroconf/_history.pxd +++ b/src/zeroconf/_history.pxd @@ -1,5 +1,7 @@ import cython +from ._dns cimport DNSQuestion + cdef cython.double _DUPLICATE_QUESTION_INTERVAL @@ -7,10 +9,10 @@ cdef class QuestionHistory: cdef cython.dict _history + cpdef add_question_at_time(self, DNSQuestion question, float now, cython.set known_answers) @cython.locals(than=cython.double, previous_question=cython.tuple, previous_known_answers=cython.set) - cpdef suppresses(self, object question, cython.double now, cython.set known_answers) - + cpdef suppresses(self, DNSQuestion question, cython.double now, cython.set known_answers) @cython.locals(than=cython.double, now_known_answers=cython.tuple) cpdef async_expire(self, cython.double now) diff --git a/src/zeroconf/_services/registry.pxd b/src/zeroconf/_services/registry.pxd index 722ef0ec..a741b93a 100644 --- a/src/zeroconf/_services/registry.pxd +++ b/src/zeroconf/_services/registry.pxd @@ -16,3 +16,11 @@ cdef class ServiceRegistry: cdef _add(self, object info) cdef _remove(self, cython.list infos) + + cpdef async_get_info_name(self, str name) + + cpdef async_get_types(self) + + cpdef async_get_infos_type(self, str type_) + + cpdef async_get_infos_server(self, str server) From 
aebabd9f8486e4f7f6e79e8a5b5eed1da6c900c3 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 3 Sep 2023 23:11:15 +0000 Subject: [PATCH 087/434] 0.97.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d4fb7fd1..7f4a7531 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.97.0 (2023-09-03) + +### Feature + +* Speed up answering queries ([#1255](https://github.com/python-zeroconf/python-zeroconf/issues/1255)) ([`2d3aed3`](https://github.com/python-zeroconf/python-zeroconf/commit/2d3aed36e24c73013fcf4acc90803fc1737d0917)) + ## v0.96.0 (2023-09-03) ### Feature diff --git a/pyproject.toml b/pyproject.toml index f454d3fc..62138aec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.96.0" +version = "0.97.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index fc47b0f1..1cc1df87 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.96.0' +__version__ = '0.97.0' __license__ = 'LGPL' From ac081cf00addde1ceea2c076f73905fdb293de3a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 6 Sep 2023 10:47:14 -0500 Subject: [PATCH 088/434] feat: speed up decoding incoming packets (#1256) --- src/zeroconf/_protocol/incoming.pxd | 15 ++++++++++----- src/zeroconf/_protocol/incoming.py | 26 +++++++++++++++----------- 2 files changed, 25 insertions(+), 16 deletions(-) diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index 604b1e30..ebd09a0e 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -7,8 +7,6 @@ cdef cython.uint MAX_DNS_LABELS cdef cython.uint DNS_COMPRESSION_POINTER_LEN cdef cython.uint MAX_NAME_LENGTH -cdef object current_time_millis - cdef cython.uint _TYPE_A cdef cython.uint _TYPE_CNAME cdef cython.uint _TYPE_PTR @@ -43,6 +41,7 @@ from .._dns cimport ( DNSService, DNSText, ) +from .._utils.time cimport current_time_millis cdef class DNSIncoming: @@ -62,6 +61,7 @@ cdef class DNSIncoming: cdef public cython.uint num_additionals cdef public object valid cdef public object now + cdef cython.float _now_float cdef public object scope_id cdef public object source @@ -79,7 +79,9 @@ cdef class DNSIncoming: label_idx=cython.uint, length=cython.uint, link=cython.uint, - link_data=cython.uint + link_data=cython.uint, + link_py_int=object, + linked_labels=cython.list ) cdef _decode_labels_at_offset(self, unsigned int off, cython.list labels, cython.set seen_pointers) @@ -95,9 +97,12 @@ cdef class DNSIncoming: cdef _read_questions(self) - cdef bytes _read_character_string(self) + @cython.locals( + length=cython.uint, + ) + cdef str _read_character_string(self) - cdef _read_string(self, unsigned int length) + cdef bytes _read_string(self, unsigned int length) @cython.locals( name_start=cython.uint diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 352a6141..87d25816 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -89,6 +89,7 @@ class DNSIncoming: 'num_additionals', 
'valid', 'now', + '_now_float', 'scope_id', 'source', ) @@ -116,6 +117,7 @@ def __init__( self.valid = False self._did_read_others = False self.now = now or current_time_millis() + self._now_float = self.now self.source = source self.scope_id = scope_id try: @@ -226,11 +228,13 @@ def _read_questions(self) -> None: question = DNSQuestion(name, type_, class_) self.questions.append(question) - def _read_character_string(self) -> bytes: + def _read_character_string(self) -> str: """Reads a character string from the packet""" length = self.data[self.offset] self.offset += 1 - return self._read_string(length) + info = self.data[self.offset : self.offset + length].decode('utf-8', 'replace') + self.offset += length + return info def _read_string(self, length: _int) -> bytes: """Reads a string of a given length from the packet""" @@ -273,7 +277,7 @@ def _read_record( """Read known records types and skip unknown ones.""" if type_ == _TYPE_A: dns_address = DNSAddress(domain, type_, class_, ttl, self._read_string(4)) - dns_address.created = self.now + dns_address.created = self._now_float return dns_address if type_ in (_TYPE_CNAME, _TYPE_PTR): return DNSPointer(domain, type_, class_, ttl, self._read_name(), self.now) @@ -299,13 +303,13 @@ def _read_record( type_, class_, ttl, - self._read_character_string().decode('utf-8', 'replace'), - self._read_character_string().decode('utf-8', 'replace'), + self._read_character_string(), + self._read_character_string(), self.now, ) if type_ == _TYPE_AAAA: dns_address = DNSAddress(domain, type_, class_, ttl, self._read_string(16)) - dns_address.created = self.now + dns_address.created = self._now_float dns_address.scope_id = self.scope_id return dns_address if type_ == _TYPE_NSEC: @@ -377,7 +381,7 @@ def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: # We have a DNS compression pointer link_data = self.data[off + 1] link = (length & 0x3F) * 256 + link_data - lint_int = int(link) + link_py_int = link if link > 
self._data_len: raise IncomingDecodeError( f"DNS compression pointer at {off} points to {link} beyond packet from {self.source}" @@ -386,16 +390,16 @@ def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: raise IncomingDecodeError( f"DNS compression pointer at {off} points to itself from {self.source}" ) - if lint_int in seen_pointers: + if link_py_int in seen_pointers: raise IncomingDecodeError( f"DNS compression pointer at {off} was seen again from {self.source}" ) - linked_labels = self.name_cache.get(lint_int) + linked_labels = self.name_cache.get(link_py_int) if not linked_labels: linked_labels = [] - seen_pointers.add(lint_int) + seen_pointers.add(link_py_int) self._decode_labels_at_offset(link, linked_labels, seen_pointers) - self.name_cache[lint_int] = linked_labels + self.name_cache[link_py_int] = linked_labels labels.extend(linked_labels) if len(labels) > MAX_DNS_LABELS: raise IncomingDecodeError( From 3a53e79fda52e124bb96f1df4ddbe85ebe429311 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 6 Sep 2023 15:55:54 +0000 Subject: [PATCH 089/434] 0.98.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f4a7531..8295fd66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.98.0 (2023-09-06) + +### Feature + +* Speed up decoding incoming packets ([#1256](https://github.com/python-zeroconf/python-zeroconf/issues/1256)) ([`ac081cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac081cf00addde1ceea2c076f73905fdb293de3a)) + ## v0.97.0 (2023-09-03) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 62138aec..79cf3cfa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.97.0" +version = "0.98.0" description = "A pure python implementation of multicast DNS service 
discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 1cc1df87..9f49ac79 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.97.0' +__version__ = '0.98.0' __license__ = 'LGPL' From 83d0b7fda2eb09c9c6e18b85f329d1ddc701e3fb Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 6 Sep 2023 14:54:54 -0500 Subject: [PATCH 090/434] feat: reduce IP Address parsing overhead in ServiceInfo (#1257) --- src/zeroconf/_services/info.py | 39 +++++++++++++++++++++------------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 1ffd9570..517b41be 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -91,7 +91,13 @@ def instance_name_from_service_info(info: "ServiceInfo", strict: bool = True) -> return info.name[: -len(service_name) - 1] -_cached_ip_addresses = lru_cache(maxsize=256)(ip_address) +@lru_cache(maxsize=512) +def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4Address, IPv6Address]]: + """Cache IP addresses.""" + try: + return ip_address(address) + except ValueError: + return None class ServiceInfo(RecordUpdateListener): @@ -227,16 +233,19 @@ def addresses(self, value: List[bytes]) -> None: self._get_address_and_nsec_records_cache = None for address in value: - try: - addr = _cached_ip_addresses(address) - except ValueError: + addr = _cached_ip_addresses(address) + if addr is None: raise TypeError( "Addresses must either be IPv4 or IPv6 strings, bytes, or integers;" f" got {address!r}. 
Hint: convert string addresses with socket.inet_pton" ) if addr.version == 4: + if TYPE_CHECKING: + assert isinstance(addr, IPv4Address) self._ipv4_addresses.append(addr) else: + if TYPE_CHECKING: + assert isinstance(addr, IPv6Address) self._ipv6_addresses.append(addr) @property @@ -394,11 +403,8 @@ def _get_ip_addresses_from_cache_lifo( for record in self._get_address_records_from_cache_by_type(zc, type): if record.is_expired(now): continue - try: - ip_addr = _cached_ip_addresses(record.address) - except ValueError: - continue - else: + ip_addr = _cached_ip_addresses(record.address) + if ip_addr is not None: address_list.append(ip_addr) address_list.reverse() # Reverse to get LIFO order return address_list @@ -446,13 +452,14 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo if record_key == self.server_key and record_type is DNSAddress: if TYPE_CHECKING: assert isinstance(record, DNSAddress) - try: - ip_addr = _cached_ip_addresses(record.address) - except ValueError as ex: - log.warning("Encountered invalid address while processing %s: %s", record, ex) + ip_addr = _cached_ip_addresses(record.address) + if ip_addr is None: + log.warning("Encountered invalid address while processing %s: %s", record, record.address) return False - if type(ip_addr) is IPv4Address: + if ip_addr.version == 4: + if TYPE_CHECKING: + assert isinstance(ip_addr, IPv4Address) ipv4_addresses = self._ipv4_addresses if ip_addr not in ipv4_addresses: ipv4_addresses.insert(0, ip_addr) @@ -463,6 +470,8 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo return False + if TYPE_CHECKING: + assert isinstance(ip_addr, IPv6Address) ipv6_addresses = self._ipv6_addresses if ip_addr not in self._ipv6_addresses: ipv6_addresses.insert(0, ip_addr) @@ -516,7 +525,7 @@ def dns_addresses( records = [ DNSAddress( name, - _TYPE_AAAA if type(ip_addr) is IPv6Address else _TYPE_A, + _TYPE_AAAA if ip_addr.version == 6 else _TYPE_A, class_, ttl, 
ip_addr.packed, From bb743094e469452c39972bd518985475454b7c43 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 6 Sep 2023 20:03:59 +0000 Subject: [PATCH 091/434] 0.99.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8295fd66..568a209f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.99.0 (2023-09-06) + +### Feature + +* Reduce IP Address parsing overhead in ServiceInfo ([#1257](https://github.com/python-zeroconf/python-zeroconf/issues/1257)) ([`83d0b7f`](https://github.com/python-zeroconf/python-zeroconf/commit/83d0b7fda2eb09c9c6e18b85f329d1ddc701e3fb)) + ## v0.98.0 (2023-09-06) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 79cf3cfa..73120ef0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.98.0" +version = "0.99.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 9f49ac79..d69e5c9e 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.98.0' +__version__ = '0.99.0' __license__ = 'LGPL' From 1ed6bd2ec4db0612b71384f923ffff1efd3ce878 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 7 Sep 2023 13:05:58 -0500 Subject: [PATCH 092/434] feat: small speed up to writing outgoing dns records (#1258) --- src/zeroconf/_protocol/outgoing.pxd | 38 ++++++++++++++++++++++------- src/zeroconf/_protocol/outgoing.py | 12 +++++---- 2 files changed, 36 insertions(+), 14 deletions(-) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index e7da04b3..0338cfd8 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -1,7 +1,8 @@ import cython -from .._dns cimport DNSEntry, DNSQuestion, DNSRecord +from .._cache cimport DNSCache +from .._dns cimport DNSEntry, DNSPointer, DNSQuestion, DNSRecord from .incoming cimport DNSIncoming @@ -14,6 +15,7 @@ cdef cython.uint _FLAGS_TC cdef cython.uint _MAX_MSG_ABSOLUTE cdef cython.uint _MAX_MSG_TYPICAL +cdef object TYPE_CHECKING cdef class DNSOutgoing: @@ -70,13 +72,31 @@ cdef class DNSOutgoing: cpdef write_short(self, object value) @cython.locals( - questions_offset=cython.uint, - answer_offset=cython.uint, - authority_offset=cython.uint, - additional_offset=cython.uint, - questions_written=cython.uint, - answers_written=cython.uint, - authorities_written=cython.uint, - additionals_written=cython.uint, + questions_offset=object, + answer_offset=object, + authority_offset=object, + additional_offset=object, + questions_written=object, + answers_written=object, + authorities_written=object, + additionals_written=object, ) cdef _packets(self) + + cpdef add_question_or_all_cache(self, DNSCache cache, object now, str name, object type_, object class_) + + cpdef add_question_or_one_cache(self, DNSCache cache, object now, str name, object type_, object class_) + + cpdef add_question(self, DNSQuestion question) + + cpdef add_answer(self, DNSIncoming inp, DNSRecord record) + + cpdef add_answer_at_time(self, DNSRecord record, object now) + + cpdef add_authorative_answer(self, DNSPointer record) + + cpdef add_additional_answer(self, DNSRecord 
record) + + cpdef is_query(self) + + cpdef is_response(self) diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index e13750f6..069b2936 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -22,7 +22,7 @@ import enum import logging -from typing import Dict, List, Optional, Sequence, Tuple, Union +from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Tuple, Union from .._cache import DNSCache from .._dns import DNSPointer, DNSQuestion, DNSRecord @@ -176,7 +176,7 @@ def add_additional_answer(self, record: DNSRecord) -> None: self.additionals.append(record) def add_question_or_one_cache( - self, cache: DNSCache, now: float, name: str, type_: int, class_: int + self, cache: DNSCache, now: float_, name: str_, type_: int_, class_: int_ ) -> None: """Add a question if it is not already cached.""" cached_entry = cache.get_by_details(name, type_, class_) @@ -186,7 +186,7 @@ def add_question_or_one_cache( self.add_answer_at_time(cached_entry, now) def add_question_or_all_cache( - self, cache: DNSCache, now: float, name: str, type_: int, class_: int + self, cache: DNSCache, now: float_, name: str_, type_: int_, class_: int_ ) -> None: """Add a question if it is not already cached. This is currently only used for IPv6 addresses. 
@@ -223,7 +223,8 @@ def _write_int(self, value: Union[float, int]) -> None: def write_string(self, value: bytes) -> None: """Writes a string to the packet""" - assert isinstance(value, bytes) + if TYPE_CHECKING: + assert isinstance(value, bytes) self.data.append(value) self.size += len(value) @@ -237,7 +238,8 @@ def _write_utf(self, s: str) -> None: self.write_string(utfstr) def write_character_string(self, value: bytes) -> None: - assert isinstance(value, bytes) + if TYPE_CHECKING: + assert isinstance(value, bytes) length = len(value) if length > 256: raise NamePartTooLongException From cd41743f7c36903cdfb772137889f6f919d8bb6b Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 7 Sep 2023 18:15:09 +0000 Subject: [PATCH 093/434] 0.100.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 568a209f..8885dd83 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.100.0 (2023-09-07) + +### Feature + +* Small speed up to writing outgoing dns records ([#1258](https://github.com/python-zeroconf/python-zeroconf/issues/1258)) ([`1ed6bd2`](https://github.com/python-zeroconf/python-zeroconf/commit/1ed6bd2ec4db0612b71384f923ffff1efd3ce878)) + ## v0.99.0 (2023-09-06) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 73120ef0..52d340a0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.99.0" +version = "0.100.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index d69e5c9e..a7998473 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.99.0' +__version__ = '0.100.0' __license__ = 'LGPL' From 248655f0276223b089373c70ec13a0385dfaa4d6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 7 Sep 2023 14:01:11 -0500 Subject: [PATCH 094/434] feat: speed up writing outgoing dns records (#1259) --- src/zeroconf/_dns.pxd | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 5622a5ed..126fe451 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -1,6 +1,8 @@ import cython +from ._protocol.outgoing cimport DNSOutgoing + cdef object _LEN_BYTE cdef object _LEN_SHORT @@ -66,6 +68,8 @@ cdef class DNSAddress(DNSRecord): cdef _eq(self, DNSAddress other) + cpdef write(self, DNSOutgoing out) + cdef class DNSHinfo(DNSRecord): @@ -75,6 +79,7 @@ cdef class DNSHinfo(DNSRecord): cdef _eq(self, DNSHinfo other) + cpdef write(self, DNSOutgoing out) cdef class DNSPointer(DNSRecord): @@ -84,6 +89,7 @@ cdef class DNSPointer(DNSRecord): cdef _eq(self, DNSPointer other) + cpdef write(self, DNSOutgoing out) cdef class DNSText(DNSRecord): @@ -92,6 +98,7 @@ cdef class DNSText(DNSRecord): cdef _eq(self, DNSText other) + cpdef write(self, DNSOutgoing out) cdef class DNSService(DNSRecord): @@ -104,6 +111,7 @@ cdef class DNSService(DNSRecord): cdef _eq(self, DNSService other) + cpdef write(self, DNSOutgoing out) cdef class DNSNsec(DNSRecord): @@ -113,6 +121,7 @@ cdef class DNSNsec(DNSRecord): cdef _eq(self, DNSNsec other) + cpdef write(self, DNSOutgoing out) cdef class DNSRRSet: From 72bd07ddede25511377ec9d519eef8cae384a7d4 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 7 Sep 2023 19:10:31 +0000 Subject: [PATCH 095/434] 0.101.0 Automatically 
generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8885dd83..f073fc3c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.101.0 (2023-09-07) + +### Feature + +* Speed up writing outgoing dns records ([#1259](https://github.com/python-zeroconf/python-zeroconf/issues/1259)) ([`248655f`](https://github.com/python-zeroconf/python-zeroconf/commit/248655f0276223b089373c70ec13a0385dfaa4d6)) + ## v0.100.0 (2023-09-07) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 52d340a0..e3e5a974 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.100.0" +version = "0.101.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index a7998473..de603e54 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.100.0' +__version__ = '0.101.0' __license__ = 'LGPL' From bf2f3660a1f341e50ab0ae586dfbacbc5ddcc077 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 7 Sep 2023 14:55:52 -0500 Subject: [PATCH 096/434] feat: significantly speed up writing outgoing dns records (#1260) --- src/zeroconf/_protocol/outgoing.pxd | 20 +++++++++++++---- src/zeroconf/_protocol/outgoing.py | 34 +++++++++++++++++++---------- 2 files changed, 39 insertions(+), 15 deletions(-) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 0338cfd8..4caaf453 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -17,6 +17,10 @@ cdef cython.uint _MAX_MSG_TYPICAL cdef object TYPE_CHECKING +cdef object PACK_BYTE +cdef object PACK_SHORT +cdef object PACK_LONG + cdef class DNSOutgoing: cdef public unsigned int flags @@ -24,7 +28,7 @@ cdef class DNSOutgoing: cdef public object id cdef public bint multicast cdef public cython.list packets_data - cdef public object names + cdef public cython.dict names cdef public cython.list data cdef public unsigned int size cdef public object allow_long @@ -53,7 +57,7 @@ cdef class DNSOutgoing: ) cdef _write_record(self, DNSRecord record, object now) - cdef _write_record_class(self, object record) + cdef _write_record_class(self, DNSEntry record) cdef _check_data_limit_or_rollback(self, object start_data_length, object start_size) @@ -61,16 +65,24 @@ cdef class DNSOutgoing: cdef _write_answers_from_offset(self, object answer_offset) - cdef _write_records_from_offset(self, object records, object offset) + cdef _write_records_from_offset(self, cython.list records, object offset) cdef _has_more_to_add(self, object questions_offset, object answer_offset, object authority_offset, object additional_offset) cdef _write_ttl(self, DNSRecord record, object now) - cpdef write_name(self, object name) + @cython.locals( + labels=cython.list, + label=cython.str, + ) + cpdef write_name(self, cython.str name) cpdef write_short(self, object value) + cpdef write_string(self, cython.bytes value) + + cpdef _write_utf(self, cython.str value) + 
@cython.locals( questions_offset=object, answer_offset=object, diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index 069b2936..4d17cce0 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -22,6 +22,7 @@ import enum import logging +from struct import Struct from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Tuple, Union from .._cache import DNSCache @@ -43,10 +44,16 @@ str_ = str float_ = float int_ = int +bytes_ = bytes DNSQuestion_ = DNSQuestion DNSRecord_ = DNSRecord +PACK_BYTE = Struct('>B').pack +PACK_SHORT = Struct('>H').pack +PACK_LONG = Struct('>L').pack + + class State(enum.Enum): init = 0 finished = 1 @@ -200,35 +207,35 @@ def add_question_or_all_cache( def _write_byte(self, value: int_) -> None: """Writes a single byte to the packet""" - self.data.append(value.to_bytes(1, 'big')) + self.data.append(PACK_BYTE(value)) self.size += 1 def _insert_short_at_start(self, value: int_) -> None: """Inserts an unsigned short at the start of the packet""" - self.data.insert(0, value.to_bytes(2, 'big')) + self.data.insert(0, PACK_SHORT(value)) def _replace_short(self, index: int_, value: int_) -> None: """Replaces an unsigned short in a certain position in the packet""" - self.data[index] = value.to_bytes(2, 'big') + self.data[index] = PACK_SHORT(value) def write_short(self, value: int_) -> None: """Writes an unsigned short to the packet""" - self.data.append(value.to_bytes(2, 'big')) + self.data.append(PACK_SHORT(value)) self.size += 2 def _write_int(self, value: Union[float, int]) -> None: """Writes an unsigned integer to the packet""" - self.data.append(int(value).to_bytes(4, 'big')) + self.data.append(PACK_LONG(int(value))) self.size += 4 - def write_string(self, value: bytes) -> None: + def write_string(self, value: bytes_) -> None: """Writes a string to the packet""" if TYPE_CHECKING: assert isinstance(value, bytes) self.data.append(value) self.size += len(value) - def 
_write_utf(self, s: str) -> None: + def _write_utf(self, s: str_) -> None: """Writes a UTF-8 string of a given length to the packet""" utfstr = s.encode('utf-8') length = len(utfstr) @@ -446,7 +453,8 @@ def _packets(self) -> List[bytes]: questions_offset, answer_offset, authority_offset, additional_offset ): # https://datatracker.ietf.org/doc/html/rfc6762#section-7.2 - log.debug("Setting TC flag") + if debug_enable: # pragma: no branch + log.debug("Setting TC flag") self._insert_short_at_start(self.flags | _FLAGS_TC) else: self._insert_short_at_start(self.flags) @@ -459,9 +467,13 @@ def _packets(self) -> List[bytes]: self.packets_data.append(b''.join(self.data)) self._reset_for_next_packet() - if (questions_written + answers_written + authorities_written + additionals_written) == 0 and ( - len(self.questions) + len(self.answers) + len(self.authorities) + len(self.additionals) - ) > 0: + if ( + not questions_written + and not answers_written + and not authorities_written + and not additionals_written + and (self.questions or self.answers or self.authorities or self.additionals) + ): log.warning("packets() made no progress adding records; returning") break self.state = State.finished From dac5d4be501870fd0814756067ff3f98a04e612b Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 7 Sep 2023 20:04:21 +0000 Subject: [PATCH 097/434] 0.102.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f073fc3c..9409d611 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.102.0 (2023-09-07) + +### Feature + +* Significantly speed up writing outgoing dns records ([#1260](https://github.com/python-zeroconf/python-zeroconf/issues/1260)) ([`bf2f366`](https://github.com/python-zeroconf/python-zeroconf/commit/bf2f3660a1f341e50ab0ae586dfbacbc5ddcc077)) + ## v0.101.0 (2023-09-07) ### Feature 
diff --git a/pyproject.toml b/pyproject.toml index e3e5a974..5f512829 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.101.0" +version = "0.102.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index de603e54..3487db3e 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.101.0' +__version__ = '0.102.0' __license__ = 'LGPL' From 33a2714cadff96edf016b869cc63b0661d16ef2c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 9 Sep 2023 17:36:32 -0500 Subject: [PATCH 098/434] feat: avoid calling get_running_loop when resolving ServiceInfo (#1261) --- src/zeroconf/_services/info.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 517b41be..5fb05107 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -263,11 +263,11 @@ def properties(self) -> Dict[Union[str, bytes], Optional[Union[str, bytes]]]: assert self._properties is not None return self._properties - async def async_wait(self, timeout: float) -> None: + async def async_wait(self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: """Calling task waits for a given number of milliseconds or until notified.""" - loop = get_running_loop() - assert loop is not None - await wait_for_future_set_or_timeout(loop, self._new_records_futures, timeout) + await wait_for_future_set_or_timeout( + loop or asyncio.get_running_loop(), self._new_records_futures, timeout + ) def addresses_by_version(self, version: IPVersion) -> List[bytes]: """List addresses matching IP version. 
@@ -722,6 +722,9 @@ async def async_request( if self.load_from_cache(zc, now): return True + if TYPE_CHECKING: + assert zc.loop is not None + first_request = True delay = _LISTENER_TIME next_ = now @@ -743,7 +746,7 @@ async def async_request( delay *= 2 next_ += random.randint(*_AVOID_SYNC_DELAY_RANDOM_INTERVAL) - await self.async_wait(min(next_, last) - now) + await self.async_wait(min(next_, last) - now, zc.loop) now = current_time_millis() finally: zc.async_remove_listener(self) From 24f276e1dfab55b203a5de6e3184079cbd2fda3e Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 9 Sep 2023 22:47:14 +0000 Subject: [PATCH 099/434] 0.103.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9409d611..9e64bb64 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.103.0 (2023-09-09) + +### Feature + +* Avoid calling get_running_loop when resolving ServiceInfo ([#1261](https://github.com/python-zeroconf/python-zeroconf/issues/1261)) ([`33a2714`](https://github.com/python-zeroconf/python-zeroconf/commit/33a2714cadff96edf016b869cc63b0661d16ef2c)) + ## v0.102.0 (2023-09-07) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 5f512829..d04574a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.102.0" +version = "0.103.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 3487db3e..1a165ff5 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.102.0' +__version__ = '0.103.0' __license__ = 'LGPL' From 50a8f066b6ab90bc9e3300f81cf9332550b720df Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 10 Sep 2023 12:06:48 -0500 Subject: [PATCH 100/434] feat: speed up generating answers (#1262) --- build_ext.py | 1 + src/zeroconf/_handlers/answers.pxd | 16 ++++++++++++++++ src/zeroconf/_handlers/answers.py | 10 +++++++--- 3 files changed, 24 insertions(+), 3 deletions(-) create mode 100644 src/zeroconf/_handlers/answers.pxd diff --git a/build_ext.py b/build_ext.py index 55d76d3c..4af7b4fc 100644 --- a/build_ext.py +++ b/build_ext.py @@ -29,6 +29,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_listener.py", "src/zeroconf/_protocol/incoming.py", "src/zeroconf/_protocol/outgoing.py", + "src/zeroconf/_handlers/answers.py", "src/zeroconf/_handlers/record_manager.py", "src/zeroconf/_handlers/query_handler.py", "src/zeroconf/_services/registry.py", diff --git a/src/zeroconf/_handlers/answers.pxd b/src/zeroconf/_handlers/answers.pxd new file mode 100644 index 00000000..df34014a --- /dev/null +++ b/src/zeroconf/_handlers/answers.pxd @@ -0,0 +1,16 @@ + +import cython + +from .._protocol.outgoing cimport DNSOutgoing + + +cdef object _FLAGS_QR_RESPONSE_AA +cdef object NAME_GETTER + +cpdef construct_outgoing_multicast_answers(cython.dict answers) + +cpdef construct_outgoing_unicast_answers( + cython.dict answers, object ucast_source, cython.list questions, object id_ +) + +cdef _add_answers_additionals(DNSOutgoing out, cython.dict answers) diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index a80d2367..44aa11cf 100644 --- a/src/zeroconf/_handlers/answers.py +++ 
b/src/zeroconf/_handlers/answers.py @@ -29,11 +29,15 @@ _AnswerWithAdditionalsType = Dict[DNSRecord, Set[DNSRecord]] +int_ = int + MULTICAST_DELAY_RANDOM_INTERVAL = (20, 120) NAME_GETTER = attrgetter('name') +_FLAGS_QR_RESPONSE_AA = _FLAGS_QR_RESPONSE | _FLAGS_AA + class QuestionAnswers(NamedTuple): ucast: _AnswerWithAdditionalsType @@ -52,16 +56,16 @@ class AnswerGroup(NamedTuple): def construct_outgoing_multicast_answers(answers: _AnswerWithAdditionalsType) -> DNSOutgoing: """Add answers and additionals to a DNSOutgoing.""" - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, multicast=True) + out = DNSOutgoing(_FLAGS_QR_RESPONSE_AA, True) _add_answers_additionals(out, answers) return out def construct_outgoing_unicast_answers( - answers: _AnswerWithAdditionalsType, ucast_source: bool, questions: List[DNSQuestion], id_: int + answers: _AnswerWithAdditionalsType, ucast_source: bool, questions: List[DNSQuestion], id_: int_ ) -> DNSOutgoing: """Add answers and additionals to a DNSOutgoing.""" - out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, multicast=False, id_=id_) + out = DNSOutgoing(_FLAGS_QR_RESPONSE_AA, False, id_) # Adding the questions back when the source is legacy unicast behavior if ucast_source: for question in questions: From a537a31db9a5125544ef2310f4cafdcf83a6406b Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 10 Sep 2023 17:14:51 +0000 Subject: [PATCH 101/434] 0.104.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9e64bb64..660f3c5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.104.0 (2023-09-10) + +### Feature + +* Speed up generating answers ([#1262](https://github.com/python-zeroconf/python-zeroconf/issues/1262)) ([`50a8f06`](https://github.com/python-zeroconf/python-zeroconf/commit/50a8f066b6ab90bc9e3300f81cf9332550b720df)) + ## 
v0.103.0 (2023-09-09) ### Feature diff --git a/pyproject.toml b/pyproject.toml index d04574a3..9b90834f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.103.0" +version = "0.104.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 1a165ff5..ce3b7a5d 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -84,7 +84,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.103.0' +__version__ = '0.104.0' __license__ = 'LGPL' From 6bf5d95a75ef7998f4b846b700bb160bc1c28300 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 10 Sep 2023 17:05:14 -0500 Subject: [PATCH 102/434] chore: prepare ServiceInfo base class RecordUpdateListener for cython (#1263) --- build_ext.py | 1 + src/zeroconf/__init__.py | 3 ++- src/zeroconf/_engine.py | 2 +- src/zeroconf/_handlers/record_manager.py | 3 ++- src/zeroconf/_record_update.py | 30 ++++++++++++++++++++++++ src/zeroconf/_services/browser.py | 3 ++- src/zeroconf/_services/info.py | 3 ++- src/zeroconf/_updates.pxd | 9 +++++++ src/zeroconf/_updates.py | 9 ++++--- 9 files changed, 53 insertions(+), 10 deletions(-) create mode 100644 src/zeroconf/_record_update.py create mode 100644 src/zeroconf/_updates.pxd diff --git a/build_ext.py b/build_ext.py index 4af7b4fc..8c39f495 100644 --- a/build_ext.py +++ b/build_ext.py @@ -33,6 +33,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_handlers/record_manager.py", "src/zeroconf/_handlers/query_handler.py", "src/zeroconf/_services/registry.py", + "src/zeroconf/_updates.py", "src/zeroconf/_utils/time.py", ], compiler_directives={"language_level": "3"}, # Python 3 diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ce3b7a5d..b1730794 
100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -50,6 +50,7 @@ from ._logger import QuietLogger, log # noqa # import needed for backwards compat from ._protocol.incoming import DNSIncoming # noqa # import needed for backwards compat from ._protocol.outgoing import DNSOutgoing # noqa # import needed for backwards compat +from ._record_update import RecordUpdate from ._services import ( # noqa # import needed for backwards compat ServiceListener, ServiceStateChange, @@ -65,7 +66,7 @@ ServiceRegistry, ) from ._services.types import ZeroconfServiceTypes -from ._updates import RecordUpdate, RecordUpdateListener +from ._updates import RecordUpdateListener from ._utils.name import service_type_name # noqa # import needed for backwards compat from ._utils.net import ( # noqa # import needed for backwards compat InterfaceChoice, diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index a74c091c..9e455003 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -26,7 +26,7 @@ import threading from typing import TYPE_CHECKING, List, Optional, cast -from ._updates import RecordUpdate +from ._record_update import RecordUpdate from ._utils.asyncio import get_running_loop, run_coro_with_timeout from ._utils.time import current_time_millis from .const import _CACHE_CLEANUP_INTERVAL diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 586fba0b..396bad45 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -26,7 +26,8 @@ from .._dns import DNSQuestion, DNSRecord from .._logger import log from .._protocol.incoming import DNSIncoming -from .._updates import RecordUpdate, RecordUpdateListener +from .._record_update import RecordUpdate +from .._updates import RecordUpdateListener from .._utils.time import current_time_millis from ..const import _ADDRESS_RECORD_TYPES, _DNS_PTR_MIN_TTL, _TYPE_PTR diff --git a/src/zeroconf/_record_update.py 
b/src/zeroconf/_record_update.py new file mode 100644 index 00000000..fbcacd5f --- /dev/null +++ b/src/zeroconf/_record_update.py @@ -0,0 +1,30 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" + +from typing import NamedTuple, Optional + +from ._dns import DNSRecord + + +class RecordUpdate(NamedTuple): + new: DNSRecord + old: Optional[DNSRecord] diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 17307c99..de1769bc 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -44,13 +44,14 @@ from .._dns import DNSPointer, DNSQuestion, DNSQuestionType from .._logger import log from .._protocol.outgoing import DNSOutgoing +from .._record_update import RecordUpdate from .._services import ( ServiceListener, ServiceStateChange, Signal, SignalRegistrationInterface, ) -from .._updates import RecordUpdate, RecordUpdateListener +from .._updates import RecordUpdateListener from .._utils.name import cached_possible_types, service_type_name from .._utils.time import current_time_millis, millis_to_seconds from ..const import ( 
diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 5fb05107..14398b6a 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -38,7 +38,8 @@ from .._exceptions import BadTypeInNameException from .._logger import log from .._protocol.outgoing import DNSOutgoing -from .._updates import RecordUpdate, RecordUpdateListener +from .._record_update import RecordUpdate +from .._updates import RecordUpdateListener from .._utils.asyncio import ( _resolve_all_futures_to_none, get_running_loop, diff --git a/src/zeroconf/_updates.pxd b/src/zeroconf/_updates.pxd new file mode 100644 index 00000000..6309537c --- /dev/null +++ b/src/zeroconf/_updates.pxd @@ -0,0 +1,9 @@ + +import cython + + +cdef class RecordUpdateListener: + + cpdef async_update_records(self, object zc, object now, cython.list records) + + cpdef async_update_records_complete(self) diff --git a/src/zeroconf/_updates.py b/src/zeroconf/_updates.py index b760daf9..a117cc2b 100644 --- a/src/zeroconf/_updates.py +++ b/src/zeroconf/_updates.py @@ -20,17 +20,16 @@ USA """ -from typing import TYPE_CHECKING, List, NamedTuple, Optional +from typing import TYPE_CHECKING, List from ._dns import DNSRecord +from ._record_update import RecordUpdate if TYPE_CHECKING: from ._core import Zeroconf -class RecordUpdate(NamedTuple): - new: DNSRecord - old: Optional[DNSRecord] +float_ = float class RecordUpdateListener: @@ -50,7 +49,7 @@ def update_record( # pylint: disable=no-self-use """ raise RuntimeError("update_record is deprecated and will be removed in a future version.") - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> None: + def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[RecordUpdate]) -> None: """Update multiple records in one shot. All records that are received in a single packet are passed From 7ca690ac3fa75e7474d3412944bbd5056cb313dd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 10 Sep 2023 18:03:42 -0500 Subject: [PATCH 103/434] feat: speed up ServiceInfo with a cython pxd (#1264) --- build_ext.py | 1 + src/zeroconf/_services/info.pxd | 87 +++++++++++++++++++++++++++++++++ src/zeroconf/_services/info.py | 67 +++++++++++++++---------- 3 files changed, 128 insertions(+), 27 deletions(-) create mode 100644 src/zeroconf/_services/info.pxd diff --git a/build_ext.py b/build_ext.py index 8c39f495..b9deeecb 100644 --- a/build_ext.py +++ b/build_ext.py @@ -32,6 +32,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_handlers/answers.py", "src/zeroconf/_handlers/record_manager.py", "src/zeroconf/_handlers/query_handler.py", + "src/zeroconf/_services/info.py", "src/zeroconf/_services/registry.py", "src/zeroconf/_updates.py", "src/zeroconf/_utils/time.py", diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd new file mode 100644 index 00000000..b06ea88d --- /dev/null +++ b/src/zeroconf/_services/info.pxd @@ -0,0 +1,87 @@ + +import cython + +from .._cache cimport DNSCache +from .._dns cimport DNSPointer, DNSRecord, DNSService, DNSText +from .._protocol.outgoing cimport DNSOutgoing +from .._updates cimport RecordUpdateListener +from .._utils.time cimport current_time_millis + + +cdef object _resolve_all_futures_to_none + +cdef object _TYPE_SRV +cdef object _TYPE_TXT +cdef object _TYPE_A +cdef object _TYPE_AAAA +cdef object _TYPE_PTR +cdef object _TYPE_NSEC +cdef object _CLASS_IN +cdef object _FLAGS_QR_QUERY + +cdef object service_type_name + +cdef object DNS_QUESTION_TYPE_QU +cdef object DNS_QUESTION_TYPE_QM + +cdef object _IPVersion_All_value +cdef object _IPVersion_V4Only_value + +cdef object TYPE_CHECKING + +cdef class ServiceInfo(RecordUpdateListener): + + cdef public cython.bytes text + cdef public str type + cdef str _name + cdef public str key + cdef public cython.list _ipv4_addresses + cdef public cython.list _ipv6_addresses + cdef public object port + cdef public object weight + cdef 
public object priority + cdef public str server + cdef public str server_key + cdef public cython.dict _properties + cdef public object host_ttl + cdef public object other_ttl + cdef public object interface_index + cdef public cython.set _new_records_futures + cdef public DNSPointer _dns_pointer_cache + cdef public DNSService _dns_service_cache + cdef public DNSText _dns_text_cache + cdef public cython.list _dns_address_cache + cdef public cython.set _get_address_and_nsec_records_cache + + @cython.locals( + cache=DNSCache + ) + cpdef async_update_records(self, object zc, object now, cython.list records) + + @cython.locals( + cache=DNSCache + ) + cpdef _load_from_cache(self, object zc, object now) + + cdef _unpack_text_into_properties(self) + + cdef _set_properties(self, cython.dict properties) + + cdef _set_text(self, cython.bytes text) + + cdef _get_ip_addresses_from_cache_lifo(self, object zc, object now, object type) + + cdef _process_record_threadsafe(self, object zc, DNSRecord record, object now) + + @cython.locals( + cache=DNSCache + ) + cdef cython.list _get_address_records_from_cache_by_type(self, object zc, object _type) + + cdef _set_ipv4_addresses_from_cache(self, object zc, object now) + + cdef _set_ipv6_addresses_from_cache(self, object zc, object now) + + cdef cython.list _ip_addresses_by_version_value(self, object version_value) + + cdef addresses_by_version(self, object version) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 14398b6a..425ad750 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -78,6 +78,12 @@ # the A/AAAA/SRV records for a host. 
_AVOID_SYNC_DELAY_RANDOM_INTERVAL = (20, 120) +float_ = float +int_ = int + +DNS_QUESTION_TYPE_QU = DNSQuestionType.QU +DNS_QUESTION_TYPE_QM = DNSQuestionType.QM + if TYPE_CHECKING: from .._core import Zeroconf @@ -281,10 +287,9 @@ def addresses_by_version(self, version: IPVersion) -> List[bytes]: """ version_value = version.value if version_value == _IPVersion_All_value: - return [ - *(addr.packed for addr in self._ipv4_addresses), - *(addr.packed for addr in self._ipv6_addresses), - ] + ip_v4_packed = [addr.packed for addr in self._ipv4_addresses] + ip_v6_packed = [addr.packed for addr in self._ipv6_addresses] + return [*ip_v4_packed, *ip_v6_packed] if version_value == _IPVersion_V4Only_value: return [addr.packed for addr in self._ipv4_addresses] return [addr.packed for addr in self._ipv6_addresses] @@ -303,7 +308,7 @@ def ip_addresses_by_version( return self._ip_addresses_by_version_value(version.value) def _ip_addresses_by_version_value( - self, version_value: int + self, version_value: int_ ) -> Union[List[IPv4Address], List[IPv6Address], List[_BaseAddress]]: """Backend for addresses_by_version that uses the raw value.""" if version_value == _IPVersion_All_value: @@ -397,7 +402,7 @@ def get_name(self) -> str: return self._name[: len(self._name) - len(self.type) - 1] def _get_ip_addresses_from_cache_lifo( - self, zc: 'Zeroconf', now: float, type: int + self, zc: 'Zeroconf', now: float_, type: int_ ) -> List[Union[IPv4Address, IPv6Address]]: """Set IPv6 addresses from the cache.""" address_list: List[Union[IPv4Address, IPv6Address]] = [] @@ -410,7 +415,7 @@ def _get_ip_addresses_from_cache_lifo( address_list.reverse() # Reverse to get LIFO order return address_list - def _set_ipv6_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: + def _set_ipv6_addresses_from_cache(self, zc: 'Zeroconf', now: float_) -> None: """Set IPv6 addresses from the cache.""" if TYPE_CHECKING: self._ipv6_addresses = cast( @@ -419,7 +424,7 @@ def 
_set_ipv6_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: else: self._ipv6_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) - def _set_ipv4_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: + def _set_ipv4_addresses_from_cache(self, zc: 'Zeroconf', now: float_) -> None: """Set IPv4 addresses from the cache.""" if TYPE_CHECKING: self._ipv4_addresses = cast( @@ -428,7 +433,7 @@ def _set_ipv4_addresses_from_cache(self, zc: 'Zeroconf', now: float) -> None: else: self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> None: + def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[RecordUpdate]) -> None: """Updates service information from a DNS record. This method will be run in the event loop. @@ -440,7 +445,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordU if updated and new_records_futures: _resolve_all_futures_to_none(new_records_futures) - def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: float) -> bool: + def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: float_) -> bool: """Thread safe record updating. Returns True if a new record was added. 
@@ -624,14 +629,15 @@ def get_address_and_nsec_records(self, override_ttl: Optional[int] = None) -> Se self._get_address_and_nsec_records_cache = records return records - def _get_address_records_from_cache_by_type(self, zc: 'Zeroconf', _type: int) -> List[DNSAddress]: + def _get_address_records_from_cache_by_type(self, zc: 'Zeroconf', _type: int_) -> List[DNSAddress]: """Get the addresses from the cache.""" if self.server_key is None: return [] + cache = zc.cache if TYPE_CHECKING: - records = cast("List[DNSAddress]", zc.cache.get_all_by_details(self.server_key, _type, _CLASS_IN)) + records = cast("List[DNSAddress]", cache.get_all_by_details(self.server_key, _type, _CLASS_IN)) else: - records = zc.cache.get_all_by_details(self.server_key, _type, _CLASS_IN) + records = cache.get_all_by_details(self.server_key, _type, _CLASS_IN) return records def set_server_if_missing(self) -> None: @@ -643,28 +649,33 @@ def set_server_if_missing(self) -> None: self.server = self._name self.server_key = self.key - def load_from_cache(self, zc: 'Zeroconf', now: Optional[float] = None) -> bool: + def load_from_cache(self, zc: 'Zeroconf', now: Optional[float_] = None) -> bool: + """Populate the service info from the cache. + + This method is designed to be threadsafe. + """ + return self._load_from_cache(zc, now or current_time_millis()) + + def _load_from_cache(self, zc: 'Zeroconf', now: float_) -> bool: """Populate the service info from the cache. This method is designed to be threadsafe. 
""" - if not now: - now = current_time_millis() + cache = zc.cache original_server_key = self.server_key - cached_srv_record = zc.cache.get_by_details(self._name, _TYPE_SRV, _CLASS_IN) + cached_srv_record = cache.get_by_details(self._name, _TYPE_SRV, _CLASS_IN) if cached_srv_record: self._process_record_threadsafe(zc, cached_srv_record, now) - cached_txt_record = zc.cache.get_by_details(self._name, _TYPE_TXT, _CLASS_IN) + cached_txt_record = cache.get_by_details(self._name, _TYPE_TXT, _CLASS_IN) if cached_txt_record: self._process_record_threadsafe(zc, cached_txt_record, now) if original_server_key == self.server_key: # If there is a srv which changes the server_key, # A and AAAA will already be loaded from the cache # and we do not want to do it twice - for record in [ - *self._get_address_records_from_cache_by_type(zc, _TYPE_A), - *self._get_address_records_from_cache_by_type(zc, _TYPE_AAAA), - ]: + for record in self._get_address_records_from_cache_by_type(zc, _TYPE_A): + self._process_record_threadsafe(zc, record, now) + for record in self._get_address_records_from_cache_by_type(zc, _TYPE_AAAA): self._process_record_threadsafe(zc, record, now) return self._is_complete @@ -720,7 +731,7 @@ async def async_request( now = current_time_millis() - if self.load_from_cache(zc, now): + if self._load_from_cache(zc, now): return True if TYPE_CHECKING: @@ -737,11 +748,13 @@ async def async_request( return False if next_ <= now: out = self.generate_request_query( - zc, now, question_type or DNSQuestionType.QU if first_request else DNSQuestionType.QM + zc, + now, + question_type or DNS_QUESTION_TYPE_QU if first_request else DNS_QUESTION_TYPE_QM, ) first_request = False if not out.questions: - return self.load_from_cache(zc, now) + return self._load_from_cache(zc, now) zc.async_send(out, addr, port) next_ = now + delay delay *= 2 @@ -755,7 +768,7 @@ async def async_request( return True def generate_request_query( - self, zc: 'Zeroconf', now: float, question_type: 
Optional[DNSQuestionType] = None + self, zc: 'Zeroconf', now: float_, question_type: Optional[DNSQuestionType] = None ) -> DNSOutgoing: """Generate the request query.""" out = DNSOutgoing(_FLAGS_QR_QUERY) @@ -766,7 +779,7 @@ def generate_request_query( out.add_question_or_one_cache(cache, now, name, _TYPE_TXT, _CLASS_IN) out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_A, _CLASS_IN) out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_AAAA, _CLASS_IN) - if question_type == DNSQuestionType.QU: + if question_type == DNS_QUESTION_TYPE_QU: for question in out.questions: question.unicast = True return out From eef99c7e632a112dd034d7e5257cdc0aa514f472 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 10 Sep 2023 23:11:54 +0000 Subject: [PATCH 104/434] 0.105.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 660f3c5c..56115f85 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.105.0 (2023-09-10) + +### Feature + +* Speed up ServiceInfo with a cython pxd ([#1264](https://github.com/python-zeroconf/python-zeroconf/issues/1264)) ([`7ca690a`](https://github.com/python-zeroconf/python-zeroconf/commit/7ca690ac3fa75e7474d3412944bbd5056cb313dd)) + ## v0.104.0 (2023-09-10) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 9b90834f..fc208f48 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.104.0" +version = "0.105.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b1730794..381fb0fe 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.104.0' +__version__ = '0.105.0' __license__ = 'LGPL' From 37bfaf2f630358e8c68652f3b3120931a6f94910 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 11 Sep 2023 09:55:15 -0500 Subject: [PATCH 105/434] feat: speed up answering questions (#1265) --- src/zeroconf/_handlers/query_handler.pxd | 9 ++++++- src/zeroconf/_handlers/query_handler.py | 21 ++++++++-------- src/zeroconf/_services/info.pxd | 21 +++++++++++++++- src/zeroconf/_services/info.py | 32 ++++++++++++++++++++++-- src/zeroconf/_services/registry.pxd | 14 ++++++----- tests/services/test_info.py | 15 +++++++++++ 6 files changed, 92 insertions(+), 20 deletions(-) diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index 3457128c..afbaab95 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -5,6 +5,7 @@ from .._cache cimport DNSCache from .._dns cimport DNSPointer, DNSQuestion, DNSRecord, DNSRRSet from .._history cimport QuestionHistory from .._protocol.incoming cimport DNSIncoming +from .._services.info cimport ServiceInfo from .._services.registry cimport ServiceRegistry @@ -12,6 +13,9 @@ cdef object TYPE_CHECKING, QuestionAnswers cdef cython.uint _ONE_SECOND, _TYPE_PTR, _TYPE_ANY, _TYPE_A, _TYPE_AAAA, _TYPE_SRV, _TYPE_TXT cdef str _SERVICE_TYPE_ENUMERATION_NAME cdef cython.set _RESPOND_IMMEDIATE_TYPES +cdef cython.set _ADDRESS_RECORD_TYPES +cdef object IPVersion +cdef object _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL cdef class _QueryResponse: @@ -45,13 +49,16 @@ cdef class QueryHandler: cdef DNSCache cache cdef QuestionHistory question_history + @cython.locals(service=ServiceInfo) cdef 
_add_service_type_enumeration_query_answers(self, cython.dict answer_set, DNSRRSet known_answers) + @cython.locals(service=ServiceInfo) cdef _add_pointer_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers) + @cython.locals(service=ServiceInfo) cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) - @cython.locals(question_lower_name=str, type_=cython.uint) + @cython.locals(question_lower_name=str, type_=cython.uint, service=ServiceInfo) cdef _answer_question(self, DNSQuestion question, DNSRRSet known_answers) @cython.locals( diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index 34fde547..66deab43 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -28,6 +28,7 @@ from .._history import QuestionHistory from .._protocol.incoming import DNSIncoming from .._services.registry import ServiceRegistry +from .._utils.net import IPVersion from ..const import ( _ADDRESS_RECORD_TYPES, _CLASS_IN, @@ -180,13 +181,13 @@ def _add_pointer_answers( for service in self.registry.async_get_infos_type(lower_name): # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.1. 
- dns_pointer = service.dns_pointer() + dns_pointer = service._dns_pointer(None) if known_answers.suppresses(dns_pointer): continue answer_set[dns_pointer] = { - service.dns_service(), - service.dns_text(), - } | service.get_address_and_nsec_records() + service._dns_service(None), + service._dns_text(None), + } | service._get_address_and_nsec_records(None) def _add_address_answers( self, @@ -200,7 +201,7 @@ def _add_address_answers( answers: List[DNSAddress] = [] additionals: Set[DNSRecord] = set() seen_types: Set[int] = set() - for dns_address in service.dns_addresses(): + for dns_address in service._dns_addresses(None, IPVersion.All): seen_types.add(dns_address.type) if dns_address.type != type_: additionals.add(dns_address) @@ -210,12 +211,12 @@ def _add_address_answers( if answers: if missing_types: assert service.server is not None, "Service server must be set for NSEC record." - additionals.add(service.dns_nsec(list(missing_types))) + additionals.add(service._dns_nsec(list(missing_types), None)) for answer in answers: answer_set[answer] = additionals elif type_ in missing_types: assert service.server is not None, "Service server must be set for NSEC record." - answer_set[service.dns_nsec(list(missing_types))] = set() + answer_set[service._dns_nsec(list(missing_types), None)] = set() def _answer_question( self, @@ -243,11 +244,11 @@ def _answer_question( if type_ in (_TYPE_SRV, _TYPE_ANY): # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.2. 
- dns_service = service.dns_service() + dns_service = service._dns_service(None) if not known_answers.suppresses(dns_service): - answer_set[dns_service] = service.get_address_and_nsec_records() + answer_set[dns_service] = service._get_address_and_nsec_records(None) if type_ in (_TYPE_TXT, _TYPE_ANY): - dns_text = service.dns_text() + dns_text = service._dns_text(None) if not known_answers.suppresses(dns_text): answer_set[dns_text] = set() diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index b06ea88d..860cc9ba 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -2,7 +2,7 @@ import cython from .._cache cimport DNSCache -from .._dns cimport DNSPointer, DNSRecord, DNSService, DNSText +from .._dns cimport DNSNsec, DNSPointer, DNSRecord, DNSService, DNSText from .._protocol.outgoing cimport DNSOutgoing from .._updates cimport RecordUpdateListener from .._utils.time cimport current_time_millis @@ -27,6 +27,8 @@ cdef object DNS_QUESTION_TYPE_QM cdef object _IPVersion_All_value cdef object _IPVersion_V4Only_value +cdef cython.set _ADDRESS_RECORD_TYPES + cdef object TYPE_CHECKING cdef class ServiceInfo(RecordUpdateListener): @@ -85,3 +87,20 @@ cdef class ServiceInfo(RecordUpdateListener): cdef cython.list _ip_addresses_by_version_value(self, object version_value) cdef addresses_by_version(self, object version) + + @cython.locals(cacheable=cython.bint) + cdef cython.list _dns_addresses(self, object override_ttls, object version) + + @cython.locals(cacheable=cython.bint) + cdef DNSPointer _dns_pointer(self, object override_ttl) + + @cython.locals(cacheable=cython.bint) + cdef DNSService _dns_service(self, object override_ttl) + + @cython.locals(cacheable=cython.bint) + cdef DNSText _dns_text(self, object override_ttl) + + cdef DNSNsec _dns_nsec(self, cython.list missing_types, object override_ttl) + + @cython.locals(cacheable=cython.bint) + cdef cython.set _get_address_and_nsec_records(self, object override_ttl) 
diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 425ad750..352c9b8e 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -519,6 +519,14 @@ def dns_addresses( self, override_ttl: Optional[int] = None, version: IPVersion = IPVersion.All, + ) -> List[DNSAddress]: + """Return matching DNSAddress from ServiceInfo.""" + return self._dns_addresses(override_ttl, version) + + def _dns_addresses( + self, + override_ttl: Optional[int], + version: IPVersion, ) -> List[DNSAddress]: """Return matching DNSAddress from ServiceInfo.""" cacheable = version is IPVersion.All and override_ttl is None @@ -544,6 +552,10 @@ def dns_addresses( return records def dns_pointer(self, override_ttl: Optional[int] = None) -> DNSPointer: + """Return DNSPointer from ServiceInfo.""" + return self._dns_pointer(override_ttl) + + def _dns_pointer(self, override_ttl: Optional[int]) -> DNSPointer: """Return DNSPointer from ServiceInfo.""" cacheable = override_ttl is None if self._dns_pointer_cache is not None and cacheable: @@ -561,6 +573,10 @@ def dns_pointer(self, override_ttl: Optional[int] = None) -> DNSPointer: return record def dns_service(self, override_ttl: Optional[int] = None) -> DNSService: + """Return DNSService from ServiceInfo.""" + return self._dns_service(override_ttl) + + def _dns_service(self, override_ttl: Optional[int]) -> DNSService: """Return DNSService from ServiceInfo.""" cacheable = override_ttl is None if self._dns_service_cache is not None and cacheable: @@ -584,6 +600,10 @@ def dns_service(self, override_ttl: Optional[int] = None) -> DNSService: return record def dns_text(self, override_ttl: Optional[int] = None) -> DNSText: + """Return DNSText from ServiceInfo.""" + return self._dns_text(override_ttl) + + def _dns_text(self, override_ttl: Optional[int]) -> DNSText: """Return DNSText from ServiceInfo.""" cacheable = override_ttl is None if self._dns_text_cache is not None and cacheable: @@ -601,6 +621,10 @@ def 
dns_text(self, override_ttl: Optional[int] = None) -> DNSText: return record def dns_nsec(self, missing_types: List[int], override_ttl: Optional[int] = None) -> DNSNsec: + """Return DNSNsec from ServiceInfo.""" + return self._dns_nsec(missing_types, override_ttl) + + def _dns_nsec(self, missing_types: List[int], override_ttl: Optional[int]) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return DNSNsec( self._name, @@ -613,18 +637,22 @@ def dns_nsec(self, missing_types: List[int], override_ttl: Optional[int] = None) ) def get_address_and_nsec_records(self, override_ttl: Optional[int] = None) -> Set[DNSRecord]: + """Build a set of address records and NSEC records for non-present record types.""" + return self._get_address_and_nsec_records(override_ttl) + + def _get_address_and_nsec_records(self, override_ttl: Optional[int]) -> Set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" cacheable = override_ttl is None if self._get_address_and_nsec_records_cache is not None and cacheable: return self._get_address_and_nsec_records_cache missing_types: Set[int] = _ADDRESS_RECORD_TYPES.copy() records: Set[DNSRecord] = set() - for dns_address in self.dns_addresses(override_ttl, IPVersion.All): + for dns_address in self._dns_addresses(override_ttl, IPVersion.All): missing_types.discard(dns_address.type) records.add(dns_address) if missing_types: assert self.server is not None, "Service server must be set for NSEC record." 
- records.add(self.dns_nsec(list(missing_types), override_ttl)) + records.add(self._dns_nsec(list(missing_types), override_ttl)) if cacheable: self._get_address_and_nsec_records_cache = records return records diff --git a/src/zeroconf/_services/registry.pxd b/src/zeroconf/_services/registry.pxd index a741b93a..1d0562c3 100644 --- a/src/zeroconf/_services/registry.pxd +++ b/src/zeroconf/_services/registry.pxd @@ -1,6 +1,8 @@ import cython +from .info cimport ServiceInfo + cdef class ServiceRegistry: @@ -11,16 +13,16 @@ cdef class ServiceRegistry: @cython.locals( record_list=cython.list, ) - cdef _async_get_by_index(self, cython.dict records, str key) + cdef cython.list _async_get_by_index(self, cython.dict records, str key) - cdef _add(self, object info) + cdef _add(self, ServiceInfo info) cdef _remove(self, cython.list infos) - cpdef async_get_info_name(self, str name) + cpdef ServiceInfo async_get_info_name(self, str name) - cpdef async_get_types(self) + cpdef cython.list async_get_types(self) - cpdef async_get_infos_type(self, str type_) + cpdef cython.list async_get_infos_type(self, str type_) - cpdef async_get_infos_server(self, str server) + cpdef cython.list async_get_infos_server(self, str server) diff --git a/tests/services/test_info.py b/tests/services/test_info.py index c0a4e661..7d437d23 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -1535,3 +1535,18 @@ async def test_release_wait_when_new_recorded_added_concurrency(): assert not pending assert info.addresses == [b'\x7f\x00\x00\x01'] await aiozc.async_close() + + +@pytest.mark.asyncio +async def test_service_info_nsec_records(): + """Test we can generate nsec records from ServiceInfo.""" + type_ = "_http._tcp.local." + registration_name = "multiareccon.%s" % type_ + desc = {'path': '/~paulsm/'} + host = "multahostcon.local." 
+ info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, host) + nsec_record = info.dns_nsec([const._TYPE_A, const._TYPE_AAAA], 50) + assert nsec_record.name == registration_name + assert nsec_record.type == const._TYPE_NSEC + assert nsec_record.ttl == 50 + assert nsec_record.rdtypes == [const._TYPE_A, const._TYPE_AAAA] From f38cf555f70f3d4c27f442b0db6eb8452603dbfb Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Sep 2023 15:04:40 +0000 Subject: [PATCH 106/434] 0.106.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 56115f85..11f75a19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.106.0 (2023-09-11) + +### Feature + +* Speed up answering questions ([#1265](https://github.com/python-zeroconf/python-zeroconf/issues/1265)) ([`37bfaf2`](https://github.com/python-zeroconf/python-zeroconf/commit/37bfaf2f630358e8c68652f3b3120931a6f94910)) + ## v0.105.0 (2023-09-10) ### Feature diff --git a/pyproject.toml b/pyproject.toml index fc208f48..84051a84 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.105.0" +version = "0.106.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 381fb0fe..db9c166b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.105.0' +__version__ = '0.106.0' __license__ = 'LGPL' From 24a0a00b3e457979e279a2eeadc8fad2ab09e125 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 11 Sep 2023 10:19:56 -0500 Subject: [PATCH 107/434] feat: speed up responding to queries (#1266) --- src/zeroconf/_dns.pxd | 2 ++ src/zeroconf/_dns.py | 6 +++++- src/zeroconf/_handlers/query_handler.pxd | 4 ++-- src/zeroconf/_handlers/query_handler.py | 6 +++--- 4 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 126fe451..afcb1985 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -136,3 +136,5 @@ cdef class DNSRRSet: record_sets=cython.list, ) cdef cython.dict _get_lookup(self) + + cpdef cython.set lookup_set(self) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 73b0c751..4c015eb3 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -22,7 +22,7 @@ import enum import socket -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Union, cast from ._exceptions import AbstractMethodException from ._utils.net import _is_v6_address @@ -533,6 +533,10 @@ def lookup(self) -> Dict[DNSRecord, float]: """Return the lookup table.""" return self._get_lookup() + def lookup_set(self) -> Set[DNSRecord]: + """Return the lookup table as aset.""" + return set(self._get_lookup()) + def _get_lookup(self) -> Dict[DNSRecord, float]: """Return the lookup table, building it if needed.""" if self._lookup is None: diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index afbaab95..1f1e4da8 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -59,7 +59,7 @@ cdef class QueryHandler: cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) @cython.locals(question_lower_name=str, type_=cython.uint, service=ServiceInfo) - cdef _answer_question(self, DNSQuestion question, DNSRRSet known_answers) + cdef cython.dict _answer_question(self, 
DNSQuestion question, DNSRRSet known_answers) @cython.locals( msg=DNSIncoming, @@ -68,4 +68,4 @@ cdef class QueryHandler: known_answers=DNSRRSet, known_answers_set=cython.set, ) - cpdef async_response(self, cython.list msgs, object unicast_source) + cpdef async_response(self, cython.list msgs, cython.bint unicast_source) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index 66deab43..f4243021 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -268,12 +268,12 @@ def async_response( # pylint: disable=unused-argument for msg in msgs: for question in msg.questions: - if not question.unicast: + if not question.unique: # unique and unicast are the same flag if not known_answers_set: # pragma: no branch - known_answers_set = set(known_answers.lookup) + known_answers_set = known_answers.lookup_set() self.question_history.add_question_at_time(question, msg.now, known_answers_set) answer_set = self._answer_question(question, known_answers) - if not ucast_source and question.unicast: + if not ucast_source and question.unique: # unique and unicast are the same flag query_res.add_qu_question_response(answer_set) continue if ucast_source: From 49d3b0c656db3dfd8ff5b9d6385a22fd64c8a327 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Sep 2023 15:29:11 +0000 Subject: [PATCH 108/434] 0.107.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 11f75a19..7195a762 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.107.0 (2023-09-11) + +### Feature + +* Speed up responding to queries ([#1266](https://github.com/python-zeroconf/python-zeroconf/issues/1266)) ([`24a0a00`](https://github.com/python-zeroconf/python-zeroconf/commit/24a0a00b3e457979e279a2eeadc8fad2ab09e125)) + ## v0.106.0 
(2023-09-11) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 84051a84..245272fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.106.0" +version = "0.107.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index db9c166b..609e8501 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.106.0' +__version__ = '0.107.0' __license__ = 'LGPL' From aed63911f6da0c61165bce79518e6e3f54cb9929 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 11 Sep 2023 13:37:42 -0500 Subject: [PATCH 109/434] chore: bump cpython to 3.12rc2 in the CI (#1269) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ff79b70b..d49d30e6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,7 +41,7 @@ jobs: - "3.9" - "3.10" - "3.11" - - "3.12.0-rc.1" + - "3.12.0-rc.2" - "pypy-3.7" os: - ubuntu-latest From 00c439a6400b7850ef9fdd75bc8d82d4e64b1da0 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 11 Sep 2023 13:37:57 -0500 Subject: [PATCH 110/434] feat: improve performance of constructing outgoing queries (#1267) --- src/zeroconf/_handlers/answers.pxd | 4 ++- src/zeroconf/_handlers/answers.py | 3 ++- src/zeroconf/_protocol/outgoing.pxd | 25 ++++++++++++++---- src/zeroconf/_protocol/outgoing.py | 40 +++++++++++++++++++---------- 4 files changed, 52 insertions(+), 20 deletions(-) diff --git a/src/zeroconf/_handlers/answers.pxd b/src/zeroconf/_handlers/answers.pxd index df34014a..91e2375d 100644 --- a/src/zeroconf/_handlers/answers.pxd +++ b/src/zeroconf/_handlers/answers.pxd @@ -1,6 +1,7 @@ import cython +from .._dns cimport DNSRecord from .._protocol.outgoing cimport DNSOutgoing @@ -10,7 +11,8 @@ cdef object NAME_GETTER cpdef construct_outgoing_multicast_answers(cython.dict answers) cpdef construct_outgoing_unicast_answers( - cython.dict answers, object ucast_source, cython.list questions, object id_ + cython.dict answers, bint ucast_source, cython.list questions, object id_ ) +@cython.locals(answer=DNSRecord, additionals=cython.set, additional=DNSRecord) cdef _add_answers_additionals(DNSOutgoing out, cython.dict answers) diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index 44aa11cf..009bedbc 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -82,7 +82,8 @@ def _add_answers_additionals(out: DNSOutgoing, answers: _AnswerWithAdditionalsTy # overall size of the outgoing response via name compression for answer in sorted(answers, key=NAME_GETTER): out.add_answer_at_time(answer, 0) - for additional in answers[answer]: + additionals = answers[answer] + for additional in additionals: if additional not in sending: out.add_additional_answer(additional) sending.add(additional) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 4caaf453..1c4d6af7 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ 
b/src/zeroconf/_protocol/outgoing.pxd @@ -21,17 +21,25 @@ cdef object PACK_BYTE cdef object PACK_SHORT cdef object PACK_LONG +cdef object STATE_INIT +cdef object STATE_FINISHED + +cdef object LOGGING_IS_ENABLED_FOR +cdef object LOGGING_DEBUG + +cdef cython.tuple BYTE_TABLE + cdef class DNSOutgoing: cdef public unsigned int flags - cdef public object finished + cdef public bint finished cdef public object id cdef public bint multicast cdef public cython.list packets_data cdef public cython.dict names cdef public cython.list data cdef public unsigned int size - cdef public object allow_long + cdef public bint allow_long cdef public object state cdef public cython.list questions cdef public cython.list answers @@ -48,18 +56,21 @@ cdef class DNSOutgoing: cdef _write_int(self, object value) - cdef _write_question(self, DNSQuestion question) + cdef cython.bint _write_question(self, DNSQuestion question) @cython.locals( d=cython.bytes, data_view=cython.list, length=cython.uint ) - cdef _write_record(self, DNSRecord record, object now) + cdef cython.bint _write_record(self, DNSRecord record, object now) cdef _write_record_class(self, DNSEntry record) - cdef _check_data_limit_or_rollback(self, object start_data_length, object start_size) + @cython.locals( + start_size_int=object + ) + cdef cython.bint _check_data_limit_or_rollback(self, cython.uint start_data_length, cython.uint start_size) cdef _write_questions_from_offset(self, object questions_offset) @@ -74,6 +85,9 @@ cdef class DNSOutgoing: @cython.locals( labels=cython.list, label=cython.str, + index=cython.uint, + start_size=cython.uint, + name_length=cython.uint, ) cpdef write_name(self, cython.str name) @@ -103,6 +117,7 @@ cdef class DNSOutgoing: cpdef add_answer(self, DNSIncoming inp, DNSRecord record) + @cython.locals(now_float=cython.float) cpdef add_answer_at_time(self, DNSRecord record, object now) cpdef add_authorative_answer(self, DNSPointer record) diff --git a/src/zeroconf/_protocol/outgoing.py 
b/src/zeroconf/_protocol/outgoing.py index 4d17cce0..f4f68c3d 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -53,12 +53,21 @@ PACK_SHORT = Struct('>H').pack PACK_LONG = Struct('>L').pack +BYTE_TABLE = tuple(PACK_BYTE(i) for i in range(256)) + class State(enum.Enum): init = 0 finished = 1 +STATE_INIT = State.init +STATE_FINISHED = State.finished + +LOGGING_IS_ENABLED_FOR = log.isEnabledFor +LOGGING_DEBUG = logging.DEBUG + + class DNSOutgoing: """Object representation of an outgoing packet""" @@ -93,7 +102,7 @@ def __init__(self, flags: int, multicast: bool = True, id_: int = 0) -> None: self.size: int = _DNS_PACKET_HEADER_LEN self.allow_long: bool = True - self.state = State.init + self.state = STATE_INIT self.questions: List[DNSQuestion] = [] self.answers: List[Tuple[DNSRecord, float]] = [] @@ -137,7 +146,8 @@ def add_answer(self, inp: DNSIncoming, record: DNSRecord) -> None: def add_answer_at_time(self, record: Optional[DNSRecord], now: Union[float, int]) -> None: """Adds an answer if it does not expire by a certain time""" - if record is not None and (now == 0 or not record.is_expired(now)): + now_float = now + if record is not None and (now_float == 0 or not record.is_expired(now_float)): self.answers.append((record, now)) def add_authorative_answer(self, record: DNSPointer) -> None: @@ -207,7 +217,7 @@ def add_question_or_all_cache( def _write_byte(self, value: int_) -> None: """Writes a single byte to the packet""" - self.data.append(PACK_BYTE(value)) + self.data.append(BYTE_TABLE[value]) self.size += 1 def _insert_short_at_start(self, value: int_) -> None: @@ -267,7 +277,7 @@ def write_name(self, name: str_) -> None: """ # split name into each label - name_length = None + name_length = 0 if name.endswith('.'): name = name[: len(name) - 1] labels = name.split('.') @@ -276,14 +286,14 @@ def write_name(self, name: str_) -> None: start_size = self.size for count in range(len(labels)): label = name if count == 0 else 
'.'.join(labels[count:]) - index = self.names.get(label) + index = self.names.get(label, 0) if index: # If part of the name already exists in the packet, # create a pointer to it self._write_byte((index >> 8) | 0xC0) self._write_byte(index & 0xFF) return - if name_length is None: + if name_length == 0: name_length = len(name.encode('utf-8')) self.names[label] = start_size + name_length - len(label.encode('utf-8')) self._write_utf(labels[count]) @@ -293,7 +303,8 @@ def write_name(self, name: str_) -> None: def _write_question(self, question: DNSQuestion_) -> bool: """Writes a question to the packet""" - start_data_length, start_size = len(self.data), self.size + start_data_length = len(self.data) + start_size = self.size self.write_name(question.name) self.write_short(question.type) self._write_record_class(question) @@ -314,7 +325,8 @@ def _write_record(self, record: DNSRecord_, now: float_) -> bool: """Writes a record (answer, authoritative answer, additional) to the packet. Returns True on success, or False if we did not because the packet because the record does not fit.""" - start_data_length, start_size = len(self.data), self.size + start_data_length = len(self.data) + start_size = self.size self.write_name(record.name) self.write_short(record.type) self._write_record_class(record) @@ -339,11 +351,13 @@ def _check_data_limit_or_rollback(self, start_data_length: int_, start_size: int if self.size <= len_limit: return True - log.debug("Reached data limit (size=%d) > (limit=%d) - rolling back", self.size, len_limit) + if LOGGING_IS_ENABLED_FOR(LOGGING_DEBUG): # pragma: no branch + log.debug("Reached data limit (size=%d) > (limit=%d) - rolling back", self.size, len_limit) del self.data[start_data_length:] self.size = start_size - rollback_names = [name for name, idx in self.names.items() if idx >= start_size] + start_size_int = start_size + rollback_names = [name for name, idx in self.names.items() if idx >= start_size_int] for name in rollback_names: del 
self.names[name] return False @@ -395,7 +409,7 @@ def packets(self) -> List[bytes]: return self._packets() def _packets(self) -> List[bytes]: - if self.state == State.finished: + if self.state == STATE_FINISHED: return self.packets_data questions_offset = 0 @@ -404,7 +418,7 @@ def _packets(self) -> List[bytes]: additional_offset = 0 # we have to at least write out the question first_time = True - debug_enable = log.isEnabledFor(logging.DEBUG) + debug_enable = LOGGING_IS_ENABLED_FOR(LOGGING_DEBUG) while first_time or self._has_more_to_add( questions_offset, answer_offset, authority_offset, additional_offset @@ -476,5 +490,5 @@ def _packets(self) -> List[bytes]: ): log.warning("packets() made no progress adding records; returning") break - self.state = State.finished + self.state = STATE_FINISHED return self.packets_data From c88530bc808dbaf9aff83044938469da7b999278 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 11 Sep 2023 18:46:45 +0000 Subject: [PATCH 111/434] 0.108.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7195a762..8baadc23 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.108.0 (2023-09-11) + +### Feature + +* Improve performance of constructing outgoing queries ([#1267](https://github.com/python-zeroconf/python-zeroconf/issues/1267)) ([`00c439a`](https://github.com/python-zeroconf/python-zeroconf/commit/00c439a6400b7850ef9fdd75bc8d82d4e64b1da0)) + ## v0.107.0 (2023-09-11) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 245272fe..56067c91 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.107.0" +version = "0.108.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 609e8501..6af1ddae 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.107.0' +__version__ = '0.108.0' __license__ = 'LGPL' From 48378769c3887b5746ca00de30067a4c0851765c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 13 Sep 2023 19:36:27 -0500 Subject: [PATCH 112/434] feat: speed up ServiceBrowsers with a cython pxd (#1270) --- build_ext.py | 1 + src/zeroconf/_services/browser.pxd | 74 ++++++++++++++++++++++++++++++ src/zeroconf/_services/browser.py | 65 +++++++++++++++----------- src/zeroconf/_services/info.pxd | 6 +-- src/zeroconf/_updates.pxd | 2 +- 5 files changed, 116 insertions(+), 32 deletions(-) create mode 100644 src/zeroconf/_services/browser.pxd diff --git a/build_ext.py b/build_ext.py index b9deeecb..870c8058 100644 --- a/build_ext.py +++ b/build_ext.py @@ -32,6 +32,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_handlers/answers.py", "src/zeroconf/_handlers/record_manager.py", "src/zeroconf/_handlers/query_handler.py", + "src/zeroconf/_services/browser.py", "src/zeroconf/_services/info.py", "src/zeroconf/_services/registry.py", "src/zeroconf/_updates.py", diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd new file mode 100644 index 00000000..f01645ee --- /dev/null +++ b/src/zeroconf/_services/browser.pxd @@ -0,0 +1,74 @@ + +import cython + +from .._cache cimport DNSCache +from .._protocol.outgoing cimport DNSOutgoing, DNSPointer, DNSQuestion, DNSRecord +from .._updates cimport RecordUpdateListener +from .._utils.time cimport current_time_millis, millis_to_seconds + + +cdef object TYPE_CHECKING +cdef object cached_possible_types +cdef cython.uint _EXPIRE_REFRESH_TIME_PERCENT +cdef object SERVICE_STATE_CHANGE_ADDED, SERVICE_STATE_CHANGE_REMOVED, 
SERVICE_STATE_CHANGE_UPDATED + +cdef class _DNSPointerOutgoingBucket: + + cdef public object now + cdef public DNSOutgoing out + cdef public cython.uint bytes + + cpdef add(self, cython.uint max_compressed_size, DNSQuestion question, cython.set answers) + + +@cython.locals(answer=DNSPointer) +cdef _group_ptr_queries_with_known_answers(object now, object multicast, cython.dict question_with_known_answers) + +cdef class QueryScheduler: + + cdef cython.set _types + cdef cython.dict _next_time + cdef object _first_random_delay_interval + cdef cython.dict _delay + + cpdef millis_to_wait(self, object now) + + cpdef reschedule_type(self, object type_, object next_time) + + cpdef process_ready_types(self, object now) + +cdef class _ServiceBrowserBase(RecordUpdateListener): + + cdef public cython.set types + cdef public object zc + cdef object _loop + cdef public object addr + cdef public object port + cdef public object multicast + cdef public object question_type + cdef public cython.dict _pending_handlers + cdef public object _service_state_changed + cdef public QueryScheduler query_scheduler + cdef public bint done + cdef public object _first_request + cdef public object _next_send_timer + cdef public object _query_sender_task + + cpdef _generate_ready_queries(self, object first_request, object now) + + cpdef _enqueue_callback(self, object state_change, object type_, object name) + + @cython.locals(record=DNSRecord, cache=DNSCache, service=DNSRecord) + cpdef async_update_records(self, object zc, cython.float now, cython.list records) + + cpdef _names_matching_types(self, object types) + + cpdef reschedule_type(self, object type_, object now, object next_time) + + cpdef _fire_service_state_changed_event(self, cython.tuple event) + + cpdef _async_send_ready_queries_schedule_next(self) + + cpdef _async_schedule_next(self, object now) + + cpdef _async_send_ready_queries(self, object now) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py 
index de1769bc..d559109c 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -78,9 +78,17 @@ ServiceStateChange.Updated: "update_service", } +SERVICE_STATE_CHANGE_ADDED = ServiceStateChange.Added +SERVICE_STATE_CHANGE_REMOVED = ServiceStateChange.Removed +SERVICE_STATE_CHANGE_UPDATED = ServiceStateChange.Updated + if TYPE_CHECKING: from .._core import Zeroconf +float_ = float +int_ = int +bool_ = bool +str_ = str _QuestionWithKnownAnswers = Dict[DNSQuestion, Set[DNSPointer]] @@ -96,7 +104,7 @@ def __init__(self, now: float, multicast: bool) -> None: self.out = DNSOutgoing(_FLAGS_QR_QUERY, multicast=multicast) self.bytes = 0 - def add(self, max_compressed_size: int, question: DNSQuestion, answers: Set[DNSPointer]) -> None: + def add(self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNSPointer]) -> None: """Add a new set of questions and known answers to the outgoing.""" self.out.add_question(question) for answer in answers: @@ -105,7 +113,7 @@ def add(self, max_compressed_size: int, question: DNSQuestion, answers: Set[DNSP def _group_ptr_queries_with_known_answers( - now: float, multicast: bool, question_with_known_answers: _QuestionWithKnownAnswers + now: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers ) -> List[DNSOutgoing]: """Aggregate queries so that as many known answers as possible fit in the same packet without having known answers spill over into the next packet unless the @@ -205,7 +213,7 @@ class QueryScheduler: """ - __slots__ = ('_schedule_changed_event', '_types', '_next_time', '_first_random_delay_interval', '_delay') + __slots__ = ('_types', '_next_time', '_first_random_delay_interval', '_delay') def __init__( self, @@ -213,18 +221,16 @@ def __init__( delay: int, first_random_delay_interval: Tuple[int, int], ) -> None: - self._schedule_changed_event: Optional[asyncio.Event] = None self._types = types self._next_time: Dict[str, float] = {} 
self._first_random_delay_interval = first_random_delay_interval self._delay: Dict[str, float] = {check_type_: delay for check_type_ in self._types} - def start(self, now: float) -> None: + def start(self, now: float_) -> None: """Start the scheduler.""" - self._schedule_changed_event = asyncio.Event() self._generate_first_next_time(now) - def _generate_first_next_time(self, now: float) -> None: + def _generate_first_next_time(self, now: float_) -> None: """Generate the initial next query times. https://datatracker.ietf.org/doc/html/rfc6762#section-5.2 @@ -238,20 +244,20 @@ def _generate_first_next_time(self, now: float) -> None: next_time = now + delay self._next_time = {check_type_: next_time for check_type_ in self._types} - def millis_to_wait(self, now: float) -> float: + def millis_to_wait(self, now: float_) -> float: """Returns the number of milliseconds to wait for the next event.""" # Wait for the type has the smallest next time next_time = min(self._next_time.values()) return 0 if next_time <= now else next_time - now - def reschedule_type(self, type_: str, next_time: float) -> bool: + def reschedule_type(self, type_: str_, next_time: float_) -> bool: """Reschedule the query for a type to happen sooner.""" if next_time >= self._next_time[type_]: return False self._next_time[type_] = next_time return True - def process_ready_types(self, now: float) -> List[str]: + def process_ready_types(self, now: float_) -> List[str]: """Generate a list of ready types that is due and schedule the next time.""" if self.millis_to_wait(now): return [] @@ -275,6 +281,7 @@ class _ServiceBrowserBase(RecordUpdateListener): __slots__ = ( 'types', 'zc', + '_loop', 'addr', 'port', 'multicast', @@ -322,6 +329,8 @@ def __init__( # Will generate BadTypeInNameException on a bad name service_type_name(check_type_, strict=False) self.zc = zc + assert zc.loop is not None + self._loop = zc.loop self.addr = addr self.port = port self.multicast = self.addr in (None, _MDNS_ADDR, _MDNS_ADDR6) 
@@ -370,23 +379,23 @@ def _names_matching_types(self, names: Iterable[str]) -> List[Tuple[str, str]]: def _enqueue_callback( self, state_change: ServiceStateChange, - type_: str, - name: str, + type_: str_, + name: str_, ) -> None: # Code to ensure we only do a single update message # Precedence is; Added, Remove, Update key = (name, type_) if ( - state_change is ServiceStateChange.Added + state_change is SERVICE_STATE_CHANGE_ADDED or ( - state_change is ServiceStateChange.Removed - and self._pending_handlers.get(key) != ServiceStateChange.Added + state_change is SERVICE_STATE_CHANGE_REMOVED + and self._pending_handlers.get(key) != SERVICE_STATE_CHANGE_ADDED ) - or (state_change is ServiceStateChange.Updated and key not in self._pending_handlers) + or (state_change is SERVICE_STATE_CHANGE_UPDATED and key not in self._pending_handlers) ): self._pending_handlers[key] = state_change - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordUpdate]) -> None: + def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[RecordUpdate]) -> None: """Callback invoked by Zeroconf when new information arrives. Updates information required by browser in the Zeroconf cache. 
@@ -404,9 +413,9 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordU record = cast(DNSPointer, record) for type_ in self.types.intersection(cached_possible_types(record.name)): if old_record is None: - self._enqueue_callback(ServiceStateChange.Added, type_, record.alias) + self._enqueue_callback(SERVICE_STATE_CHANGE_ADDED, type_, record.alias) elif record.is_expired(now): - self._enqueue_callback(ServiceStateChange.Removed, type_, record.alias) + self._enqueue_callback(SERVICE_STATE_CHANGE_REMOVED, type_, record.alias) else: expire_time = record.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) self.reschedule_type(type_, now, expire_time) @@ -417,15 +426,16 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[RecordU continue if record_type in _ADDRESS_RECORD_TYPES: + cache = self.zc.cache # Iterate through the DNSCache and callback any services that use this address for type_, name in self._names_matching_types( - {service.name for service in self.zc.cache.async_entries_with_server(record.name)} + {service.name for service in cache.async_entries_with_server(record.name)} ): - self._enqueue_callback(ServiceStateChange.Updated, type_, name) + self._enqueue_callback(SERVICE_STATE_CHANGE_UPDATED, type_, name) continue for type_, name in self._names_matching_types((record.name,)): - self._enqueue_callback(ServiceStateChange.Updated, type_, name) + self._enqueue_callback(SERVICE_STATE_CHANGE_UPDATED, type_, name) @abstractmethod def async_update_records_complete(self) -> None: @@ -460,7 +470,7 @@ def _async_cancel(self) -> None: assert self._query_sender_task is not None, "Attempted to cancel a browser that was not started" self._query_sender_task.cancel() - def _generate_ready_queries(self, first_request: bool, now: float) -> List[DNSOutgoing]: + def _generate_ready_queries(self, first_request: bool_, now: float_) -> List[DNSOutgoing]: """Generate the service browser query for any type that is due.""" ready_types 
= self.query_scheduler.process_ready_types(now) if not ready_types: @@ -485,7 +495,7 @@ def _cancel_send_timer(self) -> None: self._next_send_timer.cancel() self._next_send_timer = None - def reschedule_type(self, type_: str, now: float, next_time: float) -> None: + def reschedule_type(self, type_: str_, now: float_, next_time: float_) -> None: """Reschedule a type to be refreshed in the future.""" if self.query_scheduler.reschedule_type(type_, next_time): # We need to send the queries before rescheduling the next one @@ -496,7 +506,7 @@ def reschedule_type(self, type_: str, now: float, next_time: float) -> None: self._cancel_send_timer() self._async_schedule_next(now) - def _async_send_ready_queries(self, now: float) -> None: + def _async_send_ready_queries(self, now: float_) -> None: """Send any ready queries.""" outs = self._generate_ready_queries(self._first_request, now) if outs: @@ -512,11 +522,10 @@ def _async_send_ready_queries_schedule_next(self) -> None: self._async_send_ready_queries(now) self._async_schedule_next(now) - def _async_schedule_next(self, now: float) -> None: + def _async_schedule_next(self, now: float_) -> None: """Scheule the next time.""" - assert self.zc.loop is not None delay = millis_to_seconds(self.query_scheduler.millis_to_wait(now)) - self._next_send_timer = self.zc.loop.call_later(delay, self._async_send_ready_queries_schedule_next) + self._next_send_timer = self._loop.call_later(delay, self._async_send_ready_queries_schedule_next) class ServiceBrowser(_ServiceBrowserBase, threading.Thread): diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 860cc9ba..33834e41 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -56,9 +56,9 @@ cdef class ServiceInfo(RecordUpdateListener): cdef public cython.set _get_address_and_nsec_records_cache @cython.locals( - cache=DNSCache + cache=DNSCache, ) - cpdef async_update_records(self, object zc, object now, cython.list records) + 
cpdef async_update_records(self, object zc, cython.float now, cython.list records) @cython.locals( cache=DNSCache @@ -73,7 +73,7 @@ cdef class ServiceInfo(RecordUpdateListener): cdef _get_ip_addresses_from_cache_lifo(self, object zc, object now, object type) - cdef _process_record_threadsafe(self, object zc, DNSRecord record, object now) + cdef _process_record_threadsafe(self, object zc, DNSRecord record, cython.float now) @cython.locals( cache=DNSCache diff --git a/src/zeroconf/_updates.pxd b/src/zeroconf/_updates.pxd index 6309537c..23edf643 100644 --- a/src/zeroconf/_updates.pxd +++ b/src/zeroconf/_updates.pxd @@ -4,6 +4,6 @@ import cython cdef class RecordUpdateListener: - cpdef async_update_records(self, object zc, object now, cython.list records) + cpdef async_update_records(self, object zc, cython.float now, cython.list records) cpdef async_update_records_complete(self) From 1b3225ffc63bd304250695cd8f256c579b941878 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 14 Sep 2023 00:46:22 +0000 Subject: [PATCH 113/434] 0.109.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8baadc23..0bfdae68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.109.0 (2023-09-14) + +### Feature + +* Speed up ServiceBrowsers with a cython pxd ([#1270](https://github.com/python-zeroconf/python-zeroconf/issues/1270)) ([`4837876`](https://github.com/python-zeroconf/python-zeroconf/commit/48378769c3887b5746ca00de30067a4c0851765c)) + ## v0.108.0 (2023-09-11) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 56067c91..a49d7e37 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.108.0" +version = "0.109.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", 
"William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 6af1ddae..a5a9c10b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.108.0' +__version__ = '0.109.0' __license__ = 'LGPL' From 22c433ddaea3049ac49933325ba938fd87a529c0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 13 Sep 2023 20:06:21 -0500 Subject: [PATCH 114/434] feat: small speed ups to ServiceBrowser (#1271) --- src/zeroconf/_services/browser.pxd | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index f01645ee..540b20ca 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -72,3 +72,5 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef _async_schedule_next(self, object now) cpdef _async_send_ready_queries(self, object now) + + cpdef _cancel_send_timer(self) From 549a104af6375ed5d371cd425e29760ed4097fbd Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 14 Sep 2023 01:19:09 +0000 Subject: [PATCH 115/434] 0.110.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0bfdae68..a98341bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.110.0 (2023-09-14) + +### Feature + +* Small speed ups to ServiceBrowser ([#1271](https://github.com/python-zeroconf/python-zeroconf/issues/1271)) ([`22c433d`](https://github.com/python-zeroconf/python-zeroconf/commit/22c433ddaea3049ac49933325ba938fd87a529c0)) + ## v0.109.0 (2023-09-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index a49d7e37..e543ed4a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 
@@ [tool.poetry] name = "zeroconf" -version = "0.109.0" +version = "0.110.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index a5a9c10b..541fcd3d 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.109.0' +__version__ = '0.110.0' __license__ = 'LGPL' From d24722bfa4201d48ab482d35b0ef004f070ada80 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 13 Sep 2023 22:26:08 -0500 Subject: [PATCH 116/434] feat: speed up question and answer internals (#1272) --- src/zeroconf/_dns.pxd | 8 ++--- src/zeroconf/_dns.py | 6 ++-- src/zeroconf/_handlers/answers.pxd | 17 +++++++++++ src/zeroconf/_handlers/answers.py | 37 +++++++++++++++++------- src/zeroconf/_handlers/query_handler.pxd | 17 ++++++----- 5 files changed, 61 insertions(+), 24 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index afcb1985..2e9b778e 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -22,8 +22,8 @@ cdef object current_time_millis cdef class DNSEntry: - cdef public object key - cdef public object name + cdef public str key + cdef public str name cdef public object type cdef public object class_ cdef public object unique @@ -84,8 +84,8 @@ cdef class DNSHinfo(DNSRecord): cdef class DNSPointer(DNSRecord): cdef public cython.int _hash - cdef public object alias - cdef public object alias_key + cdef public str alias + cdef public str alias_key cdef _eq(self, DNSPointer other) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 4c015eb3..b546d273 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -67,9 +67,9 @@ class DNSEntry: __slots__ = ('key', 'name', 'type', 'class_', 'unique') - def __init__(self, name: 
str, type_: int, class_: int) -> None: - self.key = name.lower() + def __init__(self, name: str, type_: _int, class_: _int) -> None: self.name = name + self.key = name.lower() self.type = type_ self.class_ = class_ & _CLASS_MASK self.unique = (class_ & _CLASS_UNIQUE) != 0 @@ -328,7 +328,7 @@ def __init__( ) -> None: super().__init__(name, type_, class_, ttl, created) self.alias = alias - self.alias_key = self.alias.lower() + self.alias_key = alias.lower() self._hash = hash((self.key, type_, self.class_, self.alias_key)) @property diff --git a/src/zeroconf/_handlers/answers.pxd b/src/zeroconf/_handlers/answers.pxd index 91e2375d..6a0f0e3d 100644 --- a/src/zeroconf/_handlers/answers.pxd +++ b/src/zeroconf/_handlers/answers.pxd @@ -5,6 +5,23 @@ from .._dns cimport DNSRecord from .._protocol.outgoing cimport DNSOutgoing +cdef class QuestionAnswers: + + cdef public object ucast + cdef public object mcast_now + cdef public object mcast_aggregate + cdef public object mcast_aggregate_last_second + + +cdef class AnswerGroup: + + cdef public object send_after + cdef public object send_before + cdef public object answers + + + + cdef object _FLAGS_QR_RESPONSE_AA cdef object NAME_GETTER diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index 009bedbc..6ba502ac 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -21,7 +21,7 @@ """ from operator import attrgetter -from typing import Dict, List, NamedTuple, Set +from typing import Dict, List, Set from .._dns import DNSQuestion, DNSRecord from .._protocol.outgoing import DNSOutgoing @@ -38,20 +38,37 @@ _FLAGS_QR_RESPONSE_AA = _FLAGS_QR_RESPONSE | _FLAGS_AA +float_ = float -class QuestionAnswers(NamedTuple): - ucast: _AnswerWithAdditionalsType - mcast_now: _AnswerWithAdditionalsType - mcast_aggregate: _AnswerWithAdditionalsType - mcast_aggregate_last_second: _AnswerWithAdditionalsType +class QuestionAnswers: + """A group of answers to a question.""" -class 
AnswerGroup(NamedTuple): + __slots__ = ('ucast', 'mcast_now', 'mcast_aggregate', 'mcast_aggregate_last_second') + + def __init__( + self, + ucast: _AnswerWithAdditionalsType, + mcast_now: _AnswerWithAdditionalsType, + mcast_aggregate: _AnswerWithAdditionalsType, + mcast_aggregate_last_second: _AnswerWithAdditionalsType, + ) -> None: + """Initialize a QuestionAnswers.""" + self.ucast = ucast + self.mcast_now = mcast_now + self.mcast_aggregate = mcast_aggregate + self.mcast_aggregate_last_second = mcast_aggregate_last_second + + +class AnswerGroup: """A group of answers scheduled to be sent at the same time.""" - send_after: float # Must be sent after this time - send_before: float # Must be sent before this time - answers: _AnswerWithAdditionalsType + __slots__ = ('send_after', 'send_before', 'answers') + + def __init__(self, send_after: float_, send_before: float_, answers: _AnswerWithAdditionalsType) -> None: + self.send_after = send_after # Must be sent after this time + self.send_before = send_before # Must be sent before this time + self.answers = answers def construct_outgoing_multicast_answers(answers: _AnswerWithAdditionalsType) -> DNSOutgoing: diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index 1f1e4da8..31261a69 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -2,14 +2,15 @@ import cython from .._cache cimport DNSCache -from .._dns cimport DNSPointer, DNSQuestion, DNSRecord, DNSRRSet +from .._dns cimport DNSAddress, DNSPointer, DNSQuestion, DNSRecord, DNSRRSet from .._history cimport QuestionHistory from .._protocol.incoming cimport DNSIncoming from .._services.info cimport ServiceInfo from .._services.registry cimport ServiceRegistry +from .answers cimport QuestionAnswers -cdef object TYPE_CHECKING, QuestionAnswers +cdef object TYPE_CHECKING cdef cython.uint _ONE_SECOND, _TYPE_PTR, _TYPE_ANY, _TYPE_A, _TYPE_AAAA, _TYPE_SRV, _TYPE_TXT cdef str 
_SERVICE_TYPE_ENUMERATION_NAME cdef cython.set _RESPOND_IMMEDIATE_TYPES @@ -19,7 +20,7 @@ cdef object _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL cdef class _QueryResponse: - cdef object _is_probe + cdef bint _is_probe cdef DNSIncoming _msg cdef float _now cdef DNSCache _cache @@ -29,17 +30,19 @@ cdef class _QueryResponse: cdef cython.set _mcast_aggregate cdef cython.set _mcast_aggregate_last_second + @cython.locals(record=DNSRecord) cpdef add_qu_question_response(self, cython.dict answers) cpdef add_ucast_question_response(self, cython.dict answers) + @cython.locals(answer=DNSRecord) cpdef add_mcast_question_response(self, cython.dict answers) @cython.locals(maybe_entry=DNSRecord) - cpdef _has_mcast_within_one_quarter_ttl(self, DNSRecord record) + cdef bint _has_mcast_within_one_quarter_ttl(self, DNSRecord record) @cython.locals(maybe_entry=DNSRecord) - cpdef _has_mcast_record_in_last_second(self, DNSRecord record) + cdef bint _has_mcast_record_in_last_second(self, DNSRecord record) cpdef answers(self) @@ -55,8 +58,8 @@ cdef class QueryHandler: @cython.locals(service=ServiceInfo) cdef _add_pointer_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers) - @cython.locals(service=ServiceInfo) - cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) + @cython.locals(service=ServiceInfo, dns_address=DNSAddress) + cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, object type_) @cython.locals(question_lower_name=str, type_=cython.uint, service=ServiceInfo) cdef cython.dict _answer_question(self, DNSQuestion question, DNSRRSet known_answers) From 2734db9826c9d9ebf1412f244cd6d40b72545f4f Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 14 Sep 2023 03:34:30 +0000 Subject: [PATCH 117/434] 0.111.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 
insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a98341bb..50b91d31 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.111.0 (2023-09-14) + +### Feature + +* Speed up question and answer internals ([#1272](https://github.com/python-zeroconf/python-zeroconf/issues/1272)) ([`d24722b`](https://github.com/python-zeroconf/python-zeroconf/commit/d24722bfa4201d48ab482d35b0ef004f070ada80)) + ## v0.110.0 (2023-09-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index e543ed4a..9953b56e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.110.0" +version = "0.111.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 541fcd3d..98bfe4ee 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.110.0' +__version__ = '0.111.0' __license__ = 'LGPL' From 0c88ecf5ef6b9b256f991e7a630048de640999a6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 14 Sep 2023 09:32:08 -0500 Subject: [PATCH 118/434] feat: improve AsyncServiceBrowser performance (#1273) --- src/zeroconf/_services/browser.pxd | 2 ++ src/zeroconf/_services/browser.py | 10 +++++++--- src/zeroconf/asyncio.py | 11 ----------- 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 540b20ca..1006ee3c 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -74,3 +74,5 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef _async_send_ready_queries(self, object now) cpdef _cancel_send_timer(self) + + cpdef async_update_records_complete(self) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index d559109c..cb611d1a 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -25,7 +25,6 @@ import random import threading import warnings -from abc import abstractmethod from types import TracebackType # noqa # used in type hints from typing import ( TYPE_CHECKING, @@ -437,14 +436,18 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record for type_, name in self._names_matching_types((record.name,)): self._enqueue_callback(SERVICE_STATE_CHANGE_UPDATED, type_, name) - @abstractmethod def async_update_records_complete(self) -> None: """Called when a record update has completed for all handlers. At this point the cache will have the new records. This method will be run in the event loop. + + This method is expected to be overridden by subclasses. """ + for pending in self._pending_handlers.items(): + self._fire_service_state_changed_event(pending) + self._pending_handlers.clear() def _fire_service_state_changed_event(self, event: Tuple[Tuple[str, str], ServiceStateChange]) -> None: """Fire a service state changed event. 
@@ -454,7 +457,8 @@ def _fire_service_state_changed_event(self, event: Tuple[Tuple[str, str], Servic When running with AsyncServiceBrowser, this will happen in the event loop. """ - name_type, state_change = event + name_type = event[0] + state_change = event[1] self._service_state_changed.fire( zeroconf=self.zc, service_type=name_type[1], diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index 5aaee35f..cfe3693e 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -82,17 +82,6 @@ async def async_cancel(self) -> None: """Cancel the browser.""" self._async_cancel() - def async_update_records_complete(self) -> None: - """Called when a record update has completed for all handlers. - - At this point the cache will have the new records. - - This method will be run in the event loop. - """ - for pending in self._pending_handlers.items(): - self._fire_service_state_changed_event(pending) - self._pending_handlers.clear() - async def __aenter__(self) -> 'AsyncServiceBrowser': return self From 248b5062b29e3ef80a549b3180f2b85fc2265f1d Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 14 Sep 2023 14:41:27 +0000 Subject: [PATCH 119/434] 0.112.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 50b91d31..28a4d4dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.112.0 (2023-09-14) + +### Feature + +* Improve AsyncServiceBrowser performance ([#1273](https://github.com/python-zeroconf/python-zeroconf/issues/1273)) ([`0c88ecf`](https://github.com/python-zeroconf/python-zeroconf/commit/0c88ecf5ef6b9b256f991e7a630048de640999a6)) + ## v0.111.0 (2023-09-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 9953b56e..b8c1918c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.111.0" 
+version = "0.112.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 98bfe4ee..77f54c36 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.111.0' +__version__ = '0.112.0' __license__ = 'LGPL' From 6257d49952e02107f800f4ad4894716508edfcda Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 24 Sep 2023 13:37:47 +0200 Subject: [PATCH 120/434] feat: improve performance of loading records from cache in ServiceInfo (#1274) --- src/zeroconf/_dns.pxd | 4 +-- src/zeroconf/_services/info.pxd | 25 +++++++++--------- src/zeroconf/_services/info.py | 46 +++++++++++++++++++++------------ 3 files changed, 44 insertions(+), 31 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 2e9b778e..fa73d692 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -106,8 +106,8 @@ cdef class DNSService(DNSRecord): cdef public object priority cdef public object weight cdef public object port - cdef public object server - cdef public object server_key + cdef public str server + cdef public str server_key cdef _eq(self, DNSService other) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 33834e41..de7eb97b 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -2,13 +2,14 @@ import cython from .._cache cimport DNSCache -from .._dns cimport DNSNsec, DNSPointer, DNSRecord, DNSService, DNSText +from .._dns cimport DNSAddress, DNSNsec, DNSPointer, DNSRecord, DNSService, DNSText from .._protocol.outgoing cimport DNSOutgoing from .._updates cimport RecordUpdateListener from .._utils.time cimport current_time_millis cdef object _resolve_all_futures_to_none 
+cdef object _cached_ip_addresses_wrapper cdef object _TYPE_SRV cdef object _TYPE_TXT @@ -55,15 +56,11 @@ cdef class ServiceInfo(RecordUpdateListener): cdef public cython.list _dns_address_cache cdef public cython.set _get_address_and_nsec_records_cache - @cython.locals( - cache=DNSCache, - ) + @cython.locals(cache=DNSCache) cpdef async_update_records(self, object zc, cython.float now, cython.list records) - @cython.locals( - cache=DNSCache - ) - cpdef _load_from_cache(self, object zc, object now) + @cython.locals(cache=DNSCache) + cpdef _load_from_cache(self, object zc, cython.float now) cdef _unpack_text_into_properties(self) @@ -71,13 +68,17 @@ cdef class ServiceInfo(RecordUpdateListener): cdef _set_text(self, cython.bytes text) - cdef _get_ip_addresses_from_cache_lifo(self, object zc, object now, object type) - - cdef _process_record_threadsafe(self, object zc, DNSRecord record, cython.float now) + @cython.locals(record=DNSAddress) + cdef _get_ip_addresses_from_cache_lifo(self, object zc, cython.float now, object type) @cython.locals( - cache=DNSCache + dns_service_record=DNSService, + dns_text_record=DNSText, + dns_address_record=DNSAddress ) + cdef _process_record_threadsafe(self, object zc, DNSRecord record, cython.float now) + + @cython.locals(cache=DNSCache) cdef cython.list _get_address_records_from_cache_by_type(self, object zc, object _type) cdef _set_ipv4_addresses_from_cache(self, object zc, object now) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 352c9b8e..0600d5d3 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -107,6 +107,9 @@ def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4 return None +_cached_ip_addresses_wrapper = _cached_ip_addresses + + class ServiceInfo(RecordUpdateListener): """Service information. 
@@ -197,7 +200,7 @@ def __init__( self.host_ttl = host_ttl self.other_ttl = other_ttl self.interface_index = interface_index - self._new_records_futures: Set[asyncio.Future] = set() + self._new_records_futures: Optional[Set[asyncio.Future]] = None self._dns_address_cache: Optional[List[DNSAddress]] = None self._dns_pointer_cache: Optional[DNSPointer] = None self._dns_service_cache: Optional[DNSService] = None @@ -240,7 +243,7 @@ def addresses(self, value: List[bytes]) -> None: self._get_address_and_nsec_records_cache = None for address in value: - addr = _cached_ip_addresses(address) + addr = _cached_ip_addresses_wrapper(address) if addr is None: raise TypeError( "Addresses must either be IPv4 or IPv6 strings, bytes, or integers;" @@ -272,6 +275,8 @@ def properties(self) -> Dict[Union[str, bytes], Optional[Union[str, bytes]]]: async def async_wait(self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: """Calling task waits for a given number of milliseconds or until notified.""" + if not self._new_records_futures: + self._new_records_futures = set() await wait_for_future_set_or_timeout( loop or asyncio.get_running_loop(), self._new_records_futures, timeout ) @@ -409,7 +414,7 @@ def _get_ip_addresses_from_cache_lifo( for record in self._get_address_records_from_cache_by_type(zc, type): if record.is_expired(now): continue - ip_addr = _cached_ip_addresses(record.address) + ip_addr = _cached_ip_addresses_wrapper(record.address) if ip_addr is not None: address_list.append(ip_addr) address_list.reverse() # Reverse to get LIFO order @@ -455,12 +460,17 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo record_key = record.key record_type = type(record) - if record_key == self.server_key and record_type is DNSAddress: + if record_type is DNSAddress and record_key == self.server_key: + dns_address_record = record if TYPE_CHECKING: - assert isinstance(record, DNSAddress) - ip_addr = 
_cached_ip_addresses(record.address) + assert isinstance(dns_address_record, DNSAddress) + ip_addr = _cached_ip_addresses_wrapper(dns_address_record.address) if ip_addr is None: - log.warning("Encountered invalid address while processing %s: %s", record, record.address) + log.warning( + "Encountered invalid address while processing %s: %s", + dns_address_record, + dns_address_record.address, + ) return False if ip_addr.version == 4: @@ -492,22 +502,24 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo return False if record_type is DNSText: + dns_text_record = record if TYPE_CHECKING: - assert isinstance(record, DNSText) - self._set_text(record.text) + assert isinstance(dns_text_record, DNSText) + self._set_text(dns_text_record.text) return True if record_type is DNSService: + dns_service_record = record if TYPE_CHECKING: - assert isinstance(record, DNSService) + assert isinstance(dns_service_record, DNSService) old_server_key = self.server_key - self._name = record.name - self.key = record.key - self.server = record.server - self.server_key = record.server_key - self.port = record.port - self.weight = record.weight - self.priority = record.priority + self._name = dns_service_record.name + self.key = dns_service_record.key + self.server = dns_service_record.server + self.server_key = dns_service_record.server_key + self.port = dns_service_record.port + self.weight = dns_service_record.weight + self.priority = dns_service_record.priority if old_server_key != self.server_key: self._set_ipv4_addresses_from_cache(zc, now) self._set_ipv6_addresses_from_cache(zc, now) From aa8fd1ace1ae9c018d3aada49e311e0c799b61c1 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 24 Sep 2023 11:46:00 +0000 Subject: [PATCH 121/434] 0.113.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 28a4d4dc..50a79d9b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.113.0 (2023-09-24) + +### Feature + +* Improve performance of loading records from cache in ServiceInfo ([#1274](https://github.com/python-zeroconf/python-zeroconf/issues/1274)) ([`6257d49`](https://github.com/python-zeroconf/python-zeroconf/commit/6257d49952e02107f800f4ad4894716508edfcda)) + ## v0.112.0 (2023-09-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index b8c1918c..b8f4d7de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.112.0" +version = "0.113.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 77f54c36..f9e6799f 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.112.0' +__version__ = '0.113.0' __license__ = 'LGPL' From 3c6b18cdf4c94773ad6f4497df98feb337939ee9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 25 Sep 2023 14:33:47 -0500 Subject: [PATCH 122/434] feat: speed up responding to queries (#1275) --- src/zeroconf/_dns.pxd | 2 +- src/zeroconf/_dns.py | 25 +++++----- src/zeroconf/_handlers/query_handler.pxd | 14 +++--- src/zeroconf/_handlers/query_handler.py | 43 ++++++++++------- src/zeroconf/_handlers/record_manager.py | 2 +- src/zeroconf/_protocol/incoming.pxd | 4 ++ src/zeroconf/_protocol/incoming.py | 4 +- tests/test_asyncio.py | 2 +- tests/test_dns.py | 4 +- tests/test_handlers.py | 14 +++--- tests/test_protocol.py | 60 ++++++++++++------------ 11 files changed, 93 insertions(+), 81 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index fa73d692..ccdcc34f 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -125,7 +125,7 @@ cdef class DNSNsec(DNSRecord): cdef class DNSRRSet: - cdef cython.list _record_sets + cdef cython.list _records cdef cython.dict _lookup @cython.locals(other=DNSRecord) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index b546d273..0b43f410 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -174,7 +174,7 @@ def __eq__(self, other: Any) -> bool: # pylint: disable=no-self-use def suppressed_by(self, msg: 'DNSIncoming') -> bool: """Returns true if any answer in a message can suffice for the information held in this record.""" - answers = msg.answers + answers = msg.answers() for record in answers: if self._suppressed_by_answer(record): return True @@ -521,15 +521,15 @@ def __repr__(self) -> str: class DNSRRSet: """A set of dns records with a lookup to get the ttl.""" - __slots__ = ('_record_sets', '_lookup') + __slots__ = ('_records', '_lookup') - def __init__(self, record_sets: List[List[DNSRecord]]) -> None: + def __init__(self, records: List[DNSRecord]) -> None: """Create an RRset from records sets.""" - self._record_sets = record_sets - self._lookup: Optional[Dict[DNSRecord, float]] = None + self._records = records + self._lookup: Optional[Dict[DNSRecord, 
DNSRecord]] = None @property - def lookup(self) -> Dict[DNSRecord, float]: + def lookup(self) -> Dict[DNSRecord, DNSRecord]: """Return the lookup table.""" return self._get_lookup() @@ -537,21 +537,18 @@ def lookup_set(self) -> Set[DNSRecord]: """Return the lookup table as aset.""" return set(self._get_lookup()) - def _get_lookup(self) -> Dict[DNSRecord, float]: + def _get_lookup(self) -> Dict[DNSRecord, DNSRecord]: """Return the lookup table, building it if needed.""" if self._lookup is None: # Build the hash table so we can lookup the record ttl - self._lookup = {} - for record_sets in self._record_sets: - for record in record_sets: - self._lookup[record] = record.ttl + self._lookup = {record: record for record in self._records} return self._lookup def suppresses(self, record: _DNSRecord) -> bool: """Returns true if any answer in the rrset can suffice for the information held in this record.""" lookup = self._get_lookup() - other_ttl = lookup.get(record) - if other_ttl is None: + other = lookup.get(record) + if other is None: return False - return other_ttl > (record.ttl / 2) + return other.ttl > (record.ttl / 2) diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index 31261a69..365e3a27 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -21,7 +21,7 @@ cdef object _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL cdef class _QueryResponse: cdef bint _is_probe - cdef DNSIncoming _msg + cdef cython.list _questions cdef float _now cdef DNSCache _cache cdef cython.dict _additionals @@ -31,12 +31,12 @@ cdef class _QueryResponse: cdef cython.set _mcast_aggregate_last_second @cython.locals(record=DNSRecord) - cpdef add_qu_question_response(self, cython.dict answers) + cdef add_qu_question_response(self, cython.dict answers) - cpdef add_ucast_question_response(self, cython.dict answers) + cdef add_ucast_question_response(self, cython.dict answers) - @cython.locals(answer=DNSRecord) - 
cpdef add_mcast_question_response(self, cython.dict answers) + @cython.locals(answer=DNSRecord, question=DNSQuestion) + cdef add_mcast_question_response(self, cython.dict answers) @cython.locals(maybe_entry=DNSRecord) cdef bint _has_mcast_within_one_quarter_ttl(self, DNSRecord record) @@ -44,7 +44,7 @@ cdef class _QueryResponse: @cython.locals(maybe_entry=DNSRecord) cdef bint _has_mcast_record_in_last_second(self, DNSRecord record) - cpdef answers(self) + cdef QuestionAnswers answers(self) cdef class QueryHandler: @@ -70,5 +70,7 @@ cdef class QueryHandler: answer_set=cython.dict, known_answers=DNSRRSet, known_answers_set=cython.set, + is_probe=object, + now=object ) cpdef async_response(self, cython.list msgs, cython.bint unicast_source) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index f4243021..776d6a3f 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -55,7 +55,7 @@ class _QueryResponse: __slots__ = ( "_is_probe", - "_msg", + "_questions", "_now", "_cache", "_additionals", @@ -65,15 +65,11 @@ class _QueryResponse: "_mcast_aggregate_last_second", ) - def __init__(self, cache: DNSCache, msgs: List[DNSIncoming]) -> None: + def __init__(self, cache: DNSCache, questions: List[DNSQuestion], is_probe: bool, now: float) -> None: """Build a query response.""" - self._is_probe = False - for msg in msgs: - if msg.is_probe: - self._is_probe = True - break - self._msg = msgs[0] - self._now = self._msg.now + self._is_probe = is_probe + self._questions = questions + self._now = now self._cache = cache self._additionals: _AnswerWithAdditionalsType = {} self._ucast: Set[DNSRecord] = set() @@ -107,10 +103,15 @@ def add_mcast_question_response(self, answers: _AnswerWithAdditionalsType) -> No if self._has_mcast_record_in_last_second(answer): self._mcast_aggregate_last_second.add(answer) - elif len(self._msg.questions) == 1 and self._msg.questions[0].type in 
_RESPOND_IMMEDIATE_TYPES: - self._mcast_now.add(answer) - else: - self._mcast_aggregate.add(answer) + continue + + if len(self._questions) == 1: + question = self._questions[0] + if question.type in _RESPOND_IMMEDIATE_TYPES: + self._mcast_now.add(answer) + continue + + self._mcast_aggregate.add(answer) def answers( self, @@ -262,8 +263,18 @@ def async_response( # pylint: disable=unused-argument This function must be run in the event loop as it is not threadsafe. """ - known_answers = DNSRRSet([msg.answers for msg in msgs if not msg.is_probe]) - query_res = _QueryResponse(self.cache, msgs) + answers: List[DNSRecord] = [] + is_probe = False + msg = msgs[0] + questions = msg.questions + now = msg.now + for msg in msgs: + if not msg.is_probe(): + answers.extend(msg.answers()) + else: + is_probe = True + known_answers = DNSRRSet(answers) + query_res = _QueryResponse(self.cache, questions, is_probe, now) known_answers_set: Optional[Set[DNSRecord]] = None for msg in msgs: @@ -271,7 +282,7 @@ def async_response( # pylint: disable=unused-argument if not question.unique: # unique and unicast are the same flag if not known_answers_set: # pragma: no branch known_answers_set = known_answers.lookup_set() - self.question_history.add_question_at_time(question, msg.now, known_answers_set) + self.question_history.add_question_at_time(question, now, known_answers_set) answer_set = self._answer_question(question, known_answers) if not ucast_source and question.unique: # unique and unicast are the same flag query_res.add_qu_question_response(answer_set) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 396bad45..63572c1e 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -87,7 +87,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: now_float = now unique_types: Set[Tuple[str, int, int]] = set() cache = self.cache - answers = msg.answers + answers = msg.answers() 
for record in answers: # Protect zeroconf from records that can cause denial of service. diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index ebd09a0e..37fc91e7 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -72,6 +72,10 @@ cdef class DNSIncoming: cpdef is_query(self) + cpdef is_probe(self) + + cpdef answers(self) + cpdef is_response(self) @cython.locals( diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 87d25816..5838657a 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -172,7 +172,6 @@ def _log_exception_debug(cls, *logger_data: Any) -> None: log_exc_info = True log.debug(*(logger_data or ['Exception occurred']), exc_info=log_exc_info) - @property def answers(self) -> List[DNSRecord]: """Answers in the packet.""" if not self._did_read_others: @@ -187,7 +186,6 @@ def answers(self) -> List[DNSRecord]: ) return self._answers - @property def is_probe(self) -> bool: """Returns true if this is a probe.""" return self.num_authorities > 0 @@ -203,7 +201,7 @@ def __repr__(self) -> str: 'n_auth=%s' % self.num_authorities, 'n_add=%s' % self.num_additionals, 'questions=%s' % self.questions, - 'answers=%s' % self.answers, + 'answers=%s' % self.answers(), ] ) diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 18e8c8e0..d77e7e83 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -997,7 +997,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" pout = DNSIncoming(out.packets()[0]) nonlocal nbr_answers - for answer in pout.answers: + for answer in pout.answers(): nbr_answers += 1 if not answer.ttl > expected_ttl / 2: unexpected_ttl.set() diff --git a/tests/test_dns.py b/tests/test_dns.py index b82f5d81..08f805f0 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -392,7 +392,7 @@ def 
test_rrset_does_not_consider_ttl(): longaaaarec = r.DNSAddress('irrelevant', const._TYPE_AAAA, const._CLASS_IN, 100, b'same') shortaaaarec = r.DNSAddress('irrelevant', const._TYPE_AAAA, const._CLASS_IN, 10, b'same') - rrset = DNSRRSet([[longarec, shortaaaarec]]) + rrset = DNSRRSet([longarec, shortaaaarec]) assert rrset.suppresses(longarec) assert rrset.suppresses(shortarec) @@ -404,7 +404,7 @@ def test_rrset_does_not_consider_ttl(): mediumarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 60, b'same') shortarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 10, b'same') - rrset2 = DNSRRSet([[mediumarec]]) + rrset2 = DNSRRSet([mediumarec]) assert not rrset2.suppresses(verylongarec) assert rrset2.suppresses(longarec) assert rrset2.suppresses(mediumarec) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 6266ad91..11b58292 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1425,8 +1425,8 @@ async def test_response_aggregation_timings(run_isolated): outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) zc.record_manager.async_updates_from_response(incoming) - assert info.dns_pointer() in incoming.answers - assert info2.dns_pointer() in incoming.answers + assert info.dns_pointer() in incoming.answers() + assert info2.dns_pointer() in incoming.answers() send_mock.reset_mock() protocol.datagram_received(query3.packets()[0], ('127.0.0.1', const._MDNS_PORT)) @@ -1439,7 +1439,7 @@ async def test_response_aggregation_timings(run_isolated): outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) zc.record_manager.async_updates_from_response(incoming) - assert info3.dns_pointer() in incoming.answers + assert info3.dns_pointer() in incoming.answers() send_mock.reset_mock() # Because the response was sent in the last second we need to make @@ -1461,7 +1461,7 @@ async def test_response_aggregation_timings(run_isolated): assert len(calls) == 1 outgoing = 
send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) - assert info.dns_pointer() in incoming.answers + assert info.dns_pointer() in incoming.answers() await aiozc.async_close() @@ -1501,7 +1501,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) zc.record_manager.async_updates_from_response(incoming) - assert info2.dns_pointer() in incoming.answers + assert info2.dns_pointer() in incoming.answers() send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) @@ -1511,7 +1511,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) zc.record_manager.async_updates_from_response(incoming) - assert info2.dns_pointer() in incoming.answers + assert info2.dns_pointer() in incoming.answers() send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) @@ -1534,7 +1534,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli outgoing = send_mock.call_args[0][0] incoming = r.DNSIncoming(outgoing.packets()[0]) zc.record_manager.async_updates_from_response(incoming) - assert info2.dns_pointer() in incoming.answers + assert info2.dns_pointer() in incoming.answers() @pytest.mark.asyncio diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 79f32755..a8593850 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -63,7 +63,7 @@ def test_parse_own_packet_nsec(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time(answer, 0) parsed = r.DNSIncoming(generated.packets()[0]) - assert answer in parsed.answers + assert answer in parsed.answers() # Types > 255 should be ignored answer_invalid_types = r.DNSNsec( @@ -77,7 +77,7 @@ def test_parse_own_packet_nsec(self): 
generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time(answer_invalid_types, 0) parsed = r.DNSIncoming(generated.packets()[0]) - assert answer in parsed.answers + assert answer in parsed.answers() def test_parse_own_packet_response(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -96,7 +96,7 @@ def test_parse_own_packet_response(self): ) parsed = r.DNSIncoming(generated.packets()[0]) assert len(generated.answers) == 1 - assert len(generated.answers) == len(parsed.answers) + assert len(generated.answers) == len(parsed.answers()) def test_adding_empty_answer(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -119,7 +119,7 @@ def test_adding_empty_answer(self): ) parsed = r.DNSIncoming(generated.packets()[0]) assert len(generated.answers) == 1 - assert len(generated.answers) == len(parsed.answers) + assert len(generated.answers) == len(parsed.answers()) def test_adding_expired_answer(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -138,7 +138,7 @@ def test_adding_expired_answer(self): ) parsed = r.DNSIncoming(generated.packets()[0]) assert len(generated.answers) == 0 - assert len(generated.answers) == len(parsed.answers) + assert len(generated.answers) == len(parsed.answers()) def test_match_question(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) @@ -221,7 +221,7 @@ def test_dns_hinfo(self): generated = r.DNSOutgoing(0) generated.add_additional_answer(DNSHinfo('irrelevant', const._TYPE_HINFO, 0, 0, 'cpu', 'os')) parsed = r.DNSIncoming(generated.packets()[0]) - answer = cast(r.DNSHinfo, parsed.answers[0]) + answer = cast(r.DNSHinfo, parsed.answers()[0]) assert answer.cpu == 'cpu' assert answer.os == 'os' @@ -276,15 +276,15 @@ def test_many_questions_with_many_known_answers(self): parsed1 = r.DNSIncoming(packets[0]) assert len(parsed1.questions) == 30 - assert len(parsed1.answers) == 88 + assert len(parsed1.answers()) == 88 assert parsed1.truncated parsed2 = r.DNSIncoming(packets[1]) assert 
len(parsed2.questions) == 0 - assert len(parsed2.answers) == 101 + assert len(parsed2.answers()) == 101 assert parsed2.truncated parsed3 = r.DNSIncoming(packets[2]) assert len(parsed3.questions) == 0 - assert len(parsed3.answers) == 11 + assert len(parsed3.answers()) == 11 assert not parsed3.truncated def test_massive_probe_packet_split(self): @@ -375,7 +375,7 @@ def test_only_one_answer_can_by_large(self): for packet in packets: parsed = r.DNSIncoming(packet) - assert len(parsed.answers) == 1 + assert len(parsed.answers()) == 1 def test_questions_do_not_end_up_every_packet(self): """Test that questions are not sent again when multiple packets are needed. @@ -413,11 +413,11 @@ def test_questions_do_not_end_up_every_packet(self): parsed1 = r.DNSIncoming(packets[0]) assert len(parsed1.questions) == 35 - assert len(parsed1.answers) == 33 + assert len(parsed1.answers()) == 33 parsed2 = r.DNSIncoming(packets[1]) assert len(parsed2.questions) == 0 - assert len(parsed2.answers) == 2 + assert len(parsed2.answers()) == 2 class PacketForm(unittest.TestCase): @@ -482,7 +482,7 @@ def test_incoming_unknown_type(self): generated.add_additional_answer(answer) packet = generated.packets()[0] parsed = r.DNSIncoming(packet) - assert len(parsed.answers) == 0 + assert len(parsed.answers()) == 0 assert parsed.is_query() != parsed.is_response() def test_incoming_circular_reference(self): @@ -505,7 +505,7 @@ def test_incoming_ipv6(self): generated.add_additional_answer(answer) packet = generated.packets()[0] parsed = r.DNSIncoming(packet) - record = parsed.answers[0] + record = parsed.answers()[0] assert isinstance(record, r.DNSAddress) assert record.address == packed @@ -662,7 +662,7 @@ def test_dns_compression_rollback_for_corruption(): incoming = r.DNSIncoming(packet) assert incoming.valid is True assert ( - len(incoming.answers) + len(incoming.answers()) == incoming.num_answers + incoming.num_authorities + incoming.num_additionals ) @@ -767,7 +767,7 @@ def 
test_parse_packet_with_nsec_record(): b"\x00\x00\x80\x00@" ) parsed = DNSIncoming(nsec_packet) - nsec_record = cast(r.DNSNsec, parsed.answers[3]) + nsec_record = cast(r.DNSNsec, parsed.answers()[3]) assert "nsec," in str(nsec_record) assert nsec_record.rdtypes == [16, 33] assert nsec_record.next_name == "MyHome54 (2)._meshcop._udp.local." @@ -794,8 +794,8 @@ def test_records_same_packet_share_fate(): for packet in out.packets(): dnsin = DNSIncoming(packet) - first_time = dnsin.answers[0].created - for answer in dnsin.answers: + first_time = dnsin.answers()[0].created + for answer in dnsin.answers(): assert answer.created == first_time @@ -828,7 +828,7 @@ def test_dns_compression_all_invalid(caplog): ) parsed = r.DNSIncoming(packet, ("2.4.5.4", 5353)) assert len(parsed.questions) == 0 - assert len(parsed.answers) == 0 + assert len(parsed.answers()) == 0 assert " Unable to parse; skipping record" in caplog.text @@ -845,7 +845,7 @@ def test_invalid_next_name_ignored(): ) parsed = r.DNSIncoming(packet) assert len(parsed.questions) == 1 - assert len(parsed.answers) == 2 + assert len(parsed.answers()) == 2 def test_dns_compression_invalid_skips_record(): @@ -868,7 +868,7 @@ def test_dns_compression_invalid_skips_record(): 'eufy HomeBase2-2464._hap._tcp.local.', [const._TYPE_TXT, const._TYPE_SRV], ) - assert answer in parsed.answers + assert answer in parsed.answers() def test_dns_compression_points_forward(): @@ -893,7 +893,7 @@ def test_dns_compression_points_forward(): 'TV Beneden (2)._androidtvremote._tcp.local.', [const._TYPE_TXT, const._TYPE_SRV], ) - assert answer in parsed.answers + assert answer in parsed.answers() def test_dns_compression_points_to_itself(): @@ -904,7 +904,7 @@ def test_dns_compression_points_to_itself(): b"\x01\x00\x04\xc0\xa8\xd0\x06" ) parsed = r.DNSIncoming(packet) - assert len(parsed.answers) == 1 + assert len(parsed.answers()) == 1 def test_dns_compression_points_beyond_packet(): @@ -915,7 +915,7 @@ def 
test_dns_compression_points_beyond_packet(): b'\x00\x01\x00\x04\xc0\xa8\xd0\x06' ) parsed = r.DNSIncoming(packet) - assert len(parsed.answers) == 1 + assert len(parsed.answers()) == 1 def test_dns_compression_generic_failure(caplog): @@ -926,7 +926,7 @@ def test_dns_compression_generic_failure(caplog): b'\x00\x01\x00\x04\xc0\xa8\xd0\x06' ) parsed = r.DNSIncoming(packet, ("1.2.3.4", 5353)) - assert len(parsed.answers) == 1 + assert len(parsed.answers()) == 1 assert "Received invalid packet from ('1.2.3.4', 5353)" in caplog.text @@ -946,7 +946,7 @@ def test_label_length_attack(): b'\x01\x00\x04\xc0\xa8\xd0\x06' ) parsed = r.DNSIncoming(packet) - assert len(parsed.answers) == 0 + assert len(parsed.answers()) == 0 def test_label_compression_attack(): @@ -976,7 +976,7 @@ def test_label_compression_attack(): b'\x0c\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x06' ) parsed = r.DNSIncoming(packet) - assert len(parsed.answers) == 1 + assert len(parsed.answers()) == 1 def test_dns_compression_loop_attack(): @@ -993,7 +993,7 @@ def test_dns_compression_loop_attack(): b'\x04\xc0\xa8\xd0\x05' ) parsed = r.DNSIncoming(packet) - assert len(parsed.answers) == 0 + assert len(parsed.answers()) == 0 def test_txt_after_invalid_nsec_name_still_usable(): @@ -1013,7 +1013,7 @@ def test_txt_after_invalid_nsec_name_still_usable(): b'ce=0' ) parsed = r.DNSIncoming(packet) - txt_record = cast(r.DNSText, parsed.answers[4]) + txt_record = cast(r.DNSText, parsed.answers()[4]) # The NSEC record with the invalid name compression should be skipped assert txt_record.text == ( b'2info=/api/v1/players/RINCON_542A1BC9220E01400/info\x06vers=3\x10protovers' @@ -1022,4 +1022,4 @@ def test_txt_after_invalid_nsec_name_still_usable(): b'00/xml/device_description.xml\x0csslport=1443\x0ehhsslport=1843\tvarian' b't=2\x0emdnssequence=0' ) - assert len(parsed.answers) == 5 + assert len(parsed.answers()) == 5 From 6552f882ce47ea0cf190e80f313688b9c33475f5 Mon Sep 17 00:00:00 2001 From: github-actions Date: 
Mon, 25 Sep 2023 19:42:30 +0000 Subject: [PATCH 123/434] 0.114.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 50a79d9b..543004ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.114.0 (2023-09-25) + +### Feature + +* Speed up responding to queries ([#1275](https://github.com/python-zeroconf/python-zeroconf/issues/1275)) ([`3c6b18c`](https://github.com/python-zeroconf/python-zeroconf/commit/3c6b18cdf4c94773ad6f4497df98feb337939ee9)) + ## v0.113.0 (2023-09-24) ### Feature diff --git a/pyproject.toml b/pyproject.toml index b8f4d7de..11e4b4bd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.113.0" +version = "0.114.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index f9e6799f..efd9ef3a 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.113.0' +__version__ = '0.114.0' __license__ = 'LGPL' From a13fd49d77474fd5858de809e48cbab1ccf89173 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 26 Sep 2023 09:08:07 -0500 Subject: [PATCH 124/434] feat: speed up outgoing multicast queue (#1277) --- build_ext.py | 1 + src/zeroconf/_handlers/answers.pxd | 6 ++--- .../_handlers/multicast_outgoing_queue.pxd | 25 ++++++++++++++++++ .../_handlers/multicast_outgoing_queue.py | 26 +++++++++++++------ 4 files changed, 47 insertions(+), 11 deletions(-) create mode 100644 src/zeroconf/_handlers/multicast_outgoing_queue.pxd diff --git a/build_ext.py b/build_ext.py index 870c8058..c431d748 100644 --- a/build_ext.py +++ b/build_ext.py @@ -31,6 +31,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_protocol/outgoing.py", "src/zeroconf/_handlers/answers.py", "src/zeroconf/_handlers/record_manager.py", + "src/zeroconf/_handlers/multicast_outgoing_queue.py", "src/zeroconf/_handlers/query_handler.py", "src/zeroconf/_services/browser.py", "src/zeroconf/_services/info.py", diff --git a/src/zeroconf/_handlers/answers.pxd b/src/zeroconf/_handlers/answers.pxd index 6a0f0e3d..7efc45c7 100644 --- a/src/zeroconf/_handlers/answers.pxd +++ b/src/zeroconf/_handlers/answers.pxd @@ -15,9 +15,9 @@ cdef class QuestionAnswers: cdef class AnswerGroup: - cdef public object send_after - cdef public object send_before - cdef public object answers + cdef public float send_after + cdef public float send_before + cdef public cython.dict answers diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd new file mode 100644 index 00000000..ff01ce54 --- /dev/null +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd @@ -0,0 +1,25 @@ + +import cython + +from .._utils.time cimport current_time_millis, millis_to_seconds +from .answers cimport AnswerGroup, construct_outgoing_multicast_answers + + +cdef object TYPE_CHECKING +cdef tuple MULTICAST_DELAY_RANDOM_INTERVAL +cdef object RAND_INT + +cdef class MulticastOutgoingQueue: + + cdef object zc + cdef object queue + cdef cython.uint additional_delay + cdef 
cython.uint aggregation_delay + + @cython.locals(last_group=AnswerGroup, random_int=cython.uint, random_delay=float, send_after=float, send_before=float) + cpdef async_add(self, float now, cython.dict answers) + + @cython.locals(pending=AnswerGroup) + cdef _remove_answers_from_queue(self, cython.dict answers) + + cpdef async_ready(self) diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py b/src/zeroconf/_handlers/multicast_outgoing_queue.py index 0e469d28..d45940fb 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.py +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -32,9 +32,13 @@ construct_outgoing_multicast_answers, ) +RAND_INT = random.randint + if TYPE_CHECKING: from .._core import Zeroconf +_float = float + class MulticastOutgoingQueue: """An outgoing queue used to aggregate multicast responses.""" @@ -50,10 +54,13 @@ def __init__(self, zeroconf: 'Zeroconf', additional_delay: int, max_aggregation_ self.additional_delay = additional_delay self.aggregation_delay = max_aggregation_delay - def async_add(self, now: float, answers: _AnswerWithAdditionalsType) -> None: + def async_add(self, now: _float, answers: _AnswerWithAdditionalsType) -> None: """Add a group of answers with additionals to the outgoing queue.""" - assert self.zc.loop is not None - random_delay = random.randint(*MULTICAST_DELAY_RANDOM_INTERVAL) + self.additional_delay + loop = self.zc.loop + if TYPE_CHECKING: + assert loop is not None + random_int = RAND_INT(*MULTICAST_DELAY_RANDOM_INTERVAL) + random_delay = random_int + self.additional_delay send_after = now + random_delay send_before = now + self.aggregation_delay + self.additional_delay if len(self.queue): @@ -66,7 +73,7 @@ def async_add(self, now: float, answers: _AnswerWithAdditionalsType) -> None: last_group.answers.update(answers) return else: - self.zc.loop.call_later(millis_to_seconds(random_delay), self.async_ready) + loop.call_at(loop.time() + millis_to_seconds(random_delay), self.async_ready) 
self.queue.append(AnswerGroup(send_after, send_before, answers)) def _remove_answers_from_queue(self, answers: _AnswerWithAdditionalsType) -> None: @@ -77,13 +84,16 @@ def _remove_answers_from_queue(self, answers: _AnswerWithAdditionalsType) -> Non def async_ready(self) -> None: """Process anything in the queue that is ready.""" - assert self.zc.loop is not None + zc = self.zc + loop = zc.loop + if TYPE_CHECKING: + assert loop is not None now = current_time_millis() if len(self.queue) > 1 and self.queue[0].send_before > now: # There is more than one answer in the queue, # delay until we have to send it (first answer group reaches send_before) - self.zc.loop.call_later(millis_to_seconds(self.queue[0].send_before - now), self.async_ready) + loop.call_at(loop.time() + millis_to_seconds(self.queue[0].send_before - now), self.async_ready) return answers: _AnswerWithAdditionalsType = {} @@ -94,9 +104,9 @@ def async_ready(self) -> None: if len(self.queue): # If there are still groups in the queue that are not ready to send # be sure we schedule them to go out later - self.zc.loop.call_later(millis_to_seconds(self.queue[0].send_after - now), self.async_ready) + loop.call_at(loop.time() + millis_to_seconds(self.queue[0].send_after - now), self.async_ready) if answers: # If we have the same answer scheduled to go out, remove them self._remove_answers_from_queue(answers) - self.zc.async_send(construct_outgoing_multicast_answers(answers)) + zc.async_send(construct_outgoing_multicast_answers(answers)) From ed84067bf8ea97a7a11ebef9077f2001edd6e7e8 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 26 Sep 2023 14:16:57 +0000 Subject: [PATCH 125/434] 0.115.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 543004ab..9fa9d670 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.115.0 
(2023-09-26) + +### Feature + +* Speed up outgoing multicast queue ([#1277](https://github.com/python-zeroconf/python-zeroconf/issues/1277)) ([`a13fd49`](https://github.com/python-zeroconf/python-zeroconf/commit/a13fd49d77474fd5858de809e48cbab1ccf89173)) + ## v0.114.0 (2023-09-25) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 11e4b4bd..b9fcc3c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.114.0" +version = "0.115.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index efd9ef3a..79320bb9 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.114.0' +__version__ = '0.115.0' __license__ = 'LGPL' From 52ee02b16860e344c402124f4b2e2869536ec839 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 1 Oct 2023 09:33:21 +0100 Subject: [PATCH 126/434] fix: add missing python definition for addresses_by_version (#1278) --- src/zeroconf/_services/info.pxd | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index de7eb97b..22388316 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -87,7 +87,9 @@ cdef class ServiceInfo(RecordUpdateListener): cdef cython.list _ip_addresses_by_version_value(self, object version_value) - cdef addresses_by_version(self, object version) + cpdef addresses_by_version(self, object version) + + cpdef ip_addresses_by_version(self, object version) @cython.locals(cacheable=cython.bint) cdef cython.list _dns_addresses(self, object override_ttls, object version) From b0fa5ca5620f28fa92b8a900256811d4b86eec4d Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 1 Oct 2023 08:41:38 +0000 Subject: [PATCH 127/434] 0.115.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9fa9d670..c46fd4c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.115.1 (2023-10-01) + +### Fix + +* Add missing python definition for addresses_by_version ([#1278](https://github.com/python-zeroconf/python-zeroconf/issues/1278)) ([`52ee02b`](https://github.com/python-zeroconf/python-zeroconf/commit/52ee02b16860e344c402124f4b2e2869536ec839)) + ## v0.115.0 (2023-09-26) ### Feature diff --git a/pyproject.toml b/pyproject.toml index b9fcc3c3..989661bc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.115.0" +version = "0.115.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 79320bb9..7962e878 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.115.0' +__version__ = '0.115.1' __license__ = 'LGPL' From 2060eb2cc43489c34bea08924c3f40b875d5a498 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 5 Oct 2023 10:59:13 -0500 Subject: [PATCH 128/434] fix: ensure ServiceInfo cache is cleared when adding to the registry (#1279) * There were production use cases that mutated the service info and re-registered it that need to be accounted for --- src/zeroconf/_services/info.pxd | 2 ++ src/zeroconf/_services/info.py | 8 ++++++++ src/zeroconf/_services/registry.py | 1 + tests/test_asyncio.py | 7 +++++++ 4 files changed, 18 insertions(+) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 22388316..dcfc3a8f 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -107,3 +107,5 @@ cdef class ServiceInfo(RecordUpdateListener): @cython.locals(cacheable=cython.bint) cdef cython.set _get_address_and_nsec_records(self, object override_ttl) + + cpdef async_clear_cache(self) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 0600d5d3..ee033c82 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -273,6 +273,14 @@ def properties(self) -> Dict[Union[str, bytes], Optional[Union[str, bytes]]]: assert self._properties is not None return self._properties + def async_clear_cache(self) -> None: + """Clear the cache for this service info.""" + self._dns_address_cache = None + self._dns_pointer_cache = None + self._dns_service_cache = None + self._dns_text_cache = None + self._get_address_and_nsec_records_cache = None + async def async_wait(self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] 
= None) -> None: """Calling task waits for a given number of milliseconds or until notified.""" if not self._new_records_futures: diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index 12051275..e9dc4a62 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -91,6 +91,7 @@ def _add(self, info: ServiceInfo) -> None: if info.key in self._services: raise ServiceNameAlreadyRegistered + info.async_clear_cache() self._services[info.key] = info self.types.setdefault(info.type.lower(), []).append(info.key) self.servers.setdefault(info.server_key, []).append(info.key) diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index d77e7e83..25dd4681 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -171,6 +171,12 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ) task = await aiozc.async_update_service(new_info) await task + assert new_info.dns_service().server_key == "ash-2.local." + new_info.server = "ash-3.local." + task = await aiozc.async_update_service(new_info) + await task + assert new_info.dns_service().server_key == "ash-3.local." + task = await aiozc.async_unregister_service(new_info) await task await aiozc.async_close() @@ -178,6 +184,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: assert calls == [ ('add', type_, registration_name), ('update', type_, registration_name), + ('update', type_, registration_name), ('remove', type_, registration_name), ] From fc154878027db9f7a910b565b2e826d9270f1df7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 5 Oct 2023 11:54:37 -0500 Subject: [PATCH 129/434] chore: re-trigger release (#1280) From 8f2fe22be891bba926cc19856a20d797f9e8263f Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 5 Oct 2023 17:04:05 +0000 Subject: [PATCH 130/434] 0.115.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c46fd4c4..0f4a7e33 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.115.2 (2023-10-05) + +### Fix + +* Ensure ServiceInfo cache is cleared when adding to the registry ([#1279](https://github.com/python-zeroconf/python-zeroconf/issues/1279)) ([`2060eb2`](https://github.com/python-zeroconf/python-zeroconf/commit/2060eb2cc43489c34bea08924c3f40b875d5a498)) + ## v0.115.1 (2023-10-01) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 989661bc..79259eaf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.115.1" +version = "0.115.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 7962e878..cfe0d840 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.115.1' +__version__ = '0.115.2' __license__ = 'LGPL' From 0677ce9b4a0524ae922a04b4b107215d5759d838 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 7 Oct 2023 09:32:10 -1000 Subject: [PATCH 131/434] chore: bump py3.12 version in the ci (#1276) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d49d30e6..e7ac69f6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -41,7 +41,7 @@ jobs: - "3.9" - "3.10" - "3.11" - - "3.12.0-rc.2" + - "3.12" - "pypy-3.7" os: - ubuntu-latest From 8f300996e5bd4316b2237f0502791dd0d6a855fe Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 12 Oct 2023 15:17:30 -1000 Subject: [PATCH 132/434] feat: reduce type checking overhead at run time (#1281) --- src/zeroconf/_handlers/multicast_outgoing_queue.pxd | 2 +- src/zeroconf/_handlers/query_handler.pxd | 2 +- src/zeroconf/_handlers/record_manager.pxd | 2 +- src/zeroconf/_listener.pxd | 2 +- src/zeroconf/_protocol/outgoing.pxd | 2 +- src/zeroconf/_services/browser.pxd | 2 +- src/zeroconf/_services/info.pxd | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd index ff01ce54..244c07f8 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd @@ -5,7 +5,7 @@ from .._utils.time cimport current_time_millis, millis_to_seconds from .answers cimport AnswerGroup, construct_outgoing_multicast_answers -cdef object TYPE_CHECKING +cdef bint TYPE_CHECKING cdef tuple MULTICAST_DELAY_RANDOM_INTERVAL cdef object RAND_INT diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index 365e3a27..a1a4f8a6 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -10,7 +10,7 @@ from .._services.registry cimport ServiceRegistry from .answers cimport QuestionAnswers -cdef object TYPE_CHECKING +cdef bint TYPE_CHECKING cdef cython.uint 
_ONE_SECOND, _TYPE_PTR, _TYPE_ANY, _TYPE_A, _TYPE_AAAA, _TYPE_SRV, _TYPE_TXT cdef str _SERVICE_TYPE_ENUMERATION_NAME cdef cython.set _RESPOND_IMMEDIATE_TYPES diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index e0792d72..89ad5484 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -9,7 +9,7 @@ from .._protocol.incoming cimport DNSIncoming cdef cython.float _DNS_PTR_MIN_TTL cdef object _ADDRESS_RECORD_TYPES cdef object RecordUpdate -cdef object TYPE_CHECKING +cdef bint TYPE_CHECKING cdef object _TYPE_PTR diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 4e4144c7..a49fe96a 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -8,7 +8,7 @@ from ._utils.time cimport current_time_millis, millis_to_seconds cdef object log cdef object logging_DEBUG -cdef object TYPE_CHECKING +cdef bint TYPE_CHECKING cdef cython.uint _MAX_MSG_ABSOLUTE cdef cython.uint _DUPLICATE_PACKET_SUPPRESSION_INTERVAL diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 1c4d6af7..2374f8b3 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -15,7 +15,7 @@ cdef cython.uint _FLAGS_TC cdef cython.uint _MAX_MSG_ABSOLUTE cdef cython.uint _MAX_MSG_TYPICAL -cdef object TYPE_CHECKING +cdef bint TYPE_CHECKING cdef object PACK_BYTE cdef object PACK_SHORT diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 1006ee3c..a095d6eb 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -7,7 +7,7 @@ from .._updates cimport RecordUpdateListener from .._utils.time cimport current_time_millis, millis_to_seconds -cdef object TYPE_CHECKING +cdef bint TYPE_CHECKING cdef object cached_possible_types cdef cython.uint _EXPIRE_REFRESH_TIME_PERCENT cdef object SERVICE_STATE_CHANGE_ADDED, SERVICE_STATE_CHANGE_REMOVED, 
SERVICE_STATE_CHANGE_UPDATED diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index dcfc3a8f..2e516a9e 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -30,7 +30,7 @@ cdef object _IPVersion_V4Only_value cdef cython.set _ADDRESS_RECORD_TYPES -cdef object TYPE_CHECKING +cdef bint TYPE_CHECKING cdef class ServiceInfo(RecordUpdateListener): From 68be89c2eb679c2fa8531942d063379d6219c19f Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 13 Oct 2023 01:26:47 +0000 Subject: [PATCH 133/434] 0.116.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f4a7e33..65e4edbc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.116.0 (2023-10-13) + +### Feature + +* Reduce type checking overhead at run time ([#1281](https://github.com/python-zeroconf/python-zeroconf/issues/1281)) ([`8f30099`](https://github.com/python-zeroconf/python-zeroconf/commit/8f300996e5bd4316b2237f0502791dd0d6a855fe)) + ## v0.115.2 (2023-10-05) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 79259eaf..6920498f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.115.2" +version = "0.116.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index cfe0d840..670b88ca 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.115.2' +__version__ = '0.116.0' __license__ = 'LGPL' From 4f4bd9ff7c1e575046e5ea213d9b8c91ac7a24a9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 14 Oct 2023 09:29:50 -1000 Subject: [PATCH 134/434] feat: small cleanups to incoming data handlers (#1282) --- src/zeroconf/_dns.pxd | 2 +- src/zeroconf/_handlers/query_handler.pxd | 4 ++-- src/zeroconf/_handlers/query_handler.py | 6 ++++-- src/zeroconf/_handlers/record_manager.pxd | 13 ++++++++++--- src/zeroconf/_handlers/record_manager.py | 9 ++++----- 5 files changed, 21 insertions(+), 13 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index ccdcc34f..6785d1a3 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -24,7 +24,7 @@ cdef class DNSEntry: cdef public str key cdef public str name - cdef public object type + cdef public cython.uint type cdef public object class_ cdef public object unique diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index a1a4f8a6..ff970d76 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -15,7 +15,7 @@ cdef cython.uint _ONE_SECOND, _TYPE_PTR, _TYPE_ANY, _TYPE_A, _TYPE_AAAA, _TYPE_S cdef str _SERVICE_TYPE_ENUMERATION_NAME cdef cython.set _RESPOND_IMMEDIATE_TYPES cdef cython.set _ADDRESS_RECORD_TYPES -cdef object IPVersion +cdef object IPVersion, _IPVersion_ALL cdef object _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL cdef class _QueryResponse: @@ -59,7 +59,7 @@ cdef class QueryHandler: cdef _add_pointer_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers) @cython.locals(service=ServiceInfo, dns_address=DNSAddress) - cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, object type_) + cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) @cython.locals(question_lower_name=str, type_=cython.uint, service=ServiceInfo) cdef cython.dict _answer_question(self, DNSQuestion question, DNSRRSet known_answers) diff --git a/src/zeroconf/_handlers/query_handler.py 
b/src/zeroconf/_handlers/query_handler.py index 776d6a3f..cab11662 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -47,6 +47,8 @@ _RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} +_IPVersion_ALL = IPVersion.All + _int = int @@ -202,7 +204,7 @@ def _add_address_answers( answers: List[DNSAddress] = [] additionals: Set[DNSRecord] = set() seen_types: Set[int] = set() - for dns_address in service._dns_addresses(None, IPVersion.All): + for dns_address in service._dns_addresses(None, _IPVersion_ALL): seen_types.add(dns_address.type) if dns_address.type != type_: additionals.add(dns_address) @@ -269,7 +271,7 @@ def async_response( # pylint: disable=unused-argument questions = msg.questions now = msg.now for msg in msgs: - if not msg.is_probe(): + if msg.is_probe() is False: answers.extend(msg.answers()) else: is_probe = True diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index 89ad5484..8775108b 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -2,11 +2,14 @@ import cython from .._cache cimport DNSCache -from .._dns cimport DNSRecord +from .._dns cimport DNSQuestion, DNSRecord from .._protocol.incoming cimport DNSIncoming +from .._updates cimport RecordUpdateListener +from .._utils.time cimport current_time_millis cdef cython.float _DNS_PTR_MIN_TTL +cdef cython.uint _TYPE_PTR cdef object _ADDRESS_RECORD_TYPES cdef object RecordUpdate cdef bint TYPE_CHECKING @@ -26,11 +29,15 @@ cdef class RecordManager: @cython.locals( cache=DNSCache, record=DNSRecord, + answers=cython.list, maybe_entry=DNSRecord, now_float=cython.float ) cpdef async_updates_from_response(self, DNSIncoming msg) - cpdef async_add_listener(self, object listener, object question) + cpdef async_add_listener(self, RecordUpdateListener listener, object question) - cpdef async_remove_listener(self, object listener) + cpdef 
async_remove_listener(self, RecordUpdateListener listener) + + @cython.locals(question=DNSQuestion, record=DNSRecord) + cdef _async_update_matching_records(self, RecordUpdateListener listener, cython.list questions) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 63572c1e..6fb11f55 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -106,14 +106,14 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: ) record.set_created_ttl(record.created, _DNS_PTR_MIN_TTL) - if record.unique: # https://tools.ietf.org/html/rfc6762#section-10.2 + if record.unique is True: # https://tools.ietf.org/html/rfc6762#section-10.2 unique_types.add((record.name, record_type, record.class_)) if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) maybe_entry = cache.async_get_unique(record) - if not record.is_expired(now_float): + if record.is_expired(now_float) is False: if maybe_entry is not None: maybe_entry.reset_ttl(record) else: @@ -129,7 +129,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: removes.add(record) if unique_types: - cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now) + cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now_float) if updates: self.async_updates(now, updates) @@ -151,7 +151,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: new = False if other_adds or address_adds: new = cache.async_add_records(address_adds) - if cache.async_add_records(other_adds): + if cache.async_add_records(other_adds) is True: new = True # Removes are processed last since # ServiceInfo could generate an un-needed query @@ -182,7 +182,6 @@ def async_add_listener( return questions = [question] if isinstance(question, DNSQuestion) else question - assert self.zc.loop is not None self._async_update_matching_records(listener, questions) def 
_async_update_matching_records( From 29d694a4ac27552805134b7081c53ddf6b16f1e6 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 14 Oct 2023 19:38:25 +0000 Subject: [PATCH 135/434] 0.117.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 65e4edbc..8a4e31d7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.117.0 (2023-10-14) + +### Feature + +* Small cleanups to incoming data handlers ([#1282](https://github.com/python-zeroconf/python-zeroconf/issues/1282)) ([`4f4bd9f`](https://github.com/python-zeroconf/python-zeroconf/commit/4f4bd9ff7c1e575046e5ea213d9b8c91ac7a24a9)) + ## v0.116.0 (2023-10-13) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 6920498f..5cbd4967 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.116.0" +version = "0.117.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 670b88ca..c1aed845 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.116.0' +__version__ = '0.117.0' __license__ = 'LGPL' From 0fc031b1e7bf1766d5a1d39d70d300b86e36715e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 14 Oct 2023 09:41:44 -1000 Subject: [PATCH 136/434] feat: small improvements to ServiceBrowser performance (#1283) --- src/zeroconf/_services/browser.pxd | 3 ++- src/zeroconf/_services/browser.py | 16 +++++++++------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index a095d6eb..8b77c80e 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -10,6 +10,7 @@ from .._utils.time cimport current_time_millis, millis_to_seconds cdef bint TYPE_CHECKING cdef object cached_possible_types cdef cython.uint _EXPIRE_REFRESH_TIME_PERCENT +cdef cython.uint _TYPE_PTR cdef object SERVICE_STATE_CHANGE_ADDED, SERVICE_STATE_CHANGE_REMOVED, SERVICE_STATE_CHANGE_UPDATED cdef class _DNSPointerOutgoingBucket: @@ -58,7 +59,7 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef _enqueue_callback(self, object state_change, object type_, object name) - @cython.locals(record=DNSRecord, cache=DNSCache, service=DNSRecord) + @cython.locals(record=DNSRecord, cache=DNSCache, service=DNSRecord, pointer=DNSPointer) cpdef async_update_records(self, object zc, cython.float now, cython.list records) cpdef _names_matching_types(self, object types) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index cb611d1a..c302de54 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -404,24 +404,26 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record This method will be run in the event loop. 
""" for record_update in records: - record, old_record = record_update + record = record_update[0] + old_record = record_update[1] record_type = record.type if record_type is _TYPE_PTR: if TYPE_CHECKING: record = cast(DNSPointer, record) - for type_ in self.types.intersection(cached_possible_types(record.name)): + pointer = record + for type_ in self.types.intersection(cached_possible_types(pointer.name)): if old_record is None: - self._enqueue_callback(SERVICE_STATE_CHANGE_ADDED, type_, record.alias) - elif record.is_expired(now): - self._enqueue_callback(SERVICE_STATE_CHANGE_REMOVED, type_, record.alias) + self._enqueue_callback(SERVICE_STATE_CHANGE_ADDED, type_, pointer.alias) + elif pointer.is_expired(now): + self._enqueue_callback(SERVICE_STATE_CHANGE_REMOVED, type_, pointer.alias) else: - expire_time = record.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) + expire_time = pointer.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) self.reschedule_type(type_, now, expire_time) continue # If its expired or already exists in the cache it cannot be updated. 
- if old_record or record.is_expired(now): + if old_record or record.is_expired(now) is True: continue if record_type in _ADDRESS_RECORD_TYPES: From e3ce4559590f858755058cb2512d0e0f17d457be Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 14 Oct 2023 19:50:44 +0000 Subject: [PATCH 137/434] 0.118.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a4e31d7..b48414cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.118.0 (2023-10-14) + +### Feature + +* Small improvements to ServiceBrowser performance ([#1283](https://github.com/python-zeroconf/python-zeroconf/issues/1283)) ([`0fc031b`](https://github.com/python-zeroconf/python-zeroconf/commit/0fc031b1e7bf1766d5a1d39d70d300b86e36715e)) + ## v0.117.0 (2023-10-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 5cbd4967..3e36eb1f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.117.0" +version = "0.118.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index c1aed845..7615a44b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.117.0' +__version__ = '0.118.0' __license__ = 'LGPL' From b6afa4b2775a1fdb090145eccdc5711c98e7147a Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 17 Oct 2023 21:01:21 -1000 Subject: [PATCH 138/434] fix: reduce size of wheels by excluding generated .c files (#1284) --- MANIFEST.in | 1 + build_ext.py | 1 + 2 files changed, 2 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 9491f804..f8eef337 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ include README.rst include COPYING +global-exclude *.c diff --git a/build_ext.py b/build_ext.py index c431d748..2134f61d 100644 --- a/build_ext.py +++ b/build_ext.py @@ -44,6 +44,7 @@ def build(setup_kwargs: Any) -> None: cmdclass=dict(build_ext=BuildExt), ) ) + setup_kwargs["exclude_package_data"] = {pkg: ["*.c"] for pkg in setup_kwargs["packages"]} except Exception: if os.environ.get("REQUIRE_CYTHON"): raise From 1514712a97a6411eff43ecfb423d0b23ec11fc34 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 18 Oct 2023 07:10:47 +0000 Subject: [PATCH 139/434] 0.118.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b48414cd..5e881fb9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.118.1 (2023-10-18) + +### Fix + +* Reduce size of wheels by excluding generated .c files ([#1284](https://github.com/python-zeroconf/python-zeroconf/issues/1284)) ([`b6afa4b`](https://github.com/python-zeroconf/python-zeroconf/commit/b6afa4b2775a1fdb090145eccdc5711c98e7147a)) + ## v0.118.0 (2023-10-14) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 3e36eb1f..48836351 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.118.0" +version = "0.118.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 7615a44b..ebd4a637 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.118.0' +__version__ = '0.118.1' __license__ = 'LGPL' From e8c9083bb118764a85b12fac9055152a2f62a212 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 17 Oct 2023 21:45:10 -1000 Subject: [PATCH 140/434] feat: update cibuildwheel to build wheels on latest cython final release (#1285) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e7ac69f6..cfdb5adb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -145,7 +145,7 @@ jobs: fetch-depth: 0 - name: Build wheels - uses: pypa/cibuildwheel@v2.15.0 + uses: pypa/cibuildwheel@v2.16.2 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* From 461e6c1504186e3e4c6399b4d903351e0f17ff33 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 18 Oct 2023 07:56:59 +0000 Subject: [PATCH 141/434] 0.119.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5e881fb9..d91772e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.119.0 (2023-10-18) + +### Feature + +* Update cibuildwheel to build wheels on latest cython final release ([#1285](https://github.com/python-zeroconf/python-zeroconf/issues/1285)) ([`e8c9083`](https://github.com/python-zeroconf/python-zeroconf/commit/e8c9083bb118764a85b12fac9055152a2f62a212)) + ## v0.118.1 (2023-10-18) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 48836351..85367cc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] 
name = "zeroconf" -version = "0.118.1" +version = "0.119.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ebd4a637..92aa9aea 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.118.1' +__version__ = '0.119.0' __license__ = 'LGPL' From bdcf286ce5bbb19922701a1ba436759507747e04 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 4 Nov 2023 16:53:26 -0500 Subject: [PATCH 142/434] chore: fix test patching (#1292) --- src/zeroconf/_services/browser.pxd | 2 +- tests/conftest.py | 6 ++---- tests/services/test_browser.py | 8 ++++++-- tests/test_handlers.py | 4 ++++ 4 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 8b77c80e..3cce4977 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -23,7 +23,7 @@ cdef class _DNSPointerOutgoingBucket: @cython.locals(answer=DNSPointer) -cdef _group_ptr_queries_with_known_answers(object now, object multicast, cython.dict question_with_known_answers) +cpdef _group_ptr_queries_with_known_answers(object now, object multicast, cython.dict question_with_known_answers) cdef class QueryScheduler: diff --git a/tests/conftest.py b/tests/conftest.py index 5cdff18e..c0e926a3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,7 +8,7 @@ import pytest -from zeroconf import _core, _listener, const +from zeroconf import _core, const @pytest.fixture(autouse=True) @@ -34,7 +34,5 @@ def disable_duplicate_packet_suppression(): Some tests run too slowly because of the duplicate packet suppression. 
""" - with patch.object(_listener, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0), patch.object( - const, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0 - ): + with patch.object(const, "_DUPLICATE_PACKET_SUPPRESSION_INTERVAL", 0): yield diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index aa13761d..f6f3c345 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -26,7 +26,6 @@ current_time_millis, millis_to_seconds, ) -from zeroconf._handlers import record_manager from zeroconf._services import ServiceStateChange from zeroconf._services.browser import ServiceBrowser from zeroconf._services.info import ServiceInfo @@ -1159,7 +1158,6 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: zc.close() -@patch.object(record_manager, '_DNS_PTR_MIN_TTL', 1) @patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01) def test_service_browser_expire_callbacks(): """Test that the ServiceBrowser matching does not match partial names.""" @@ -1216,6 +1214,12 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: zc, mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), ) + # Force the ttl to be 1 second + now = current_time_millis() + for cache_record in zc.cache.cache.values(): + for record in cache_record: + record.set_created_ttl(now, 1) + time.sleep(0.3) info.port = 400 info._dns_service_cache = None # we are mutating the record so clear the cache diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 11b58292..a1c6ff5d 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1495,6 +1495,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli with unittest.mock.patch.object(aiozc.zeroconf, "async_send") as send_mock: send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) + protocol.last_time = 0 # manually reset the last time to avoid 
duplicate packet suppression await asyncio.sleep(0.2) calls = send_mock.mock_calls assert len(calls) == 1 @@ -1505,6 +1506,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) + protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression await asyncio.sleep(1.2) calls = send_mock.mock_calls assert len(calls) == 1 @@ -1515,7 +1517,9 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) + protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) + protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression # The delay should increase with two packets and # 900ms is beyond the maximum aggregation delay # when there is no network protection delay From 1e1877adef729a73a639fd9b66c85816081763bd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 17:16:19 -0500 Subject: [PATCH 143/434] chore: fix cythonize of browser (#1293) --- src/zeroconf/_services/browser.pxd | 2 +- src/zeroconf/_services/browser.py | 29 +++++++++++++++++++++-------- tests/services/test_browser.py | 2 +- 3 files changed, 23 insertions(+), 10 deletions(-) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 3cce4977..8b77c80e 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -23,7 +23,7 @@ cdef class _DNSPointerOutgoingBucket: @cython.locals(answer=DNSPointer) -cpdef _group_ptr_queries_with_known_answers(object now, object multicast, cython.dict question_with_known_answers) +cdef _group_ptr_queries_with_known_answers(object now, object multicast, cython.dict question_with_known_answers) cdef class QueryScheduler: diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index c302de54..8503151a 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -25,6 +25,7 @@ import random import threading import warnings +from functools import partial from types import TracebackType # noqa # used in type hints from typing import ( TYPE_CHECKING, @@ -111,7 +112,7 @@ def add(self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNS self.bytes += max_compressed_size -def _group_ptr_queries_with_known_answers( +def group_ptr_queries_with_known_answers( now: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers ) -> List[DNSOutgoing]: """Aggregate queries so that as many known answers as possible fit in the same packet @@ -122,6 +123,13 @@ def _group_ptr_queries_with_known_answers( so we try to keep all the known answers in the same packet as the questions. 
""" + return _group_ptr_queries_with_known_answers(now, multicast, question_with_known_answers) + + +def _group_ptr_queries_with_known_answers( + now: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers +) -> List[DNSOutgoing]: + """Inner wrapper for group_ptr_queries_with_known_answers.""" # This is the maximum size the query + known answers can be with name compression. # The actual size of the query + known answers may be a bit smaller since other # parts may be shared when the final DNSOutgoing packets are constructed. The @@ -187,6 +195,17 @@ def generate_service_query( return _group_ptr_queries_with_known_answers(now, multicast, questions_with_known_answers) +def _on_change_dispatcher( + listener: ServiceListener, + zeroconf: 'Zeroconf', + service_type: str, + name: str, + state_change: ServiceStateChange, +) -> None: + """Dispatch a service state change to a listener.""" + getattr(listener, _ON_CHANGE_DISPATCH[state_change])(zeroconf, service_type, name) + + def _service_state_changed_from_listener(listener: ServiceListener) -> Callable[..., None]: """Generate a service_state_changed handlers from a listener.""" assert listener is not None @@ -196,13 +215,7 @@ def _service_state_changed_from_listener(listener: ServiceListener) -> Callable[ "don't care about the updates), it'll become mandatory." 
% (listener,), FutureWarning, ) - - def on_change( - zeroconf: 'Zeroconf', service_type: str, name: str, state_change: ServiceStateChange - ) -> None: - getattr(listener, _ON_CHANGE_DISPATCH[state_change])(zeroconf, service_type, name) - - return on_change + return partial(_on_change_dispatcher, listener) class QueryScheduler: diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index f6f3c345..15a03598 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -965,7 +965,7 @@ def test_group_ptr_queries_with_known_answers(): ) for counter in range(i) } - outs = _services_browser._group_ptr_queries_with_known_answers(now, True, questions_with_known_answers) + outs = _services_browser.group_ptr_queries_with_known_answers(now, True, questions_with_known_answers) for out in outs: packets = out.packets() # If we generate multiple packets there must From eba2e31d30cc594530b196a42612ab72e5771944 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 4 Nov 2023 17:16:41 -0500 Subject: [PATCH 144/434] chore: ensure ci use_cython fails if cythonize fails (#1294) --- .github/workflows/ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cfdb5adb..91ae876c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -74,6 +74,8 @@ jobs: run: poetry install --only=main,dev - name: Install Dependencies with cython if: ${{ matrix.extension != 'skip_cython' }} + env: + REQUIRE_CYTHON: 1 run: poetry install --only=main,dev - name: Test with Pytest run: poetry run pytest --durations=20 --timeout=60 -v --cov=zeroconf --cov-branch --cov-report xml --cov-report html --cov-report term-missing tests From 0060f798872d0dc079634f79e6d247247940decf Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 17:52:53 -0500 Subject: [PATCH 145/434] chore: fix more failing ServiceBrowser tests (#1295) --- src/zeroconf/_services/browser.py | 4 ++++ tests/test_asyncio.py | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 8503151a..ed482546 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -269,6 +269,10 @@ def reschedule_type(self, type_: str_, next_time: float_) -> bool: self._next_time[type_] = next_time return True + def _force_reschedule_type(self, type_: str_, next_time: float_) -> None: + """Force a reschedule of a type.""" + self._next_time[type_] = next_time + def process_ready_types(self, now: float_) -> List[str]: """Generate a list of ready types that is due and schedule the next time.""" if self.millis_to_wait(now): diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 25dd4681..53bce4b4 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1055,7 +1055,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): now = _new_current_time_millis() # Force the next query to be sent since we are testing # to see if the query contains answers and not the scheduler - browser.query_scheduler._next_time[type_] = now + (1000 * expected_ttl) + browser.query_scheduler._force_reschedule_type(type_, now + (1000 * expected_ttl)) browser.reschedule_type(type_, now, now) sleep_count += 1 await asyncio.wait_for(got_query.wait(), 1) @@ -1350,7 +1350,7 @@ def _new_current_time_millis(): await asyncio.wait_for(service_added.wait(), 1) time_offset = 1000 * expected_ttl # set the time to the end of the ttl now = _new_current_time_millis() - browser.query_scheduler._next_time[type_] = now + (1000 * expected_ttl) + browser.query_scheduler._force_reschedule_type(type_, now + (1000 * expected_ttl)) # Make sure the query schedule is to a time in the future # so we will 
reschedule with patch.object( From 0b9d36f5587fd9e43992c97a08b0210effd434d4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 4 Nov 2023 18:15:54 -0500 Subject: [PATCH 146/434] chore: fix handler tests (#1296) --- .../_handlers/multicast_outgoing_queue.pxd | 10 ++-- .../_handlers/multicast_outgoing_queue.py | 24 ++++++--- tests/test_handlers.py | 50 +++++++++++-------- 3 files changed, 52 insertions(+), 32 deletions(-) diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd index 244c07f8..59a4fb2a 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd @@ -12,11 +12,13 @@ cdef object RAND_INT cdef class MulticastOutgoingQueue: cdef object zc - cdef object queue - cdef cython.uint additional_delay - cdef cython.uint aggregation_delay + cdef public object queue + cdef public object _multicast_delay_random_min + cdef public object _multicast_delay_random_max + cdef object _additional_delay + cdef object _aggregation_delay - @cython.locals(last_group=AnswerGroup, random_int=cython.uint, random_delay=float, send_after=float, send_before=float) + @cython.locals(last_group=AnswerGroup, random_int=cython.uint) cpdef async_add(self, float now, cython.dict answers) @cython.locals(pending=AnswerGroup) diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py b/src/zeroconf/_handlers/multicast_outgoing_queue.py index d45940fb..1d398d73 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.py +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -38,31 +38,41 @@ from .._core import Zeroconf _float = float +_int = int class MulticastOutgoingQueue: """An outgoing queue used to aggregate multicast responses.""" - __slots__ = ("zc", "queue", "additional_delay", "aggregation_delay") + __slots__ = ( + "zc", + "queue", + "_multicast_delay_random_min", + "_multicast_delay_random_max", + "_additional_delay", + 
"_aggregation_delay", + ) - def __init__(self, zeroconf: 'Zeroconf', additional_delay: int, max_aggregation_delay: int) -> None: + def __init__(self, zeroconf: 'Zeroconf', additional_delay: _int, max_aggregation_delay: _int) -> None: self.zc = zeroconf self.queue: deque[AnswerGroup] = deque() # Additional delay is used to implement # Protect the network against excessive packet flooding # https://datatracker.ietf.org/doc/html/rfc6762#section-14 - self.additional_delay = additional_delay - self.aggregation_delay = max_aggregation_delay + self._multicast_delay_random_min = MULTICAST_DELAY_RANDOM_INTERVAL[0] + self._multicast_delay_random_max = MULTICAST_DELAY_RANDOM_INTERVAL[1] + self._additional_delay = additional_delay + self._aggregation_delay = max_aggregation_delay def async_add(self, now: _float, answers: _AnswerWithAdditionalsType) -> None: """Add a group of answers with additionals to the outgoing queue.""" loop = self.zc.loop if TYPE_CHECKING: assert loop is not None - random_int = RAND_INT(*MULTICAST_DELAY_RANDOM_INTERVAL) - random_delay = random_int + self.additional_delay + random_int = RAND_INT(self._multicast_delay_random_min, self._multicast_delay_random_max) + random_delay = random_int + self._additional_delay send_after = now + random_delay - send_before = now + self.aggregation_delay + self.additional_delay + send_before = now + self._aggregation_delay + self._additional_delay if len(self.queue): # If we calculate a random delay for the send after time # that is less than the last group scheduled to go out, diff --git a/tests/test_handlers.py b/tests/test_handlers.py index a1c6ff5d..13fe3a51 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -11,12 +11,12 @@ import unittest import unittest.mock from typing import List, cast +from unittest.mock import patch import pytest import zeroconf as r from zeroconf import ServiceInfo, Zeroconf, const, current_time_millis -from zeroconf._handlers import multicast_outgoing_queue from 
zeroconf._handlers.multicast_outgoing_queue import ( MulticastOutgoingQueue, construct_outgoing_multicast_answers, @@ -1413,7 +1413,7 @@ async def test_response_aggregation_timings(run_isolated): zc = aiozc.zeroconf protocol = zc.engine.protocols[0] - with unittest.mock.patch.object(aiozc.zeroconf, "async_send") as send_mock: + with patch.object(aiozc.zeroconf, "async_send") as send_mock: protocol.datagram_received(query.packets()[0], ('127.0.0.1', const._MDNS_PORT)) protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) protocol.datagram_received(query.packets()[0], ('127.0.0.1', const._MDNS_PORT)) @@ -1492,7 +1492,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli zc = aiozc.zeroconf protocol = zc.engine.protocols[0] - with unittest.mock.patch.object(aiozc.zeroconf, "async_send") as send_mock: + with patch.object(aiozc.zeroconf, "async_send") as send_mock: send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression @@ -1581,16 +1581,19 @@ async def test_response_aggregation_random_delay(): outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 500) now = current_time_millis() - with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (500, 600)): - outgoing_queue.async_add(now, {info.dns_pointer(): set()}) + outgoing_queue._multicast_delay_random_min = 500 + outgoing_queue._multicast_delay_random_max = 600 + outgoing_queue.async_add(now, {info.dns_pointer(): set()}) # The second group should always be coalesced into first group since it will always come before - with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (300, 400)): - outgoing_queue.async_add(now, {info2.dns_pointer(): set()}) + outgoing_queue._multicast_delay_random_min = 300 + outgoing_queue._multicast_delay_random_max = 400 + 
outgoing_queue.async_add(now, {info2.dns_pointer(): set()}) # The third group should always be coalesced into first group since it will always come before - with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (100, 200)): - outgoing_queue.async_add(now, {info3.dns_pointer(): set(), info4.dns_pointer(): set()}) + outgoing_queue._multicast_delay_random_min = 100 + outgoing_queue._multicast_delay_random_max = 200 + outgoing_queue.async_add(now, {info3.dns_pointer(): set(), info4.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 1 assert info.dns_pointer() in outgoing_queue.queue[0].answers @@ -1599,8 +1602,9 @@ async def test_response_aggregation_random_delay(): assert info4.dns_pointer() in outgoing_queue.queue[0].answers # The forth group should not be coalesced because its scheduled after the last group in the queue - with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (700, 800)): - outgoing_queue.async_add(now, {info5.dns_pointer(): set()}) + outgoing_queue._multicast_delay_random_min = 700 + outgoing_queue._multicast_delay_random_max = 800 + outgoing_queue.async_add(now, {info5.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 2 assert info.dns_pointer() not in outgoing_queue.queue[1].answers @@ -1630,21 +1634,22 @@ async def test_future_answers_are_removed_on_send(): outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 0) now = current_time_millis() - with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (1, 1)): - outgoing_queue.async_add(now, {info.dns_pointer(): set()}) + outgoing_queue._multicast_delay_random_min = 1 + outgoing_queue._multicast_delay_random_max = 1 + outgoing_queue.async_add(now, {info.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 1 - with unittest.mock.patch.object(multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (2, 2)): - outgoing_queue.async_add(now, {info.dns_pointer(): 
set()}) + outgoing_queue._multicast_delay_random_min = 2 + outgoing_queue._multicast_delay_random_max = 2 + outgoing_queue.async_add(now, {info.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 2 - with unittest.mock.patch.object( - multicast_outgoing_queue, "MULTICAST_DELAY_RANDOM_INTERVAL", (1000, 1000) - ): - outgoing_queue.async_add(now, {info2.dns_pointer(): set()}) - outgoing_queue.async_add(now, {info.dns_pointer(): set()}) + outgoing_queue._multicast_delay_random_min = 1000 + outgoing_queue._multicast_delay_random_max = 1000 + outgoing_queue.async_add(now, {info2.dns_pointer(): set()}) + outgoing_queue.async_add(now, {info.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 3 @@ -1676,6 +1681,9 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.Recor zc.add_listener(MyListener(), None) # type: ignore[arg-type] await asyncio.sleep(0) # flush out any call soons - assert "listeners passed to async_add_listener must inherit from RecordUpdateListener" in caplog.text + assert ( + "listeners passed to async_add_listener must inherit from RecordUpdateListener" in caplog.text + or "TypeError: Argument \'listener\' has incorrect type" in caplog.text + ) await aiozc.async_close() From d467a65257b13a45dab8b6a37aab2fe1976ba103 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 18:40:25 -0500 Subject: [PATCH 147/434] chore: fix duplicate packet test with cython (#1298) --- src/zeroconf/_listener.pxd | 7 ++++-- src/zeroconf/_listener.py | 26 +++++++++++++++------ tests/test_listener.py | 47 +++++++++++++++++++++++++------------- 3 files changed, 55 insertions(+), 25 deletions(-) diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index a49fe96a..3b1d6231 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -7,7 +7,7 @@ from ._utils.time cimport current_time_millis, millis_to_seconds cdef object log -cdef object logging_DEBUG +cdef object DEBUG_ENABLED cdef bint TYPE_CHECKING cdef cython.uint _MAX_MSG_ABSOLUTE @@ -27,7 +27,10 @@ cdef class AsyncListener: cdef public cython.dict _deferred cdef public cython.dict _timers - @cython.locals(now=cython.float, msg=DNSIncoming) + @cython.locals(now=cython.float, debug=cython.bint) cpdef datagram_received(self, cython.bytes bytes, cython.tuple addrs) + @cython.locals(msg=DNSIncoming) + cpdef _process_datagram_at_time(self, bint debug, cython.uint data_len, cython.float now, bytes data, cython.tuple addrs) + cdef _cancel_any_timers_for_addr(self, object addr) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 913c169f..c27d1b61 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -23,6 +23,7 @@ import asyncio import logging import random +from functools import partial from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union, cast from ._logger import QuietLogger, log @@ -40,8 +41,9 @@ _bytes = bytes _str = str _int = int +_float = float -logging_DEBUG = logging.DEBUG +DEBUG_ENABLED = partial(log.isEnabledFor, logging.DEBUG) class AsyncListener: @@ -80,9 +82,8 @@ def __init__(self, zc: 'Zeroconf') -> None: def datagram_received( self, data: _bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] ) -> None: - assert self.transport is not None data_len = len(data) - 
debug = log.isEnabledFor(logging_DEBUG) + debug = DEBUG_ENABLED() if data_len > _MAX_MSG_ABSOLUTE: # Guard against oversized packets to ensure bad implementations cannot overwhelm @@ -95,13 +96,22 @@ def datagram_received( _MAX_MSG_ABSOLUTE, ) return - now = current_time_millis() + self._process_datagram_at_time(debug, data_len, now, data, addrs) + + def _process_datagram_at_time( + self, + debug: bool, + data_len: _int, + now: _float, + data: _bytes, + addrs: Union[Tuple[str, int], Tuple[str, int, int, int]], + ) -> None: if ( self.data == data and (now - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL) < self.last_time and self.last_message is not None - and not self.last_message.has_qu_question() + and self.last_message.has_qu_question() is False ): # Guard against duplicate packets if debug: @@ -134,7 +144,7 @@ def datagram_received( self.data = data self.last_time = now self.last_message = msg - if msg.valid: + if msg.valid is True: if debug: log.debug( 'Received from %r:%r [socket %s]: %r (%d bytes) as [%r]', @@ -157,10 +167,12 @@ def datagram_received( ) return - if not msg.is_query(): + if msg.is_query() is False: self._record_manager.async_updates_from_response(msg) return + if TYPE_CHECKING: + assert self.transport is not None self.handle_query_or_defer(msg, addr, port, self.transport, v6_flow_scope) def handle_query_or_defer( diff --git a/tests/test_listener.py b/tests/test_listener.py index 914b4a13..dff01d78 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -160,62 +160,77 @@ def handle_query_or_defer( addrs = ("1.2.3.4", 43) - with patch.object(_listener, "current_time_millis") as _current_time_millis, patch.object( - listener, "handle_query_or_defer" - ) as _handle_query_or_defer: + with patch.object(listener, "handle_query_or_defer") as _handle_query_or_defer: start_time = current_time_millis() - _current_time_millis.return_value = start_time - listener.datagram_received(packet_with_qm_question, addrs) + listener._process_datagram_at_time( 
+ False, len(packet_with_qm_question), start_time, packet_with_qm_question, addrs + ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the same packet again and handle_query_or_defer should not fire - listener.datagram_received(packet_with_qm_question, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qm_question), start_time, packet_with_qm_question, addrs + ) _handle_query_or_defer.assert_not_called() _handle_query_or_defer.reset_mock() - # Now walk time forward 1000 seconds - _current_time_millis.return_value = start_time + 1000 + # Now walk time forward 1100 milliseconds + new_time = start_time + 1100 # Now call with the same packet again and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the different packet and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question2, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qm_question2), new_time, packet_with_qm_question2, addrs + ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the different packet and handle_query_or_defer should fire - listener.datagram_received(packet_with_qm_question, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the different packet with qu question and handle_query_or_defer should fire - listener.datagram_received(packet_with_qu_question, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qu_question), new_time, packet_with_qu_question, addrs + ) 
_handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call again with the same packet that has a qu question and handle_query_or_defer should fire - listener.datagram_received(packet_with_qu_question, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qu_question), new_time, packet_with_qu_question, addrs + ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() log.setLevel(logging.WARNING) # Call with the QM packet again - listener.datagram_received(packet_with_qm_question, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the same packet again and handle_query_or_defer should not fire - listener.datagram_received(packet_with_qm_question, addrs) + listener._process_datagram_at_time( + False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + ) _handle_query_or_defer.assert_not_called() _handle_query_or_defer.reset_mock() # Now call with garbage - listener.datagram_received(b'garbage', addrs) + listener._process_datagram_at_time(False, len(b'garbage'), new_time, b'garbage', addrs) _handle_query_or_defer.assert_not_called() _handle_query_or_defer.reset_mock() From 630b9aa4b8d50cf9527d66f5cced90887d98a7e3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 18:40:40 -0500 Subject: [PATCH 148/434] chore: fix test_dns_record_abc with cython (#1297) --- tests/test_dns.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/test_dns.py b/tests/test_dns.py index 08f805f0..4f7e0543 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -10,6 +10,8 @@ import unittest import unittest.mock +import pytest + import zeroconf as r from zeroconf import DNSHinfo, DNSText, ServiceInfo, const, current_time_millis from zeroconf._dns import DNSRRSet @@ -80,7 +82,8 @@ def test_dns_service_repr(self): def test_dns_record_abc(self): record = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL) self.assertRaises(r.AbstractMethodException, record.__eq__, record) - self.assertRaises(r.AbstractMethodException, record.write, None) + with pytest.raises((r.AbstractMethodException, TypeError)): + record.write(None) # type: ignore[arg-type] def test_dns_record_reset_ttl(self): record = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL) From edb4d0d0f57de09483291fae99e5c37eadd9bd88 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 18:54:46 -0500 Subject: [PATCH 149/434] chore: fix ServiceBrowser backoff test under cython (#1299) --- tests/services/test_browser.py | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 15a03598..268a9b20 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -487,7 +487,7 @@ def test_backoff(): start_time = time.monotonic() * 1000 initial_query_interval = _services_browser._BROWSER_TIME / 1000 - def current_time_millis(): + def _current_time_millis(): """Current system time in milliseconds""" return start_time + time_offset * 1000 @@ -496,19 +496,34 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): got_query.set() old_send(out, addr=addr, port=port, v6_flow_scope=v6_flow_scope) + class ServiceBrowserWithPatchedTime(_services_browser.ServiceBrowser): + def _async_start(self) -> None: + """Generate the next time and setup listeners. + + Must be called by uses of this base class after they + have finished setting their properties. 
+ """ + super()._async_start() + self.query_scheduler.start(_current_time_millis()) + + def _async_send_ready_queries_schedule_next(self): + if self.done or self.zc.done: + return + now = _current_time_millis() + self._async_send_ready_queries(now) + self._async_schedule_next(now) + # patch the zeroconf send # patch the zeroconf current_time_millis # patch the backoff limit to prevent test running forever with patch.object(zeroconf_browser, "async_send", send), patch.object( - _services_browser, "current_time_millis", current_time_millis - ), patch.object(_services_browser, "_BROWSER_BACKOFF_LIMIT", 10), patch.object( - _services_browser, "_FIRST_QUERY_DELAY_RANDOM_INTERVAL", (0, 0) - ): + _services_browser, "_BROWSER_BACKOFF_LIMIT", 10 + ), patch.object(_services_browser, "_FIRST_QUERY_DELAY_RANDOM_INTERVAL", (0, 0)): # dummy service callback def on_service_state_change(zeroconf, service_type, state_change, name): pass - browser = ServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + browser = ServiceBrowserWithPatchedTime(zeroconf_browser, type_, [on_service_state_change]) try: # Test that queries are sent at increasing intervals From 8a17f2053a89db4beca9e8c1de4640faf27726b4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 19:09:17 -0500 Subject: [PATCH 150/434] feat: speed up ServiceBrowsers with a pxd for the signal interface (#1289) --- build_ext.py | 1 + src/zeroconf/_services/__init__.pxd | 11 +++++++++++ src/zeroconf/_services/browser.pxd | 1 + src/zeroconf/_services/browser.py | 2 +- 4 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 src/zeroconf/_services/__init__.pxd diff --git a/build_ext.py b/build_ext.py index 2134f61d..ba80e52c 100644 --- a/build_ext.py +++ b/build_ext.py @@ -33,6 +33,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_handlers/record_manager.py", "src/zeroconf/_handlers/multicast_outgoing_queue.py", "src/zeroconf/_handlers/query_handler.py", + "src/zeroconf/_services/__init__.py", "src/zeroconf/_services/browser.py", "src/zeroconf/_services/info.py", "src/zeroconf/_services/registry.py", diff --git a/src/zeroconf/_services/__init__.pxd b/src/zeroconf/_services/__init__.pxd new file mode 100644 index 00000000..46a75f3c --- /dev/null +++ b/src/zeroconf/_services/__init__.pxd @@ -0,0 +1,11 @@ + +import cython + + +cdef class Signal: + + cdef list _handlers + +cdef class SignalRegistrationInterface: + + cdef list _handlers diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 8b77c80e..a844d333 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -5,6 +5,7 @@ from .._cache cimport DNSCache from .._protocol.outgoing cimport DNSOutgoing, DNSPointer, DNSQuestion, DNSRecord from .._updates cimport RecordUpdateListener from .._utils.time cimport current_time_millis, millis_to_seconds +from . 
cimport Signal, SignalRegistrationInterface cdef bint TYPE_CHECKING diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index ed482546..b0b1a079 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -405,7 +405,7 @@ def _enqueue_callback( state_change is SERVICE_STATE_CHANGE_ADDED or ( state_change is SERVICE_STATE_CHANGE_REMOVED - and self._pending_handlers.get(key) != SERVICE_STATE_CHANGE_ADDED + and self._pending_handlers.get(key) is not SERVICE_STATE_CHANGE_ADDED ) or (state_change is SERVICE_STATE_CHANGE_UPDATED and key not in self._pending_handlers) ): From c37ead4d7000607e81706a97b4cdffd80cf8cf99 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 4 Nov 2023 19:28:19 -0500 Subject: [PATCH 151/434] feat: speed up decoding labels from incoming data (#1291) --- src/zeroconf/_protocol/incoming.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index 37fc91e7..d71e2378 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -87,7 +87,7 @@ cdef class DNSIncoming: link_py_int=object, linked_labels=cython.list ) - cdef _decode_labels_at_offset(self, unsigned int off, cython.list labels, cython.set seen_pointers) + cdef cython.uint _decode_labels_at_offset(self, unsigned int off, cython.list labels, cython.set seen_pointers) cdef _read_header(self) From c2f99d902ad3d3ce09bf59cafb9f2e3c0400f63e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 19:32:57 -0500 Subject: [PATCH 152/434] chore: fix race in dns tests (#1300) --- tests/test_dns.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_dns.py b/tests/test_dns.py index 4f7e0543..0eac568d 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -149,13 +149,13 @@ def test_dns_record_is_stale(self): now = current_time_millis() assert record.is_stale(now) is False assert record.is_stale(now + (8 / 4.1 * 1000)) is False - assert record.is_stale(now + (8 / 2 * 1000)) is True + assert record.is_stale(now + (8 / 1.9 * 1000)) is True assert record.is_stale(now + (8 * 1000)) is True def test_dns_record_is_recent(self): now = current_time_millis() record = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, 8) - assert record.is_recent(now + (8 / 4.1 * 1000)) is True + assert record.is_recent(now + (8 / 4.2 * 1000)) is True assert record.is_recent(now + (8 / 3 * 1000)) is False assert record.is_recent(now + (8 / 2 * 1000)) is False assert record.is_recent(now + (8 * 1000)) is False From f1f0a2504afd4d29bc6b7cf715cd3cb81b9049f7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 4 Nov 2023 19:33:02 -0500 Subject: [PATCH 153/434] feat: speed up incoming packet processing with a memory view (#1290) --- src/zeroconf/_protocol/incoming.pxd | 1 + src/zeroconf/_protocol/incoming.py | 12 +++++++----- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index d71e2378..c39ab9a6 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -50,6 +50,7 @@ cdef class DNSIncoming: cdef public unsigned int flags cdef cython.uint offset cdef public bytes data + cdef const unsigned char [:] view cdef unsigned int _data_len cdef public cython.dict name_cache cdef public cython.list questions diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 5838657a..6a7451e7 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -78,6 +78,7 @@ class DNSIncoming: 'flags', 'offset', 'data', + 'view', '_data_len', 'name_cache', 'questions', @@ -105,6 +106,7 @@ def __init__( self.flags = 0 self.offset = 0 self.data = data + self.view = data self._data_len = len(data) self.name_cache: Dict[int, List[str]] = {} self.questions: List[DNSQuestion] = [] @@ -228,7 +230,7 @@ def _read_questions(self) -> None: def _read_character_string(self) -> str: """Reads a character string from the packet""" - length = self.data[self.offset] + length = self.view[self.offset] self.offset += 1 info = self.data[self.offset : self.offset + length].decode('utf-8', 'replace') self.offset += length @@ -334,8 +336,8 @@ def _read_bitmap(self, end: _int) -> List[int]: offset = self.offset offset_plus_one = offset + 1 offset_plus_two = offset + 2 - window = self.data[offset] - bitmap_length = self.data[offset_plus_one] + window = self.view[offset] + bitmap_length = self.view[offset_plus_one] bitmap_end = offset_plus_two + bitmap_length for i, byte in 
enumerate(self.data[offset_plus_two:bitmap_end]): for bit in range(0, 8): @@ -361,7 +363,7 @@ def _read_name(self) -> str: def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: Set[int]) -> int: # This is a tight loop that is called frequently, small optimizations can make a difference. while off < self._data_len: - length = self.data[off] + length = self.view[off] if length == 0: return off + DNS_COMPRESSION_HEADER_LEN @@ -377,7 +379,7 @@ def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: ) # We have a DNS compression pointer - link_data = self.data[off + 1] + link_data = self.view[off + 1] link = (length & 0x3F) * 256 + link_data link_py_int = link if link > self._data_len: From a910a2b17f92285e349800bd80ba564997f8b88b Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 5 Nov 2023 00:45:52 +0000 Subject: [PATCH 154/434] 0.120.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 8 ++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d91772e0..09ba7b93 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,14 @@ +## v0.120.0 (2023-11-05) + +### Feature + +* Speed up incoming packet processing with a memory view ([#1290](https://github.com/python-zeroconf/python-zeroconf/issues/1290)) ([`f1f0a25`](https://github.com/python-zeroconf/python-zeroconf/commit/f1f0a2504afd4d29bc6b7cf715cd3cb81b9049f7)) +* Speed up decoding labels from incoming data ([#1291](https://github.com/python-zeroconf/python-zeroconf/issues/1291)) ([`c37ead4`](https://github.com/python-zeroconf/python-zeroconf/commit/c37ead4d7000607e81706a97b4cdffd80cf8cf99)) +* Speed up ServiceBrowsers with a pxd for the signal interface ([#1289](https://github.com/python-zeroconf/python-zeroconf/issues/1289)) ([`8a17f20`](https://github.com/python-zeroconf/python-zeroconf/commit/8a17f2053a89db4beca9e8c1de4640faf27726b4)) + ## 
v0.119.0 (2023-10-18) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 85367cc7..95cfbd97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.119.0" +version = "0.120.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 92aa9aea..05141b33 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.119.0' +__version__ = '0.120.0' __license__ = 'LGPL' From d2af6a0978f5abe4f8bb70d3e29d9836d0fd77c4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 8 Nov 2023 08:30:46 -0600 Subject: [PATCH 155/434] feat: speed up record updates (#1301) --- build_ext.py | 1 + src/zeroconf/_cache.pxd | 6 +++--- src/zeroconf/_handlers/query_handler.py | 14 +++++++------- src/zeroconf/_record_update.pxd | 10 ++++++++++ src/zeroconf/_record_update.py | 21 +++++++++++++++++---- src/zeroconf/_services/browser.pxd | 7 +++++-- src/zeroconf/_services/browser.py | 15 ++++++++------- src/zeroconf/_services/info.pxd | 8 ++++++-- src/zeroconf/_services/info.py | 12 ++++++------ src/zeroconf/_updates.py | 2 +- tests/test_updates.py | 14 ++++++++++++++ 11 files changed, 78 insertions(+), 32 deletions(-) create mode 100644 src/zeroconf/_record_update.pxd diff --git a/build_ext.py b/build_ext.py index ba80e52c..d2f32685 100644 --- a/build_ext.py +++ b/build_ext.py @@ -26,6 +26,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_dns.py", "src/zeroconf/_cache.py", "src/zeroconf/_history.py", + "src/zeroconf/_record_update.py", "src/zeroconf/_listener.py", "src/zeroconf/_protocol/incoming.py", "src/zeroconf/_protocol/outgoing.py", diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd 
index cdba8176..1f94c21e 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -44,9 +44,9 @@ cdef class DNSCache: ) cpdef async_all_by_details(self, str name, object type_, object class_) - cpdef async_entries_with_name(self, str name) + cpdef cython.dict async_entries_with_name(self, str name) - cpdef async_entries_with_server(self, str name) + cpdef cython.dict async_entries_with_server(self, str name) @cython.locals( cached_entry=DNSRecord, @@ -57,7 +57,7 @@ cdef class DNSCache: records=cython.dict, entry=DNSRecord, ) - cpdef get_all_by_details(self, str name, object type_, object class_) + cpdef cython.list get_all_by_details(self, str name, object type_, object class_) @cython.locals( store=cython.dict, diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index cab11662..4e74aa5c 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -139,7 +139,7 @@ def _has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) maybe_entry = self._cache.async_get_unique(record) - return bool(maybe_entry and maybe_entry.is_recent(self._now)) + return bool(maybe_entry is not None and maybe_entry.is_recent(self._now) is True) def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: """Check if an answer was seen in the last second. 
@@ -149,7 +149,7 @@ def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) maybe_entry = self._cache.async_get_unique(record) - return bool(maybe_entry and self._now - maybe_entry.created < _ONE_SECOND) + return bool(maybe_entry is not None and self._now - maybe_entry.created < _ONE_SECOND) class QueryHandler: @@ -174,7 +174,7 @@ def _add_service_type_enumeration_query_answers( dns_pointer = DNSPointer( _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, 0.0 ) - if not known_answers.suppresses(dns_pointer): + if known_answers.suppresses(dns_pointer) is False: answer_set[dns_pointer] = set() def _add_pointer_answers( @@ -185,7 +185,7 @@ def _add_pointer_answers( # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.1. dns_pointer = service._dns_pointer(None) - if known_answers.suppresses(dns_pointer): + if known_answers.suppresses(dns_pointer) is True: continue answer_set[dns_pointer] = { service._dns_service(None), @@ -208,7 +208,7 @@ def _add_address_answers( seen_types.add(dns_address.type) if dns_address.type != type_: additionals.add(dns_address) - elif not known_answers.suppresses(dns_address): + elif known_answers.suppresses(dns_address) is False: answers.append(dns_address) missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types if answers: @@ -248,11 +248,11 @@ def _answer_question( # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.2. 
dns_service = service._dns_service(None) - if not known_answers.suppresses(dns_service): + if known_answers.suppresses(dns_service) is False: answer_set[dns_service] = service._get_address_and_nsec_records(None) if type_ in (_TYPE_TXT, _TYPE_ANY): dns_text = service._dns_text(None) - if not known_answers.suppresses(dns_text): + if known_answers.suppresses(dns_text) is False: answer_set[dns_text] = set() return answer_set diff --git a/src/zeroconf/_record_update.pxd b/src/zeroconf/_record_update.pxd new file mode 100644 index 00000000..d1b18cbe --- /dev/null +++ b/src/zeroconf/_record_update.pxd @@ -0,0 +1,10 @@ + +import cython + +from ._dns cimport DNSRecord + + +cdef class RecordUpdate: + + cdef public DNSRecord new + cdef public DNSRecord old diff --git a/src/zeroconf/_record_update.py b/src/zeroconf/_record_update.py index fbcacd5f..5a362534 100644 --- a/src/zeroconf/_record_update.py +++ b/src/zeroconf/_record_update.py @@ -20,11 +20,24 @@ USA """ -from typing import NamedTuple, Optional +from typing import Optional from ._dns import DNSRecord -class RecordUpdate(NamedTuple): - new: DNSRecord - old: Optional[DNSRecord] +class RecordUpdate: + + __slots__ = ("new", "old") + + def __init__(self, new: DNSRecord, old: Optional[DNSRecord] = None): + """RecordUpdate represents a change in a DNS record.""" + self.new = new + self.old = old + + def __getitem__(self, index: int) -> Optional[DNSRecord]: + """Get the new or old record.""" + if index == 0: + return self.new + elif index == 1: + return self.old + raise IndexError(index) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index a844d333..c9b98a42 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -3,6 +3,7 @@ import cython from .._cache cimport DNSCache from .._protocol.outgoing cimport DNSOutgoing, DNSPointer, DNSQuestion, DNSRecord +from .._record_update cimport RecordUpdate from .._updates cimport RecordUpdateListener from 
.._utils.time cimport current_time_millis, millis_to_seconds from . cimport Signal, SignalRegistrationInterface @@ -13,6 +14,7 @@ cdef object cached_possible_types cdef cython.uint _EXPIRE_REFRESH_TIME_PERCENT cdef cython.uint _TYPE_PTR cdef object SERVICE_STATE_CHANGE_ADDED, SERVICE_STATE_CHANGE_REMOVED, SERVICE_STATE_CHANGE_UPDATED +cdef cython.set _ADDRESS_RECORD_TYPES cdef class _DNSPointerOutgoingBucket: @@ -43,6 +45,7 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cdef public cython.set types cdef public object zc + cdef DNSCache _cache cdef object _loop cdef public object addr cdef public object port @@ -60,10 +63,10 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef _enqueue_callback(self, object state_change, object type_, object name) - @cython.locals(record=DNSRecord, cache=DNSCache, service=DNSRecord, pointer=DNSPointer) + @cython.locals(record_update=RecordUpdate, record=DNSRecord, cache=DNSCache, service=DNSRecord, pointer=DNSPointer) cpdef async_update_records(self, object zc, cython.float now, cython.list records) - cpdef _names_matching_types(self, object types) + cpdef cython.list _names_matching_types(self, object types) cpdef reschedule_type(self, object type_, object now, object next_time) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index b0b1a079..15af8d91 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -297,6 +297,7 @@ class _ServiceBrowserBase(RecordUpdateListener): __slots__ = ( 'types', 'zc', + '_cache', '_loop', 'addr', 'port', @@ -345,6 +346,7 @@ def __init__( # Will generate BadTypeInNameException on a bad name service_type_name(check_type_, strict=False) self.zc = zc + self._cache = zc.cache assert zc.loop is not None self._loop = zc.loop self.addr = addr @@ -421,8 +423,8 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record This method will be run in the event loop. 
""" for record_update in records: - record = record_update[0] - old_record = record_update[1] + record = record_update.new + old_record = record_update.old record_type = record.type if record_type is _TYPE_PTR: @@ -440,15 +442,14 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record continue # If its expired or already exists in the cache it cannot be updated. - if old_record or record.is_expired(now) is True: + if old_record is not None or record.is_expired(now) is True: continue if record_type in _ADDRESS_RECORD_TYPES: - cache = self.zc.cache + cache = self._cache + names = {service.name for service in cache.async_entries_with_server(record.name)} # Iterate through the DNSCache and callback any services that use this address - for type_, name in self._names_matching_types( - {service.name for service in cache.async_entries_with_server(record.name)} - ): + for type_, name in self._names_matching_types(names): self._enqueue_callback(SERVICE_STATE_CHANGE_UPDATED, type_, name) continue diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 2e516a9e..0461bf00 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -4,6 +4,7 @@ import cython from .._cache cimport DNSCache from .._dns cimport DNSAddress, DNSNsec, DNSPointer, DNSRecord, DNSService, DNSText from .._protocol.outgoing cimport DNSOutgoing +from .._record_update cimport RecordUpdate from .._updates cimport RecordUpdateListener from .._utils.time cimport current_time_millis @@ -56,7 +57,7 @@ cdef class ServiceInfo(RecordUpdateListener): cdef public cython.list _dns_address_cache cdef public cython.set _get_address_and_nsec_records_cache - @cython.locals(cache=DNSCache) + @cython.locals(record_update=RecordUpdate, update=bint, cache=DNSCache) cpdef async_update_records(self, object zc, cython.float now, cython.list records) @cython.locals(cache=DNSCache) @@ -76,7 +77,7 @@ cdef class ServiceInfo(RecordUpdateListener): 
dns_text_record=DNSText, dns_address_record=DNSAddress ) - cdef _process_record_threadsafe(self, object zc, DNSRecord record, cython.float now) + cdef bint _process_record_threadsafe(self, object zc, DNSRecord record, cython.float now) @cython.locals(cache=DNSCache) cdef cython.list _get_address_records_from_cache_by_type(self, object zc, object _type) @@ -109,3 +110,6 @@ cdef class ServiceInfo(RecordUpdateListener): cdef cython.set _get_address_and_nsec_records(self, object override_ttl) cpdef async_clear_cache(self) + + @cython.locals(cache=DNSCache) + cdef _generate_request_query(self, object zc, object now, object question_type) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index ee033c82..fab6b410 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -420,7 +420,7 @@ def _get_ip_addresses_from_cache_lifo( """Set IPv6 addresses from the cache.""" address_list: List[Union[IPv4Address, IPv6Address]] = [] for record in self._get_address_records_from_cache_by_type(zc, type): - if record.is_expired(now): + if record.is_expired(now) is True: continue ip_addr = _cached_ip_addresses_wrapper(record.address) if ip_addr is not None: @@ -463,7 +463,7 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo Returns True if a new record was added. 
""" - if record.is_expired(now): + if record.is_expired(now) is True: return False record_key = record.key @@ -779,7 +779,7 @@ async def async_request( now = current_time_millis() - if self._load_from_cache(zc, now): + if self._load_from_cache(zc, now) is True: return True if TYPE_CHECKING: @@ -795,7 +795,7 @@ async def async_request( if last <= now: return False if next_ <= now: - out = self.generate_request_query( + out = self._generate_request_query( zc, now, question_type or DNS_QUESTION_TYPE_QU if first_request else DNS_QUESTION_TYPE_QM, @@ -815,8 +815,8 @@ async def async_request( return True - def generate_request_query( - self, zc: 'Zeroconf', now: float_, question_type: Optional[DNSQuestionType] = None + def _generate_request_query( + self, zc: 'Zeroconf', now: float_, question_type: DNSQuestionType ) -> DNSOutgoing: """Generate the request query.""" out = DNSOutgoing(_FLAGS_QR_QUERY) diff --git a/src/zeroconf/_updates.py b/src/zeroconf/_updates.py index a117cc2b..42fa8285 100644 --- a/src/zeroconf/_updates.py +++ b/src/zeroconf/_updates.py @@ -68,7 +68,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record This method will be run in the event loop. """ for record in records: - self.update_record(zc, now, record[0]) + self.update_record(zc, now, record.new) def async_update_records_complete(self) -> None: """Called when a record update has completed for all handlers. 
diff --git a/tests/test_updates.py b/tests/test_updates.py index 46f5b50b..eb071adf 100644 --- a/tests/test_updates.py +++ b/tests/test_updates.py @@ -11,6 +11,7 @@ import zeroconf as r from zeroconf import Zeroconf, const +from zeroconf._record_update import RecordUpdate from zeroconf._services.browser import ServiceBrowser from zeroconf._services.info import ServiceInfo @@ -87,3 +88,16 @@ def on_service_state_change(zeroconf, service_type, state_change, name): zc.remove_listener(listener) zc.close() + + +def test_record_update_compat(): + """Test a RecordUpdate can fetch by index.""" + new = r.DNSPointer('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 'new') + old = r.DNSPointer('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 'old') + update = RecordUpdate(new, old) + assert update[0] == new + assert update[1] == old + with pytest.raises(IndexError): + update[2] + assert update.new == new + assert update.old == old From aff7276b6208bb4bb18ca0708db5600d502ea2bb Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 8 Nov 2023 14:40:43 +0000 Subject: [PATCH 156/434] 0.121.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 09ba7b93..e8f8d154 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.121.0 (2023-11-08) + +### Feature + +* Speed up record updates ([#1301](https://github.com/python-zeroconf/python-zeroconf/issues/1301)) ([`d2af6a0`](https://github.com/python-zeroconf/python-zeroconf/commit/d2af6a0978f5abe4f8bb70d3e29d9836d0fd77c4)) + ## v0.120.0 (2023-11-05) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 95cfbd97..a31b50c2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.120.0" +version = "0.121.0" description = "A pure python implementation of 
multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 05141b33..ca9d6980 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.120.0' +__version__ = '0.121.0' __license__ = 'LGPL' From 4fe58e2edc6da64a8ece0e2b16ec9ebfc5b3cd83 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 8 Nov 2023 16:15:47 -0600 Subject: [PATCH 157/434] feat: build aarch64 wheels (#1302) --- .github/workflows/ci.yml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 91ae876c..5e827beb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -146,12 +146,19 @@ jobs: ref: "${{ steps.release_tag.outputs.newest_release_tag }}" fetch-depth: 0 + - name: Set up QEMU + if: runner.os == 'Linux' + uses: docker/setup-qemu-action@v1 + with: + platforms: arm64 + - name: Build wheels uses: pypa/cibuildwheel@v2.16.2 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* CIBW_BEFORE_ALL_LINUX: apt-get install -y gcc || yum install -y gcc || apk add gcc + CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 From 0b94856839906d336258608526e2bacbd3ea3457 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 8 Nov 2023 22:32:03 +0000 Subject: [PATCH 158/434] 0.122.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8f8d154..1cc9ea4b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.122.0 (2023-11-08) + +### Feature + +* Build aarch64 wheels ([#1302](https://github.com/python-zeroconf/python-zeroconf/issues/1302)) 
([`4fe58e2`](https://github.com/python-zeroconf/python-zeroconf/commit/4fe58e2edc6da64a8ece0e2b16ec9ebfc5b3cd83)) + ## v0.121.0 (2023-11-08) ### Feature diff --git a/pyproject.toml b/pyproject.toml index a31b50c2..6f06f359 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.121.0" +version = "0.122.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ca9d6980..bf391d39 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.121.0' +__version__ = '0.122.0' __license__ = 'LGPL' From 5500591afbb4198655f0527788490758fce7600a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 8 Nov 2023 16:45:24 -0600 Subject: [PATCH 159/434] chore: bump setup-qemu-action to v3 (#1303) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5e827beb..2b5fca4c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -148,7 +148,7 @@ jobs: - name: Set up QEMU if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@v3 with: platforms: arm64 From 6c8f5a5dec2072aa6a8f889c5d8a4623ab392234 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 8 Nov 2023 22:12:00 -0600 Subject: [PATCH 160/434] fix: skip wheel builds for eol python and older python with aarch64 (#1304) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2b5fca4c..d2bc4959 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -156,7 +156,7 @@ jobs: uses: pypa/cibuildwheel@v2.16.2 # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 CIBW_BEFORE_ALL_LINUX: apt-get install -y gcc || yum install -y gcc || apk add gcc CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 3 From b1a8a071449709da60c25b62f6ee47714b70f927 Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 9 Nov 2023 04:21:20 +0000 Subject: [PATCH 161/434] 0.122.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1cc9ea4b..a5f3e88a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.122.1 (2023-11-09) + +### Fix + +* Skip wheel builds for eol python and older python with aarch64 ([#1304](https://github.com/python-zeroconf/python-zeroconf/issues/1304)) ([`6c8f5a5`](https://github.com/python-zeroconf/python-zeroconf/commit/6c8f5a5dec2072aa6a8f889c5d8a4623ab392234)) + ## v0.122.0 (2023-11-08) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 6f06f359..ba8243c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.122.0" +version = "0.122.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index bf391d39..36adc576 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.122.0' +__version__ = '0.122.1' __license__ = 'LGPL' From 7e884db4d958459e64257aba860dba2450db0687 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 8 Nov 2023 23:21:08 -0600 Subject: [PATCH 162/434] fix: do not build aarch64 wheels for PyPy (#1305) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d2bc4959..8f61d900 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -156,7 +156,7 @@ jobs: uses: pypa/cibuildwheel@v2.16.2 # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 CIBW_BEFORE_ALL_LINUX: apt-get install -y gcc || yum install -y gcc || apk add gcc CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 3 From 9b284aa4651975cb298364b17d62a67d7d899c5b Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 9 Nov 2023 05:30:20 +0000 Subject: [PATCH 163/434] 0.122.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a5f3e88a..ffef1cda 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.122.2 (2023-11-09) + +### Fix + +* Do not build aarch64 wheels for PyPy ([#1305](https://github.com/python-zeroconf/python-zeroconf/issues/1305)) ([`7e884db`](https://github.com/python-zeroconf/python-zeroconf/commit/7e884db4d958459e64257aba860dba2450db0687)) + ## v0.122.1 
(2023-11-09) ### Fix diff --git a/pyproject.toml b/pyproject.toml index ba8243c3..fb42b70a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.122.1" +version = "0.122.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 36adc576..3cf8bac2 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.122.1' +__version__ = '0.122.2' __license__ = 'LGPL' From 79aafb0acf7ca6b17976be7ede748008deada27b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 9 Nov 2023 07:31:41 -0600 Subject: [PATCH 164/434] fix: do not build musllinux aarch64 wheels to reduce release time (#1306) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8f61d900..da9db349 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -156,7 +156,7 @@ jobs: uses: pypa/cibuildwheel@v2.16.2 # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 CIBW_BEFORE_ALL_LINUX: apt-get install -y gcc || yum install -y gcc || apk add gcc CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 3 From 9ca9a57470b17cf683e40f4e397c7e260730545b Mon Sep 17 00:00:00 2001 From: github-actions Date: Thu, 9 Nov 2023 13:52:35 +0000 Subject: [PATCH 165/434] 0.122.3 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 
insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ffef1cda..29154b5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.122.3 (2023-11-09) + +### Fix + +* Do not build musllinux aarch64 wheels to reduce release time ([#1306](https://github.com/python-zeroconf/python-zeroconf/issues/1306)) ([`79aafb0`](https://github.com/python-zeroconf/python-zeroconf/commit/79aafb0acf7ca6b17976be7ede748008deada27b)) + ## v0.122.2 (2023-11-09) ### Fix diff --git a/pyproject.toml b/pyproject.toml index fb42b70a..8226a859 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.122.2" +version = "0.122.3" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 3cf8bac2..55370b2b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.122.2' +__version__ = '0.122.3' __license__ = 'LGPL' From 0701b8ab6009891cbaddaa1d17116d31fd1b2f78 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 11 Nov 2023 23:42:39 -0600 Subject: [PATCH 166/434] feat: speed up instances only used to lookup answers (#1307) --- src/zeroconf/_listener.pxd | 11 ++++++++ src/zeroconf/_listener.py | 12 ++++++--- src/zeroconf/_services/registry.pxd | 5 ++++ src/zeroconf/_services/registry.py | 10 +++++--- tests/test_core.py | 39 ++++++++++++++++++----------- tests/test_listener.py | 12 ++++++++- 6 files changed, 67 insertions(+), 22 deletions(-) diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 3b1d6231..ec877c78 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -3,6 +3,7 @@ import cython from ._handlers.record_manager cimport RecordManager from ._protocol.incoming cimport DNSIncoming +from ._services.registry cimport ServiceRegistry from ._utils.time cimport current_time_millis, millis_to_seconds @@ -18,6 +19,7 @@ cdef cython.uint _DUPLICATE_PACKET_SUPPRESSION_INTERVAL cdef class AsyncListener: cdef public object zc + cdef ServiceRegistry _registry cdef RecordManager _record_manager cdef public cython.bytes data cdef public cython.float last_time @@ -34,3 +36,12 @@ cdef class AsyncListener: cpdef _process_datagram_at_time(self, bint debug, cython.uint data_len, cython.float now, bytes data, cython.tuple addrs) cdef _cancel_any_timers_for_addr(self, object addr) + + cpdef handle_query_or_defer( + self, + DNSIncoming msg, + object addr, + object port, + object transport, + tuple v6_flow_scope + ) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index c27d1b61..07d059eb 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -57,6 +57,7 @@ class AsyncListener: __slots__ = ( 'zc', + '_registry', '_record_manager', 'data', 'last_time', @@ -69,6 +70,7 @@ class AsyncListener: def __init__(self, zc: 'Zeroconf') -> None: self.zc = zc + self._registry = zc.registry self._record_manager = zc.record_manager self.data: Optional[bytes] = None self.last_time: float = 0 @@ -171,6 
+173,10 @@ def _process_datagram_at_time( self._record_manager.async_updates_from_response(msg) return + if not self._registry.has_entries: + # If the registry is empty, we have no answers to give. + return + if TYPE_CHECKING: assert self.transport is not None self.handle_query_or_defer(msg, addr, port, self.transport, v6_flow_scope) @@ -178,10 +184,10 @@ def _process_datagram_at_time( def handle_query_or_defer( self, msg: DNSIncoming, - addr: str, - port: int, + addr: _str, + port: _int, transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + v6_flow_scope: Union[Tuple[()], Tuple[int, int]], ) -> None: """Deal with incoming query packets. Provides a response if possible.""" diff --git a/src/zeroconf/_services/registry.pxd b/src/zeroconf/_services/registry.pxd index 1d0562c3..6f9017db 100644 --- a/src/zeroconf/_services/registry.pxd +++ b/src/zeroconf/_services/registry.pxd @@ -9,6 +9,7 @@ cdef class ServiceRegistry: cdef cython.dict _services cdef public cython.dict types cdef public cython.dict servers + cdef public bint has_entries @cython.locals( record_list=cython.list, @@ -17,6 +18,10 @@ cdef class ServiceRegistry: cdef _add(self, ServiceInfo info) + @cython.locals( + info=ServiceInfo, + old_service_info=ServiceInfo + ) cdef _remove(self, cython.list infos) cpdef ServiceInfo async_get_info_name(self, str name) diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index e9dc4a62..261e8e9c 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -35,7 +35,7 @@ class ServiceRegistry: the event loop as it is not thread safe. 
""" - __slots__ = ("_services", "types", "servers") + __slots__ = ("_services", "types", "servers", "has_entries") def __init__( self, @@ -44,6 +44,7 @@ def __init__( self._services: Dict[str, ServiceInfo] = {} self.types: Dict[str, List] = {} self.servers: Dict[str, List] = {} + self.has_entries: bool = False def async_add(self, info: ServiceInfo) -> None: """Add a new service to the registry.""" @@ -95,14 +96,17 @@ def _add(self, info: ServiceInfo) -> None: self._services[info.key] = info self.types.setdefault(info.type.lower(), []).append(info.key) self.servers.setdefault(info.server_key, []).append(info.key) + self.has_entries = True def _remove(self, infos: List[ServiceInfo]) -> None: """Remove a services under the lock.""" for info in infos: - if info.key not in self._services: + old_service_info = self._services.get(info.key) + if old_service_info is None: continue - old_service_info = self._services[info.key] assert old_service_info.server_key is not None self.types[old_service_info.type.lower()].remove(info.key) self.servers[old_service_info.server_key].remove(info.key) del self._services[info.key] + + self.has_entries = bool(self._services) diff --git a/tests/test_core.py b/tests/test_core.py index 4bce6db9..de4b2ef5 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -12,12 +12,10 @@ import time import unittest import unittest.mock -from typing import cast -from unittest.mock import patch +from typing import Tuple, Union, cast +from unittest.mock import Mock, patch if sys.version_info[:3][1] < 8: - from unittest.mock import Mock - AsyncMock = Mock else: from unittest.mock import AsyncMock @@ -26,6 +24,8 @@ import zeroconf as r from zeroconf import NotRunningException, Zeroconf, const, current_time_millis +from zeroconf._listener import AsyncListener, _WrappedTransport +from zeroconf._protocol.incoming import DNSIncoming from zeroconf.asyncio import AsyncZeroconf from . 
import _clear_cache, _inject_response, _wait_for_start, has_working_ipv6 @@ -45,10 +45,19 @@ def teardown_module(): log.setLevel(original_logging_level) -def threadsafe_query(zc, protocol, *args): +def threadsafe_query( + zc: 'Zeroconf', + protocol: 'AsyncListener', + msg: DNSIncoming, + addr: str, + port: int, + transport: _WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]], +) -> None: async def make_query(): - protocol.handle_query_or_defer(*args) + protocol.handle_query_or_defer(msg, addr, port, transport, v6_flow_scope) + assert zc.loop is not None asyncio.run_coroutine_threadsafe(make_query(), zc.loop).result() @@ -476,28 +485,28 @@ def test_tc_bit_defers(): next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred assert source_ip in protocol._timers next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred assert source_ip in protocol._timers - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred assert source_ip in protocol._timers next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred assert source_ip in protocol._timers 
next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert source_ip not in protocol._deferred assert source_ip not in protocol._timers @@ -555,20 +564,20 @@ def test_tc_bit_defers_last_response_missing(): next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred timer1 = protocol._timers[source_ip] next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred timer2 = protocol._timers[source_ip] assert timer1.cancelled() assert timer2 != timer1 # Send the same packet again to similar multi interfaces - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred assert source_ip in protocol._timers timer3 = protocol._timers[source_ip] @@ -577,7 +586,7 @@ def test_tc_bit_defers_last_response_missing(): next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) - threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, None) + threadsafe_query(zc, protocol, next_packet, source_ip, const._MDNS_PORT, Mock(), ()) assert protocol._deferred[source_ip] == expected_deferred assert source_ip in protocol._timers timer4 = protocol._timers[source_ip] diff --git a/tests/test_listener.py 
b/tests/test_listener.py index dff01d78..bd802273 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -10,7 +10,14 @@ from unittest.mock import MagicMock, patch import zeroconf as r -from zeroconf import Zeroconf, _engine, _listener, const, current_time_millis +from zeroconf import ( + ServiceInfo, + Zeroconf, + _engine, + _listener, + const, + current_time_millis, +) from zeroconf._protocol import outgoing from zeroconf._protocol.incoming import DNSIncoming @@ -125,6 +132,9 @@ def test_guard_against_duplicate_packets(): These packets can quickly overwhelm the system. """ zc = Zeroconf(interfaces=['127.0.0.1']) + zc.registry.async_add( + ServiceInfo("_http._tcp.local.", "Test._http._tcp.local.", server="Test._http._tcp.local.", port=4) + ) zc.question_history = QuestionHistoryWithoutSuppression() class SubListener(_listener.AsyncListener): From d793e1365e351858380d5b7e4bd74399f01f4bbd Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 12 Nov 2023 05:53:27 +0000 Subject: [PATCH 167/434] 0.123.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 29154b5c..1b987706 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.123.0 (2023-11-12) + +### Feature + +* Speed up instances only used to lookup answers ([#1307](https://github.com/python-zeroconf/python-zeroconf/issues/1307)) ([`0701b8a`](https://github.com/python-zeroconf/python-zeroconf/commit/0701b8ab6009891cbaddaa1d17116d31fd1b2f78)) + ## v0.122.3 (2023-11-09) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 8226a859..a735b5fb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.122.3" +version = "0.123.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", 
"Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 55370b2b..2e9aad9f 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.122.3' +__version__ = '0.123.0' __license__ = 'LGPL' From 56ef90865189c01d2207abcc5e2efe3a7a022fa1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 12 Nov 2023 12:32:57 -0600 Subject: [PATCH 168/434] feat: small speed up to process incoming packets (#1309) --- src/zeroconf/_core.py | 2 +- src/zeroconf/_listener.pxd | 10 ++++++++++ src/zeroconf/_listener.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 40375484..7f60a695 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -573,7 +573,7 @@ def handle_assembled_query( addr: str, port: int, transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + v6_flow_scope: Union[Tuple[()], Tuple[int, int]], ) -> None: """Respond to a (re)assembled query. 
diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index ec877c78..729e0de6 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -37,6 +37,7 @@ cdef class AsyncListener: cdef _cancel_any_timers_for_addr(self, object addr) + @cython.locals(incoming=DNSIncoming, deferred=list) cpdef handle_query_or_defer( self, DNSIncoming msg, @@ -45,3 +46,12 @@ cdef class AsyncListener: object transport, tuple v6_flow_scope ) + + cpdef _respond_query( + self, + object msg, + object addr, + object port, + object transport, + tuple v6_flow_scope + ) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 07d059eb..700029e1 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -220,7 +220,7 @@ def _respond_query( addr: _str, port: _int, transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + v6_flow_scope: Union[Tuple[()], Tuple[int, int]], ) -> None: """Respond to a query and reassemble any truncated deferred packets.""" self._cancel_any_timers_for_addr(addr) From ce98cb8a06f20c49cebd5691d464f3caa803f8cf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 12 Nov 2023 12:35:45 -0600 Subject: [PATCH 169/434] chore(deps): bump cython to >= 3.0.5 (#1310) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a735b5fb..87b97134 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -151,7 +151,7 @@ ignore_errors = true [build-system] # 1.5.2 required for https://github.com/python-poetry/poetry/issues/7505 -requires = ['setuptools>=65.4.1', 'wheel', 'Cython', "poetry-core>=1.5.2"] +requires = ['setuptools>=65.4.1', 'wheel', 'Cython>=3.0.5', "poetry-core>=1.5.2"] build-backend = "poetry.core.masonry.api" [tool.codespell] From 605dc9ccd843a535802031f051b3d93310186ad1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 12 Nov 2023 12:50:48 -0600 Subject: [PATCH 170/434] feat: avoid decoding known answers if we have no answers to give (#1308) --- src/zeroconf/_core.py | 6 +- src/zeroconf/_handlers/answers.py | 8 + .../_handlers/multicast_outgoing_queue.py | 4 +- src/zeroconf/_handlers/query_handler.pxd | 33 ++- src/zeroconf/_handlers/query_handler.py | 203 +++++++++++++----- src/zeroconf/_record_update.py | 1 - tests/test_handlers.py | 89 +++++++- 7 files changed, 270 insertions(+), 74 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 7f60a695..5827e2d5 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -577,15 +577,17 @@ def handle_assembled_query( ) -> None: """Respond to a (re)assembled query. - If the protocol recieved packets with the TC bit set, it will + If the protocol received packets with the TC bit set, it will wait a bit for the rest of the packets and only call handle_assembled_query once it has a complete set of packets or the timer expires. If the TC bit is not set, a single packet will be in packets. 
""" - now = packets[0].now ucast_source = port != _MDNS_PORT question_answers = self.query_handler.async_response(packets, ucast_source) + if not question_answers: + return + now = packets[0].now if question_answers.ucast: questions = packets[0].questions id_ = packets[0].id diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index 6ba502ac..a2dbd66a 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -59,6 +59,14 @@ def __init__( self.mcast_aggregate = mcast_aggregate self.mcast_aggregate_last_second = mcast_aggregate_last_second + def __repr__(self) -> str: + """Return a string representation of this QuestionAnswers.""" + return ( + f'QuestionAnswers(ucast={self.ucast}, mcast_now={self.mcast_now}, ' + f'mcast_aggregate={self.mcast_aggregate}, ' + f'mcast_aggregate_last_second={self.mcast_aggregate_last_second})' + ) + class AnswerGroup: """A group of answers scheduled to be sent at the same time.""" diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py b/src/zeroconf/_handlers/multicast_outgoing_queue.py index 1d398d73..23288d18 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.py +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -77,7 +77,7 @@ def async_add(self, now: _float, answers: _AnswerWithAdditionalsType) -> None: # If we calculate a random delay for the send after time # that is less than the last group scheduled to go out, # we instead add the answers to the last group as this - # allows aggregating additonal responses + # allows aggregating additional responses last_group = self.queue[-1] if send_after <= last_group.send_after: last_group.answers.update(answers) @@ -116,7 +116,7 @@ def async_ready(self) -> None: # be sure we schedule them to go out later loop.call_at(loop.time() + millis_to_seconds(self.queue[0].send_after - now), self.async_ready) - if answers: + if answers: # pragma: no branch # If we have the same answer scheduled to go out, remove 
them self._remove_answers_from_queue(answers) zc.async_send(construct_outgoing_multicast_answers(answers)) diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index ff970d76..8c42144c 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -18,6 +18,23 @@ cdef cython.set _ADDRESS_RECORD_TYPES cdef object IPVersion, _IPVersion_ALL cdef object _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL +cdef unsigned int _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION +cdef unsigned int _ANSWER_STRATEGY_POINTER +cdef unsigned int _ANSWER_STRATEGY_ADDRESS +cdef unsigned int _ANSWER_STRATEGY_SERVICE +cdef unsigned int _ANSWER_STRATEGY_TEXT + +cdef list _EMPTY_SERVICES_LIST +cdef list _EMPTY_TYPES_LIST + +cdef class _AnswerStrategy: + + cdef public DNSQuestion question + cdef public unsigned int strategy_type + cdef public list types + cdef public list services + + cdef class _QueryResponse: cdef bint _is_probe @@ -53,24 +70,30 @@ cdef class QueryHandler: cdef QuestionHistory question_history @cython.locals(service=ServiceInfo) - cdef _add_service_type_enumeration_query_answers(self, cython.dict answer_set, DNSRRSet known_answers) + cdef _add_service_type_enumeration_query_answers(self, list types, cython.dict answer_set, DNSRRSet known_answers) @cython.locals(service=ServiceInfo) - cdef _add_pointer_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers) + cdef _add_pointer_answers(self, list services, cython.dict answer_set, DNSRRSet known_answers) @cython.locals(service=ServiceInfo, dns_address=DNSAddress) - cdef _add_address_answers(self, str lower_name, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) + cdef _add_address_answers(self, list services, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) @cython.locals(question_lower_name=str, type_=cython.uint, service=ServiceInfo) - cdef cython.dict _answer_question(self, DNSQuestion question, 
DNSRRSet known_answers) + cdef cython.dict _answer_question(self, DNSQuestion question, unsigned int strategy_type, list types, list services, DNSRRSet known_answers) @cython.locals( msg=DNSIncoming, + msgs=list, + strategy=_AnswerStrategy, question=DNSQuestion, answer_set=cython.dict, known_answers=DNSRRSet, known_answers_set=cython.set, + is_unicast=bint, is_probe=object, - now=object + now=float ) cpdef async_response(self, cython.list msgs, cython.bint unicast_source) + + @cython.locals(name=str, question_lower_name=str) + cdef _get_answer_strategies(self, DNSQuestion question) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index 4e74aa5c..0af72f4c 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -20,13 +20,13 @@ USA """ - from typing import TYPE_CHECKING, List, Optional, Set, cast from .._cache import DNSCache, _UniqueRecordsType from .._dns import DNSAddress, DNSPointer, DNSQuestion, DNSRecord, DNSRRSet from .._history import QuestionHistory from .._protocol.incoming import DNSIncoming +from .._services.info import ServiceInfo from .._services.registry import ServiceRegistry from .._utils.net import IPVersion from ..const import ( @@ -47,11 +47,39 @@ _RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} +_EMPTY_SERVICES_LIST: List[ServiceInfo] = [] +_EMPTY_TYPES_LIST: List[str] = [] + _IPVersion_ALL = IPVersion.All _int = int +_ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION = 0 +_ANSWER_STRATEGY_POINTER = 1 +_ANSWER_STRATEGY_ADDRESS = 2 +_ANSWER_STRATEGY_SERVICE = 3 +_ANSWER_STRATEGY_TEXT = 4 + + +class _AnswerStrategy: + + __slots__ = ("question", "strategy_type", "types", "services") + + def __init__( + self, + question: DNSQuestion, + strategy_type: _int, + types: List[str], + services: List[ServiceInfo], + ) -> None: + """Create an answer strategy.""" + self.question = question + self.strategy_type = strategy_type + self.types = types + 
self.services = services + + class _QueryResponse: """A pair for unicast and multicast DNSOutgoing responses.""" @@ -164,13 +192,13 @@ def __init__(self, registry: ServiceRegistry, cache: DNSCache, question_history: self.question_history = question_history def _add_service_type_enumeration_query_answers( - self, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet + self, types: List[str], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet ) -> None: """Provide an answer to a service type enumeration query. https://datatracker.ietf.org/doc/html/rfc6763#section-9 """ - for stype in self.registry.async_get_types(): + for stype in types: dns_pointer = DNSPointer( _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, 0.0 ) @@ -178,10 +206,10 @@ def _add_service_type_enumeration_query_answers( answer_set[dns_pointer] = set() def _add_pointer_answers( - self, lower_name: str, answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet + self, services: List[ServiceInfo], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet ) -> None: """Answer PTR/ANY question.""" - for service in self.registry.async_get_infos_type(lower_name): + for service in services: # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.1. 
dns_pointer = service._dns_pointer(None) @@ -190,17 +218,18 @@ def _add_pointer_answers( answer_set[dns_pointer] = { service._dns_service(None), service._dns_text(None), - } | service._get_address_and_nsec_records(None) + *service._get_address_and_nsec_records(None), + } def _add_address_answers( self, - lower_name: str, + services: List[ServiceInfo], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, type_: _int, ) -> None: """Answer A/AAAA/ANY question.""" - for service in self.registry.async_get_infos_server(lower_name): + for service in services: answers: List[DNSAddress] = [] additionals: Set[DNSRecord] = set() seen_types: Set[int] = set() @@ -224,75 +253,135 @@ def _add_address_answers( def _answer_question( self, question: DNSQuestion, + strategy_type: _int, + types: List[str], + services: List[ServiceInfo], known_answers: DNSRRSet, ) -> _AnswerWithAdditionalsType: """Answer a question.""" answer_set: _AnswerWithAdditionalsType = {} - question_lower_name = question.name.lower() - type_ = question.type - - if type_ == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: - self._add_service_type_enumeration_query_answers(answer_set, known_answers) - return answer_set - - if type_ in (_TYPE_PTR, _TYPE_ANY): - self._add_pointer_answers(question_lower_name, answer_set, known_answers) - if type_ in (_TYPE_A, _TYPE_AAAA, _TYPE_ANY): - self._add_address_answers(question_lower_name, answer_set, known_answers, type_) - - if type_ in (_TYPE_SRV, _TYPE_TXT, _TYPE_ANY): - service = self.registry.async_get_info_name(question_lower_name) - if service is not None: - if type_ in (_TYPE_SRV, _TYPE_ANY): - # Add recommended additional answers according to - # https://tools.ietf.org/html/rfc6763#section-12.2. 
- dns_service = service._dns_service(None) - if known_answers.suppresses(dns_service) is False: - answer_set[dns_service] = service._get_address_and_nsec_records(None) - if type_ in (_TYPE_TXT, _TYPE_ANY): - dns_text = service._dns_text(None) - if known_answers.suppresses(dns_text) is False: - answer_set[dns_text] = set() + if strategy_type == _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION: + self._add_service_type_enumeration_query_answers(types, answer_set, known_answers) + elif strategy_type == _ANSWER_STRATEGY_POINTER: + self._add_pointer_answers(services, answer_set, known_answers) + elif strategy_type == _ANSWER_STRATEGY_ADDRESS: + self._add_address_answers(services, answer_set, known_answers, question.type) + elif strategy_type == _ANSWER_STRATEGY_SERVICE: + # Add recommended additional answers according to + # https://tools.ietf.org/html/rfc6763#section-12.2. + service = services[0] + dns_service = service._dns_service(None) + if known_answers.suppresses(dns_service) is False: + answer_set[dns_service] = service._get_address_and_nsec_records(None) + elif strategy_type == _ANSWER_STRATEGY_TEXT: # pragma: no branch + service = services[0] + dns_text = service._dns_text(None) + if known_answers.suppresses(dns_text) is False: + answer_set[dns_text] = set() return answer_set def async_response( # pylint: disable=unused-argument self, msgs: List[DNSIncoming], ucast_source: bool - ) -> QuestionAnswers: + ) -> Optional[QuestionAnswers]: """Deal with incoming query packets. Provides a response if possible. This function must be run in the event loop as it is not threadsafe. """ - answers: List[DNSRecord] = [] + strategies: List[_AnswerStrategy] = [] + for msg in msgs: + for question in msg.questions: + strategies.extend(self._get_answer_strategies(question)) + + if not strategies: + # We have no way to answer the question because we have + # nothing in the ServiceRegistry that matches or we do not + # understand the question. 
+ return None + is_probe = False - msg = msgs[0] questions = msg.questions - now = msg.now + # Only decode known answers if we are not a probe and we have + # at least one answer strategy + answers: List[DNSRecord] = [] for msg in msgs: - if msg.is_probe() is False: - answers.extend(msg.answers()) - else: + if msg.is_probe() is True: is_probe = True + else: + answers.extend(msg.answers()) + + msg = msgs[0] + query_res = _QueryResponse(self.cache, questions, is_probe, msg.now) known_answers = DNSRRSet(answers) - query_res = _QueryResponse(self.cache, questions, is_probe, now) known_answers_set: Optional[Set[DNSRecord]] = None - - for msg in msgs: - for question in msg.questions: - if not question.unique: # unique and unicast are the same flag - if not known_answers_set: # pragma: no branch - known_answers_set = known_answers.lookup_set() - self.question_history.add_question_at_time(question, now, known_answers_set) - answer_set = self._answer_question(question, known_answers) - if not ucast_source and question.unique: # unique and unicast are the same flag - query_res.add_qu_question_response(answer_set) - continue - if ucast_source: - query_res.add_ucast_question_response(answer_set) - # We always multicast as well even if its a unicast - # source as long as we haven't done it recently (75% of ttl) - query_res.add_mcast_question_response(answer_set) + now = msg.now + for strategy in strategies: + question = strategy.question + is_unicast = question.unique is True # unique and unicast are the same flag + if not is_unicast: + if known_answers_set is None: # pragma: no branch + known_answers_set = known_answers.lookup_set() + self.question_history.add_question_at_time(question, now, known_answers_set) + answer_set = self._answer_question( + question, strategy.strategy_type, strategy.types, strategy.services, known_answers + ) + if not ucast_source and is_unicast: + query_res.add_qu_question_response(answer_set) + continue + if ucast_source: + 
query_res.add_ucast_question_response(answer_set) + # We always multicast as well even if its a unicast + # source as long as we haven't done it recently (75% of ttl) + query_res.add_mcast_question_response(answer_set) return query_res.answers() + + def _get_answer_strategies( + self, + question: DNSQuestion, + ) -> List[_AnswerStrategy]: + """Collect strategies to answer a question.""" + name = question.name + question_lower_name = name.lower() + type_ = question.type + strategies: List[_AnswerStrategy] = [] + + if type_ == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: + types = self.registry.async_get_types() + if types: + strategies.append( + _AnswerStrategy( + question, _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION, types, _EMPTY_SERVICES_LIST + ) + ) + return strategies + + if type_ in (_TYPE_PTR, _TYPE_ANY): + services = self.registry.async_get_infos_type(question_lower_name) + if services: + strategies.append( + _AnswerStrategy(question, _ANSWER_STRATEGY_POINTER, _EMPTY_TYPES_LIST, services) + ) + + if type_ in (_TYPE_A, _TYPE_AAAA, _TYPE_ANY): + services = self.registry.async_get_infos_server(question_lower_name) + if services: + strategies.append( + _AnswerStrategy(question, _ANSWER_STRATEGY_ADDRESS, _EMPTY_TYPES_LIST, services) + ) + + if type_ in (_TYPE_SRV, _TYPE_TXT, _TYPE_ANY): + service = self.registry.async_get_info_name(question_lower_name) + if service is not None: + if type_ in (_TYPE_SRV, _TYPE_ANY): + strategies.append( + _AnswerStrategy(question, _ANSWER_STRATEGY_SERVICE, _EMPTY_TYPES_LIST, [service]) + ) + if type_ in (_TYPE_TXT, _TYPE_ANY): + strategies.append( + _AnswerStrategy(question, _ANSWER_STRATEGY_TEXT, _EMPTY_TYPES_LIST, [service]) + ) + + return strategies diff --git a/src/zeroconf/_record_update.py b/src/zeroconf/_record_update.py index 5a362534..8e0e4bdb 100644 --- a/src/zeroconf/_record_update.py +++ b/src/zeroconf/_record_update.py @@ -26,7 +26,6 @@ class RecordUpdate: - __slots__ = ("new", "old") def 
__init__(self, new: DNSRecord, old: Optional[DNSRecord] = None): diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 13fe3a51..1a1066fa 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -107,6 +107,7 @@ def _process_outgoing_packet(out): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers _process_outgoing_packet(construct_outgoing_multicast_answers(question_answers.mcast_aggregate)) # The additonals should all be suppresed since they are all in the answers section @@ -145,6 +146,7 @@ def _process_outgoing_packet(out): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers _process_outgoing_packet(construct_outgoing_multicast_answers(question_answers.mcast_aggregate)) # There will be one NSEC additional to indicate the lack of AAAA record @@ -244,6 +246,7 @@ def test_ptr_optimization(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -260,6 +263,7 @@ def test_ptr_optimization(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert not question_answers.mcast_aggregate_last_second @@ -305,6 +309,7 @@ def test_any_query_for_ptr(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers mcast_answers = list(question_answers.mcast_aggregate) assert mcast_answers[0].name == type_ assert mcast_answers[0].alias == registration_name # type: ignore[attr-defined] 
@@ -332,6 +337,7 @@ def test_aaaa_query(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers mcast_answers = list(question_answers.mcast_now) assert mcast_answers[0].address == ipv6_address # type: ignore[attr-defined] # unregister @@ -358,6 +364,7 @@ def test_aaaa_query_upper_case(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers mcast_answers = list(question_answers.mcast_now) assert mcast_answers[0].address == ipv6_address # type: ignore[attr-defined] # unregister @@ -391,6 +398,7 @@ def test_a_and_aaaa_record_fate_sharing(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers additionals = set().union(*question_answers.mcast_now.values()) assert aaaa_record in question_answers.mcast_now assert a_record in additionals @@ -403,6 +411,7 @@ def test_a_and_aaaa_record_fate_sharing(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers additionals = set().union(*question_answers.mcast_now.values()) assert a_record in question_answers.mcast_now assert aaaa_record in additionals @@ -437,6 +446,7 @@ def test_unicast_response(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], True ) + assert question_answers for answers in (question_answers.ucast, question_answers.mcast_aggregate): has_srv = has_txt = has_a = has_aaaa = has_nsec = False nbr_additionals = 0 @@ -486,6 +496,7 @@ async def test_probe_answered_immediately(): question_answers = 
zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second @@ -499,6 +510,7 @@ async def test_probe_answered_immediately(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert question_answers.ucast assert question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -528,6 +540,7 @@ async def test_probe_answered_immediately_with_uppercase_name(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second @@ -541,6 +554,7 @@ async def test_probe_answered_immediately_with_uppercase_name(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert question_answers.ucast assert question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -607,6 +621,7 @@ def _validate_complete_response(answers): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers _validate_complete_response(question_answers.ucast) assert not question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -622,6 +637,7 @@ def _validate_complete_response(answers): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate @@ -637,6 +653,7 @@ def _validate_complete_response(answers): 
question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers _validate_complete_response(question_answers.ucast) _validate_complete_response(question_answers.mcast_now) @@ -652,6 +669,7 @@ def _validate_complete_response(answers): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.mcast_now assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second @@ -681,6 +699,7 @@ def test_known_answer_supression(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert question_answers.mcast_aggregate @@ -692,6 +711,7 @@ def test_known_answer_supression(): generated.add_answer_at_time(info.dns_pointer(), now) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -703,6 +723,7 @@ def test_known_answer_supression(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -715,6 +736,7 @@ def test_known_answer_supression(): generated.add_answer_at_time(dns_address, now) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast 
assert not question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -728,6 +750,7 @@ def test_known_answer_supression(): generated.add_answer_at_time(dns_address, now) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast expected_nsec_record = cast(r.DNSNsec, list(question_answers.mcast_now)[0]) assert const._TYPE_A not in expected_nsec_record.rdtypes @@ -741,6 +764,7 @@ def test_known_answer_supression(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -752,6 +776,7 @@ def test_known_answer_supression(): generated.add_answer_at_time(info.dns_service(), now) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -763,6 +788,7 @@ def test_known_answer_supression(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert question_answers.mcast_aggregate @@ -774,6 +800,7 @@ def test_known_answer_supression(): generated.add_answer_at_time(info.dns_text(), now) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert not 
question_answers.mcast_aggregate @@ -827,6 +854,7 @@ def test_multi_packet_known_answer_supression(): packets = generated.packets() assert len(packets) > 1 question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -868,6 +896,7 @@ def test_known_answer_supression_service_type_enumeration_query(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert question_answers.mcast_aggregate @@ -898,6 +927,7 @@ def test_known_answer_supression_service_type_enumeration_query(): ) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert not question_answers.mcast_aggregate @@ -938,6 +968,7 @@ def test_upper_case_enumeration_query(): generated.add_question(question) packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now assert question_answers.mcast_aggregate @@ -948,6 +979,19 @@ def test_upper_case_enumeration_query(): zc.close() +def test_enumeration_query_with_no_registered_services(): + zc = Zeroconf(interfaces=['127.0.0.1']) + _clear_cache(zc) + generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) + question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN) + generated.add_question(question) + packets = generated.packets() + question_answers = 
zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert not question_answers + # unregister + zc.close() + + # This test uses asyncio because it needs to access the cache directly # which is not threadsafe @pytest.mark.asyncio @@ -1000,6 +1044,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.mcast_now assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second @@ -1024,6 +1069,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.mcast_now assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second @@ -1047,6 +1093,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second @@ -1075,6 +1122,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) + assert question_answers assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second @@ -1235,8 +1283,22 @@ async def test_questions_query_handler_populates_the_question_history_from_qm_qu now = current_time_millis() _clear_cache(zc) + aiozc.zeroconf.registry.async_add( + ServiceInfo( + "_hap._tcp.local.", + "other._hap._tcp.local.", + 80, + 0, + 0, + {"md": "known"}, + 
"ash-2.local.", + addresses=[socket.inet_aton("1.2.3.4")], + ) + ) + services = aiozc.zeroconf.registry.async_get_infos_type("_hap._tcp.local.") + assert len(services) == 1 generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) + question = r.DNSQuestion("_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN) question.unicast = False known_answer = r.DNSPointer( "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'known-to-other._hap._tcp.local.' @@ -1246,9 +1308,10 @@ async def test_questions_query_handler_populates_the_question_history_from_qm_qu now = r.current_time_millis() packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now - assert not question_answers.mcast_aggregate + assert question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second assert zc.question_history.suppresses(question, now, {known_answer}) @@ -1261,20 +1324,32 @@ async def test_questions_query_handler_does_not_put_qu_questions_in_history(): zc = aiozc.zeroconf now = current_time_millis() _clear_cache(zc) - + info = ServiceInfo( + "_hap._tcp.local.", + "qu._hap._tcp.local.", + 80, + 0, + 0, + {"md": "known"}, + "ash-2.local.", + addresses=[socket.inet_aton("1.2.3.4")], + ) + aiozc.zeroconf.registry.async_add(info) generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) + question = r.DNSQuestion("_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN) question.unicast = True known_answer = r.DNSPointer( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'known-to-other._hap._tcp.local.' + "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'notqu._hap._tcp.local.' 
) generated.add_question(question) generated.add_answer_at_time(known_answer, 0) now = r.current_time_millis() packets = generated.packets() question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) - assert not question_answers.ucast - assert not question_answers.mcast_now + assert question_answers + assert "qu._hap._tcp.local." in str(question_answers) + assert not question_answers.ucast # has not multicast recently + assert question_answers.mcast_now assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second assert not zc.question_history.suppresses(question, now, {known_answer}) From e60cc41730f209eddd2a54b0c424b1fb604ce00a Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 12 Nov 2023 19:00:20 +0000 Subject: [PATCH 171/434] 0.124.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1b987706..2594d5b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.124.0 (2023-11-12) + +### Feature + +* Avoid decoding known answers if we have no answers to give ([#1308](https://github.com/python-zeroconf/python-zeroconf/issues/1308)) ([`605dc9c`](https://github.com/python-zeroconf/python-zeroconf/commit/605dc9ccd843a535802031f051b3d93310186ad1)) +* Small speed up to process incoming packets ([#1309](https://github.com/python-zeroconf/python-zeroconf/issues/1309)) ([`56ef908`](https://github.com/python-zeroconf/python-zeroconf/commit/56ef90865189c01d2207abcc5e2efe3a7a022fa1)) + ## v0.123.0 (2023-11-12) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 87b97134..ba4c19c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.123.0" +version = "0.124.0" description = "A pure python implementation of multicast DNS service 
discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 2e9aad9f..0794fe25 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.123.0' +__version__ = '0.124.0' __license__ = 'LGPL' From d192d33b1f05aa95a89965e86210aec086673a17 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 12 Nov 2023 13:51:59 -0600 Subject: [PATCH 172/434] feat: speed up service browser queries when browsing many types (#1311) --- src/zeroconf/_services/browser.pxd | 13 +++++++++++-- src/zeroconf/_services/browser.py | 18 ++++++++---------- tests/services/test_browser.py | 14 ++++++++------ 3 files changed, 27 insertions(+), 18 deletions(-) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index c9b98a42..0cd0aeea 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -2,6 +2,7 @@ import cython from .._cache cimport DNSCache +from .._history cimport QuestionHistory from .._protocol.outgoing cimport DNSOutgoing, DNSPointer, DNSQuestion, DNSRecord from .._record_update cimport RecordUpdate from .._updates cimport RecordUpdateListener @@ -11,7 +12,7 @@ from . 
cimport Signal, SignalRegistrationInterface cdef bint TYPE_CHECKING cdef object cached_possible_types -cdef cython.uint _EXPIRE_REFRESH_TIME_PERCENT +cdef cython.uint _EXPIRE_REFRESH_TIME_PERCENT, _MAX_MSG_TYPICAL, _DNS_PACKET_HEADER_LEN cdef cython.uint _TYPE_PTR cdef object SERVICE_STATE_CHANGE_ADDED, SERVICE_STATE_CHANGE_REMOVED, SERVICE_STATE_CHANGE_UPDATED cdef cython.set _ADDRESS_RECORD_TYPES @@ -24,8 +25,16 @@ cdef class _DNSPointerOutgoingBucket: cpdef add(self, cython.uint max_compressed_size, DNSQuestion question, cython.set answers) +@cython.locals(cache=DNSCache, question_history=QuestionHistory, record=DNSRecord) +cpdef generate_service_query( + object zc, + float now, + list type_, + bint multicast, + object question_type +) -@cython.locals(answer=DNSPointer) +@cython.locals(answer=DNSPointer, query_buckets=list, question=DNSQuestion, max_compressed_size=cython.uint, max_bucket_size=cython.uint, query_bucket=_DNSPointerOutgoingBucket) cdef _group_ptr_queries_with_known_answers(object now, object multicast, cython.dict question_with_known_answers) cdef class QueryScheduler: diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 15af8d91..c69076f3 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -164,24 +164,22 @@ def _group_ptr_queries_with_known_answers( def generate_service_query( - zc: 'Zeroconf', - now: float, - types_: List[str], - multicast: bool = True, - question_type: Optional[DNSQuestionType] = None, + zc: 'Zeroconf', now: float_, types_: List[str], multicast: bool, question_type: Optional[DNSQuestionType] ) -> List[DNSOutgoing]: """Generate a service query for sending with zeroconf.send.""" questions_with_known_answers: _QuestionWithKnownAnswers = {} qu_question = not multicast if question_type is None else question_type == DNSQuestionType.QU + question_history = zc.question_history + cache = zc.cache for type_ in types_: question = DNSQuestion(type_, _TYPE_PTR, 
_CLASS_IN) question.unicast = qu_question known_answers = { record - for record in zc.cache.get_all_by_details(type_, _TYPE_PTR, _CLASS_IN) - if not record.is_stale(now) + for record in cache.get_all_by_details(type_, _TYPE_PTR, _CLASS_IN) + if record.is_stale(now) is False } - if not qu_question and zc.question_history.suppresses(question, now, known_answers): + if not qu_question and question_history.suppresses(question, now, known_answers): log.debug("Asking %s was suppressed by the question history", question) continue if TYPE_CHECKING: @@ -189,8 +187,8 @@ def generate_service_query( else: pointer_known_answers = known_answers questions_with_known_answers[question] = pointer_known_answers - if not qu_question: - zc.question_history.add_question_at_time(question, now, known_answers) + if qu_question is False: + question_history.add_question_at_time(question, now, known_answers) return _group_ptr_queries_with_known_answers(now, multicast, questions_with_known_answers) diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 268a9b20..a658ded9 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -1010,32 +1010,34 @@ async def test_generate_service_query_suppress_duplicate_questions(): assert zc.question_history.suppresses(question, now, other_known_answers) # The known answer list is different, do not suppress - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True) + outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) assert outs zc.cache.async_add_records([answer]) # The known answer list contains all the asked questions in the history # we should suppress - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True) + outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) assert not outs # We do not suppress once the question history expires - outs = 
_services_browser.generate_service_query(zc, now + 1000, [name], multicast=True) + outs = _services_browser.generate_service_query( + zc, now + 1000, [name], multicast=True, question_type=None + ) assert outs # We do not suppress QU queries ever - outs = _services_browser.generate_service_query(zc, now, [name], multicast=False) + outs = _services_browser.generate_service_query(zc, now, [name], multicast=False, question_type=None) assert outs zc.question_history.async_expire(now + 2000) # No suppression after clearing the history - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True) + outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) assert outs # The previous query we just sent is still remembered and # the next one is suppressed - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True) + outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) assert not outs await aiozc.async_close() From cfa1fd691d76a6b59769fb500a57732a4e120ac9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 12 Nov 2023 20:01:41 +0000 Subject: [PATCH 173/434] 0.125.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2594d5b2..b24e6b8b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.125.0 (2023-11-12) + +### Feature + +* Speed up service browser queries when browsing many types ([#1311](https://github.com/python-zeroconf/python-zeroconf/issues/1311)) ([`d192d33`](https://github.com/python-zeroconf/python-zeroconf/commit/d192d33b1f05aa95a89965e86210aec086673a17)) + ## v0.124.0 (2023-11-12) ### Feature diff --git a/pyproject.toml b/pyproject.toml index ba4c19c9..b8b771d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ 
[tool.poetry] name = "zeroconf" -version = "0.124.0" +version = "0.125.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 0794fe25..0db4cab3 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.124.0' +__version__ = '0.125.0' __license__ = 'LGPL' From 9caeabb6d4659a25ea1251c1ee7bb824e05f3d8b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 13 Nov 2023 12:16:15 -0600 Subject: [PATCH 174/434] feat: speed up writing name compression for outgoing packets (#1312) --- bench/outgoing.py | 2 +- src/zeroconf/_protocol/outgoing.pxd | 16 +++++--- src/zeroconf/_protocol/outgoing.py | 62 +++++++++++++++++------------ 3 files changed, 47 insertions(+), 33 deletions(-) diff --git a/bench/outgoing.py b/bench/outgoing.py index bb5d99ce..5c8f2e6f 100644 --- a/bench/outgoing.py +++ b/bench/outgoing.py @@ -158,7 +158,7 @@ def generate_packets() -> DNSOutgoing: def make_outgoing_message() -> None: - out.state = State.init + out.state = State.init.value out.finished = False out.packets() diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 2374f8b3..2cd9410a 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -21,8 +21,8 @@ cdef object PACK_BYTE cdef object PACK_SHORT cdef object PACK_LONG -cdef object STATE_INIT -cdef object STATE_FINISHED +cdef unsigned int STATE_INIT +cdef unsigned int STATE_FINISHED cdef object LOGGING_IS_ENABLED_FOR cdef object LOGGING_DEBUG @@ -40,7 +40,7 @@ cdef class DNSOutgoing: cdef public cython.list data cdef public unsigned int size cdef public bint allow_long - cdef public object state + cdef public unsigned int state cdef public 
cython.list questions cdef public cython.list answers cdef public cython.list authorities @@ -91,6 +91,8 @@ cdef class DNSOutgoing: ) cpdef write_name(self, cython.str name) + cdef _write_link_to_name(self, unsigned int index) + cpdef write_short(self, object value) cpdef write_string(self, cython.bytes value) @@ -98,6 +100,8 @@ cdef class DNSOutgoing: cpdef _write_utf(self, cython.str value) @cython.locals( + debug_enable=bint, + made_progress=bint, questions_offset=object, answer_offset=object, authority_offset=object, @@ -107,7 +111,7 @@ cdef class DNSOutgoing: authorities_written=object, additionals_written=object, ) - cdef _packets(self) + cpdef packets(self) cpdef add_question_or_all_cache(self, DNSCache cache, object now, str name, object type_, object class_) @@ -124,6 +128,6 @@ cdef class DNSOutgoing: cpdef add_additional_answer(self, DNSRecord record) - cpdef is_query(self) + cpdef bint is_query(self) - cpdef is_response(self) + cpdef bint is_response(self) diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index f4f68c3d..41c832f6 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -61,8 +61,8 @@ class State(enum.Enum): finished = 1 -STATE_INIT = State.init -STATE_FINISHED = State.finished +STATE_INIT = State.init.value +STATE_FINISHED = State.finished.value LOGGING_IS_ENABLED_FOR = log.isEnabledFor LOGGING_DEBUG = logging.DEBUG @@ -277,30 +277,41 @@ def write_name(self, name: str_) -> None: """ # split name into each label - name_length = 0 if name.endswith('.'): - name = name[: len(name) - 1] - labels = name.split('.') - # Write each new label or a pointer to the existing - # on in the packet + name = name[:-1] + + index = self.names.get(name, 0) + if index: + self._write_link_to_name(index) + return + start_size = self.size - for count in range(len(labels)): - label = name if count == 0 else '.'.join(labels[count:]) - index = self.names.get(label, 0) + labels = name.split('.') + 
# Write each new label or a pointer to the existing one in the packet + self.names[name] = start_size + self._write_utf(labels[0]) + + name_length = 0 + for count in range(1, len(labels)): + partial_name = '.'.join(labels[count:]) + index = self.names.get(partial_name, 0) if index: - # If part of the name already exists in the packet, - # create a pointer to it - self._write_byte((index >> 8) | 0xC0) - self._write_byte(index & 0xFF) + self._write_link_to_name(index) return if name_length == 0: name_length = len(name.encode('utf-8')) - self.names[label] = start_size + name_length - len(label.encode('utf-8')) + self.names[partial_name] = start_size + name_length - len(partial_name.encode('utf-8')) self._write_utf(labels[count]) # this is the end of a name self._write_byte(0) + def _write_link_to_name(self, index: int_) -> None: + # If part of the name already exists in the packet, + # create a pointer to it + self._write_byte((index >> 8) | 0xC0) + self._write_byte(index & 0xFF) + def _write_question(self, question: DNSQuestion_) -> bool: """Writes a question to the packet""" start_data_length = len(self.data) @@ -406,9 +417,6 @@ def packets(self) -> List[bytes]: will be written out to a single oversized packet no more than _MAX_MSG_ABSOLUTE in length (and hence will be subject to IP fragmentation potentially).""" - return self._packets() - - def _packets(self) -> List[bytes]: if self.state == STATE_FINISHED: return self.packets_data @@ -445,6 +453,8 @@ def _packets(self) -> List[bytes]: authorities_written = self._write_records_from_offset(self.authorities, authority_offset) additionals_written = self._write_records_from_offset(self.additionals, additional_offset) + made_progress = bool(self.data) + self._insert_short_at_start(additionals_written) self._insert_short_at_start(authorities_written) self._insert_short_at_start(answers_written) @@ -479,16 +489,16 @@ def _packets(self) -> List[bytes]: self._insert_short_at_start(self.id) 
self.packets_data.append(b''.join(self.data)) - self._reset_for_next_packet() - if ( - not questions_written - and not answers_written - and not authorities_written - and not additionals_written - and (self.questions or self.answers or self.authorities or self.additionals) - ): + if not made_progress: + # Generating an empty packet is not a desirable outcome, but currently + # too many internals rely on this behavior. So, we'll just return an + # empty packet and log a warning until this can be refactored at a later + # date. log.warning("packets() made no progress adding records; returning") break + + self._reset_for_next_packet() + self.state = STATE_FINISHED return self.packets_data From 55cf4ccdff886a136db4e2133d3e6cdd001a8bd6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 13 Nov 2023 13:26:29 -0600 Subject: [PATCH 175/434] feat: speed up outgoing packet writer (#1313) --- bench/outgoing.py | 3 +- src/zeroconf/_protocol/outgoing.pxd | 47 ++++++++++++++++++----------- src/zeroconf/_protocol/outgoing.py | 46 +++++++++++++++++----------- 3 files changed, 60 insertions(+), 36 deletions(-) diff --git a/bench/outgoing.py b/bench/outgoing.py index 5c8f2e6f..d832a05b 100644 --- a/bench/outgoing.py +++ b/bench/outgoing.py @@ -158,9 +158,10 @@ def generate_packets() -> DNSOutgoing: def make_outgoing_message() -> None: + out.packets() out.state = State.init.value out.finished = False - out.packets() + out._reset_for_next_packet() count = 100000 diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 2cd9410a..0f757af8 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -15,8 +15,11 @@ cdef cython.uint _FLAGS_TC cdef cython.uint _MAX_MSG_ABSOLUTE cdef cython.uint _MAX_MSG_TYPICAL + cdef bint TYPE_CHECKING +cdef unsigned int SHORT_CACHE_MAX + cdef object PACK_BYTE cdef object PACK_SHORT cdef object PACK_LONG @@ -28,6 +31,7 @@ cdef object LOGGING_IS_ENABLED_FOR cdef object 
LOGGING_DEBUG cdef cython.tuple BYTE_TABLE +cdef cython.tuple SHORT_LOOKUP cdef class DNSOutgoing: @@ -46,13 +50,15 @@ cdef class DNSOutgoing: cdef public cython.list authorities cdef public cython.list additionals - cdef _reset_for_next_packet(self) + cpdef _reset_for_next_packet(self) - cdef _write_byte(self, object value) + cdef _write_byte(self, cython.uint value) - cdef _insert_short_at_start(self, object value) + cdef void _insert_short_at_start(self, unsigned int value) - cdef _replace_short(self, object index, object value) + cdef _replace_short(self, cython.uint index, cython.uint value) + + cdef _get_short(self, cython.uint value) cdef _write_int(self, object value) @@ -61,10 +67,12 @@ cdef class DNSOutgoing: @cython.locals( d=cython.bytes, data_view=cython.list, + index=cython.uint, length=cython.uint ) cdef cython.bint _write_record(self, DNSRecord record, object now) + @cython.locals(class_=cython.uint) cdef _write_record_class(self, DNSEntry record) @cython.locals( @@ -72,13 +80,16 @@ cdef class DNSOutgoing: ) cdef cython.bint _check_data_limit_or_rollback(self, cython.uint start_data_length, cython.uint start_size) - cdef _write_questions_from_offset(self, object questions_offset) + @cython.locals(questions_written=cython.uint) + cdef cython.uint _write_questions_from_offset(self, unsigned int questions_offset) - cdef _write_answers_from_offset(self, object answer_offset) + @cython.locals(answers_written=cython.uint) + cdef cython.uint _write_answers_from_offset(self, unsigned int answer_offset) - cdef _write_records_from_offset(self, cython.list records, object offset) + @cython.locals(records_written=cython.uint) + cdef cython.uint _write_records_from_offset(self, cython.list records, unsigned int offset) - cdef _has_more_to_add(self, object questions_offset, object answer_offset, object authority_offset, object additional_offset) + cdef bint _has_more_to_add(self, unsigned int questions_offset, unsigned int answer_offset, unsigned int 
authority_offset, unsigned int additional_offset) cdef _write_ttl(self, DNSRecord record, object now) @@ -93,23 +104,25 @@ cdef class DNSOutgoing: cdef _write_link_to_name(self, unsigned int index) - cpdef write_short(self, object value) + cpdef write_short(self, cython.uint value) cpdef write_string(self, cython.bytes value) + @cython.locals(utfstr=bytes) cpdef _write_utf(self, cython.str value) @cython.locals( debug_enable=bint, made_progress=bint, - questions_offset=object, - answer_offset=object, - authority_offset=object, - additional_offset=object, - questions_written=object, - answers_written=object, - authorities_written=object, - additionals_written=object, + has_more_to_add=bint, + questions_offset="unsigned int", + answer_offset="unsigned int", + authority_offset="unsigned int", + additional_offset="unsigned int", + questions_written="unsigned int", + answers_written="unsigned int", + authorities_written="unsigned int", + additionals_written="unsigned int", ) cpdef packets(self) diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index 41c832f6..d3b47ae6 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -53,7 +53,10 @@ PACK_SHORT = Struct('>H').pack PACK_LONG = Struct('>L').pack +SHORT_CACHE_MAX = 128 + BYTE_TABLE = tuple(PACK_BYTE(i) for i in range(256)) +SHORT_LOOKUP = tuple(PACK_SHORT(i) for i in range(SHORT_CACHE_MAX)) class State(enum.Enum): @@ -220,17 +223,21 @@ def _write_byte(self, value: int_) -> None: self.data.append(BYTE_TABLE[value]) self.size += 1 + def _get_short(self, value: int_) -> bytes: + """Convert an unsigned short to 2 bytes.""" + return SHORT_LOOKUP[value] if value < SHORT_CACHE_MAX else PACK_SHORT(value) + def _insert_short_at_start(self, value: int_) -> None: """Inserts an unsigned short at the start of the packet""" - self.data.insert(0, PACK_SHORT(value)) + self.data.insert(0, self._get_short(value)) def _replace_short(self, index: int_, value: int_) -> 
None: """Replaces an unsigned short in a certain position in the packet""" - self.data[index] = PACK_SHORT(value) + self.data[index] = self._get_short(value) def write_short(self, value: int_) -> None: """Writes an unsigned short to the packet""" - self.data.append(PACK_SHORT(value)) + self.data.append(self._get_short(value)) self.size += 2 def _write_int(self, value: Union[float, int]) -> None: @@ -323,10 +330,11 @@ def _write_question(self, question: DNSQuestion_) -> bool: def _write_record_class(self, record: Union[DNSQuestion_, DNSRecord_]) -> None: """Write out the record class including the unique/unicast (QU) bit.""" - if record.unique and self.multicast: - self.write_short(record.class_ | _CLASS_UNIQUE) + class_ = record.class_ + if record.unique is True and self.multicast is True: + self.write_short(class_ | _CLASS_UNIQUE) else: - self.write_short(record.class_) + self.write_short(class_) def _write_ttl(self, record: DNSRecord_, now: float_) -> None: """Write out the record ttl.""" @@ -417,21 +425,20 @@ def packets(self) -> List[bytes]: will be written out to a single oversized packet no more than _MAX_MSG_ABSOLUTE in length (and hence will be subject to IP fragmentation potentially).""" + packets_data = self.packets_data + if self.state == STATE_FINISHED: - return self.packets_data + return packets_data questions_offset = 0 answer_offset = 0 authority_offset = 0 additional_offset = 0 # we have to at least write out the question - first_time = True - debug_enable = LOGGING_IS_ENABLED_FOR(LOGGING_DEBUG) + debug_enable = LOGGING_IS_ENABLED_FOR(LOGGING_DEBUG) is True + has_more_to_add = True - while first_time or self._has_more_to_add( - questions_offset, answer_offset, authority_offset, additional_offset - ): - first_time = False + while has_more_to_add: if debug_enable: log.debug( "offsets = questions=%d, answers=%d, authorities=%d, additionals=%d", @@ -473,9 +480,11 @@ def packets(self) -> List[bytes]: additional_offset, ) - if self.is_query() and 
self._has_more_to_add( + has_more_to_add = self._has_more_to_add( questions_offset, answer_offset, authority_offset, additional_offset - ): + ) + + if has_more_to_add and self.is_query(): # https://datatracker.ietf.org/doc/html/rfc6762#section-7.2 if debug_enable: # pragma: no branch log.debug("Setting TC flag") @@ -488,7 +497,7 @@ def packets(self) -> List[bytes]: else: self._insert_short_at_start(self.id) - self.packets_data.append(b''.join(self.data)) + packets_data.append(b''.join(self.data)) if not made_progress: # Generating an empty packet is not a desirable outcome, but currently @@ -498,7 +507,8 @@ def packets(self) -> List[bytes]: log.warning("packets() made no progress adding records; returning") break - self._reset_for_next_packet() + if has_more_to_add: + self._reset_for_next_packet() self.state = STATE_FINISHED - return self.packets_data + return packets_data From bf2cfdedcc8fadbefe3fa88097791aeb5bf4ffe3 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 13 Nov 2023 19:36:14 +0000 Subject: [PATCH 176/434] 0.126.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b24e6b8b..c035860d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.126.0 (2023-11-13) + +### Feature + +* Speed up outgoing packet writer ([#1313](https://github.com/python-zeroconf/python-zeroconf/issues/1313)) ([`55cf4cc`](https://github.com/python-zeroconf/python-zeroconf/commit/55cf4ccdff886a136db4e2133d3e6cdd001a8bd6)) +* Speed up writing name compression for outgoing packets ([#1312](https://github.com/python-zeroconf/python-zeroconf/issues/1312)) ([`9caeabb`](https://github.com/python-zeroconf/python-zeroconf/commit/9caeabb6d4659a25ea1251c1ee7bb824e05f3d8b)) + ## v0.125.0 (2023-11-12) ### Feature diff --git a/pyproject.toml b/pyproject.toml index b8b771d2..50a29bbe 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.125.0" +version = "0.126.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 0db4cab3..0334de40 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.125.0' +__version__ = '0.126.0' __license__ = 'LGPL' From bfe4c24881a7259713425df5ab00ffe487518841 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 14 Nov 2023 16:58:40 -0600 Subject: [PATCH 177/434] feat: small speed up to processing incoming dns records (#1315) --- src/zeroconf/_cache.pxd | 2 +- src/zeroconf/_dns.pxd | 62 ++++++++------- src/zeroconf/_dns.py | 6 +- src/zeroconf/_handlers/query_handler.py | 16 ++-- src/zeroconf/_handlers/record_manager.py | 6 +- src/zeroconf/_history.pxd | 2 +- src/zeroconf/_listener.py | 4 +- src/zeroconf/_protocol/incoming.pxd | 25 +++--- src/zeroconf/_protocol/incoming.py | 96 +++++++++++++++--------- src/zeroconf/_protocol/outgoing.py | 2 +- src/zeroconf/_services/browser.pxd | 2 +- src/zeroconf/_services/browser.py | 6 +- src/zeroconf/_services/info.pxd | 2 +- src/zeroconf/_services/info.py | 6 +- tests/test_protocol.py | 1 + 15 files changed, 136 insertions(+), 102 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index 1f94c21e..ef1c1353 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -24,7 +24,7 @@ cdef class DNSCache: cdef public cython.dict cache cdef public cython.dict service_cache - cpdef async_add_records(self, object entries) + cpdef bint async_add_records(self, object entries) cpdef async_remove_records(self, object entries) diff --git a/src/zeroconf/_dns.pxd 
b/src/zeroconf/_dns.pxd index 6785d1a3..255181f8 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -4,19 +4,21 @@ import cython from ._protocol.outgoing cimport DNSOutgoing -cdef object _LEN_BYTE -cdef object _LEN_SHORT -cdef object _LEN_INT +cdef cython.uint _LEN_BYTE +cdef cython.uint _LEN_SHORT +cdef cython.uint _LEN_INT -cdef object _NAME_COMPRESSION_MIN_SIZE -cdef object _BASE_MAX_SIZE +cdef cython.uint _NAME_COMPRESSION_MIN_SIZE +cdef cython.uint _BASE_MAX_SIZE cdef cython.uint _EXPIRE_FULL_TIME_MS cdef cython.uint _EXPIRE_STALE_TIME_MS cdef cython.uint _RECENT_TIME_MS -cdef object _CLASS_UNIQUE -cdef object _CLASS_MASK +cdef cython.uint _TYPE_ANY + +cdef cython.uint _CLASS_UNIQUE +cdef cython.uint _CLASS_MASK cdef object current_time_millis @@ -25,36 +27,40 @@ cdef class DNSEntry: cdef public str key cdef public str name cdef public cython.uint type - cdef public object class_ - cdef public object unique + cdef public cython.uint class_ + cdef public bint unique + + cdef _set_class(self, cython.uint class_) - cdef _dns_entry_matches(self, DNSEntry other) + cdef bint _dns_entry_matches(self, DNSEntry other) cdef class DNSQuestion(DNSEntry): cdef public cython.int _hash + cpdef bint answered_by(self, DNSRecord rec) + cdef class DNSRecord(DNSEntry): cdef public cython.float ttl cdef public cython.float created - cdef _suppressed_by_answer(self, DNSRecord answer) + cdef bint _suppressed_by_answer(self, DNSRecord answer) @cython.locals( answers=cython.list, ) - cpdef suppressed_by(self, object msg) + cpdef bint suppressed_by(self, object msg) cpdef get_remaining_ttl(self, cython.float now) cpdef get_expiration_time(self, cython.uint percent) - cpdef is_expired(self, cython.float now) + cpdef bint is_expired(self, cython.float now) - cpdef is_stale(self, cython.float now) + cpdef bint is_stale(self, cython.float now) - cpdef is_recent(self, cython.float now) + cpdef bint is_recent(self, cython.float now) cpdef reset_ttl(self, DNSRecord other) @@ 
-66,7 +72,7 @@ cdef class DNSAddress(DNSRecord): cdef public object address cdef public object scope_id - cdef _eq(self, DNSAddress other) + cdef bint _eq(self, DNSAddress other) cpdef write(self, DNSOutgoing out) @@ -74,10 +80,10 @@ cdef class DNSAddress(DNSRecord): cdef class DNSHinfo(DNSRecord): cdef public cython.int _hash - cdef public object cpu - cdef public object os + cdef public str cpu + cdef public str os - cdef _eq(self, DNSHinfo other) + cdef bint _eq(self, DNSHinfo other) cpdef write(self, DNSOutgoing out) @@ -87,29 +93,29 @@ cdef class DNSPointer(DNSRecord): cdef public str alias cdef public str alias_key - cdef _eq(self, DNSPointer other) + cdef bint _eq(self, DNSPointer other) cpdef write(self, DNSOutgoing out) cdef class DNSText(DNSRecord): cdef public cython.int _hash - cdef public object text + cdef public bytes text - cdef _eq(self, DNSText other) + cdef bint _eq(self, DNSText other) cpdef write(self, DNSOutgoing out) cdef class DNSService(DNSRecord): cdef public cython.int _hash - cdef public object priority - cdef public object weight - cdef public object port + cdef public cython.uint priority + cdef public cython.uint weight + cdef public cython.uint port cdef public str server cdef public str server_key - cdef _eq(self, DNSService other) + cdef bint _eq(self, DNSService other) cpdef write(self, DNSOutgoing out) @@ -119,7 +125,7 @@ cdef class DNSNsec(DNSRecord): cdef public object next_name cdef public cython.list rdtypes - cdef _eq(self, DNSNsec other) + cdef bint _eq(self, DNSNsec other) cpdef write(self, DNSOutgoing out) @@ -129,7 +135,7 @@ cdef class DNSRRSet: cdef cython.dict _lookup @cython.locals(other=DNSRecord) - cpdef suppresses(self, DNSRecord record) + cpdef bint suppresses(self, DNSRecord record) @cython.locals( record=DNSRecord, diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 0b43f410..3e9f074a 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -67,10 +67,13 @@ class DNSEntry: __slots__ = ('key', 
'name', 'type', 'class_', 'unique') - def __init__(self, name: str, type_: _int, class_: _int) -> None: + def __init__(self, name: str, type_: int, class_: int) -> None: self.name = name self.key = name.lower() self.type = type_ + self._set_class(class_) + + def _set_class(self, class_: _int) -> None: self.class_ = class_ & _CLASS_MASK self.unique = (class_ & _CLASS_UNIQUE) != 0 @@ -371,7 +374,6 @@ class DNSText(DNSRecord): def __init__( self, name: str, type_: int, class_: int, ttl: int, text: bytes, created: Optional[float] = None ) -> None: - assert isinstance(text, (bytes, type(None))) super().__init__(name, type_, class_, ttl, created) self.text = text self._hash = hash((self.key, type_, self.class_, text)) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index 0af72f4c..c66d9c30 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -167,7 +167,7 @@ def _has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) maybe_entry = self._cache.async_get_unique(record) - return bool(maybe_entry is not None and maybe_entry.is_recent(self._now) is True) + return bool(maybe_entry is not None and maybe_entry.is_recent(self._now)) def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: """Check if an answer was seen in the last second. @@ -202,7 +202,7 @@ def _add_service_type_enumeration_query_answers( dns_pointer = DNSPointer( _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, 0.0 ) - if known_answers.suppresses(dns_pointer) is False: + if not known_answers.suppresses(dns_pointer): answer_set[dns_pointer] = set() def _add_pointer_answers( @@ -213,7 +213,7 @@ def _add_pointer_answers( # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.1. 
dns_pointer = service._dns_pointer(None) - if known_answers.suppresses(dns_pointer) is True: + if known_answers.suppresses(dns_pointer): continue answer_set[dns_pointer] = { service._dns_service(None), @@ -237,7 +237,7 @@ def _add_address_answers( seen_types.add(dns_address.type) if dns_address.type != type_: additionals.add(dns_address) - elif known_answers.suppresses(dns_address) is False: + elif not known_answers.suppresses(dns_address): answers.append(dns_address) missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types if answers: @@ -272,12 +272,12 @@ def _answer_question( # https://tools.ietf.org/html/rfc6763#section-12.2. service = services[0] dns_service = service._dns_service(None) - if known_answers.suppresses(dns_service) is False: + if not known_answers.suppresses(dns_service): answer_set[dns_service] = service._get_address_and_nsec_records(None) elif strategy_type == _ANSWER_STRATEGY_TEXT: # pragma: no branch service = services[0] dns_text = service._dns_text(None) - if known_answers.suppresses(dns_text) is False: + if not known_answers.suppresses(dns_text): answer_set[dns_text] = set() return answer_set @@ -307,7 +307,7 @@ def async_response( # pylint: disable=unused-argument # at least one answer strategy answers: List[DNSRecord] = [] for msg in msgs: - if msg.is_probe() is True: + if msg.is_probe(): is_probe = True else: answers.extend(msg.answers()) @@ -319,7 +319,7 @@ def async_response( # pylint: disable=unused-argument now = msg.now for strategy in strategies: question = strategy.question - is_unicast = question.unique is True # unique and unicast are the same flag + is_unicast = question.unique # unique and unicast are the same flag if not is_unicast: if known_answers_set is None: # pragma: no branch known_answers_set = known_answers.lookup_set() diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 6fb11f55..cbf88abd 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ 
b/src/zeroconf/_handlers/record_manager.py @@ -106,14 +106,14 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: ) record.set_created_ttl(record.created, _DNS_PTR_MIN_TTL) - if record.unique is True: # https://tools.ietf.org/html/rfc6762#section-10.2 + if record.unique: # https://tools.ietf.org/html/rfc6762#section-10.2 unique_types.add((record.name, record_type, record.class_)) if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) maybe_entry = cache.async_get_unique(record) - if record.is_expired(now_float) is False: + if not record.is_expired(now_float): if maybe_entry is not None: maybe_entry.reset_ttl(record) else: @@ -151,7 +151,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: new = False if other_adds or address_adds: new = cache.async_add_records(address_adds) - if cache.async_add_records(other_adds) is True: + if cache.async_add_records(other_adds): new = True # Removes are processed last since # ServiceInfo could generate an un-needed query diff --git a/src/zeroconf/_history.pxd b/src/zeroconf/_history.pxd index d4e1c833..c1ff7619 100644 --- a/src/zeroconf/_history.pxd +++ b/src/zeroconf/_history.pxd @@ -12,7 +12,7 @@ cdef class QuestionHistory: cpdef add_question_at_time(self, DNSQuestion question, float now, cython.set known_answers) @cython.locals(than=cython.double, previous_question=cython.tuple, previous_known_answers=cython.set) - cpdef suppresses(self, DNSQuestion question, cython.double now, cython.set known_answers) + cpdef bint suppresses(self, DNSQuestion question, cython.double now, cython.set known_answers) @cython.locals(than=cython.double, now_known_answers=cython.tuple) cpdef async_expire(self, cython.double now) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 700029e1..23d24578 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -113,7 +113,7 @@ def _process_datagram_at_time( self.data == data and (now - _DUPLICATE_PACKET_SUPPRESSION_INTERVAL) < 
self.last_time and self.last_message is not None - and self.last_message.has_qu_question() is False + and not self.last_message.has_qu_question() ): # Guard against duplicate packets if debug: @@ -169,7 +169,7 @@ def _process_datagram_at_time( ) return - if msg.is_query() is False: + if not msg.is_query(): self._record_manager.async_updates_from_response(msg) return diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index c39ab9a6..3bfc57f2 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -52,32 +52,33 @@ cdef class DNSIncoming: cdef public bytes data cdef const unsigned char [:] view cdef unsigned int _data_len - cdef public cython.dict name_cache - cdef public cython.list questions + cdef cython.dict _name_cache + cdef cython.list _questions cdef cython.list _answers - cdef public object id - cdef public cython.uint num_questions - cdef public cython.uint num_answers - cdef public cython.uint num_authorities - cdef public cython.uint num_additionals - cdef public object valid + cdef public cython.uint id + cdef cython.uint _num_questions + cdef cython.uint _num_answers + cdef cython.uint _num_authorities + cdef cython.uint _num_additionals + cdef public bint valid cdef public object now cdef cython.float _now_float cdef public object scope_id cdef public object source + cdef bint _has_qu_question @cython.locals( question=DNSQuestion ) - cpdef has_qu_question(self) + cpdef bint has_qu_question(self) - cpdef is_query(self) + cpdef bint is_query(self) - cpdef is_probe(self) + cpdef bint is_probe(self) cpdef answers(self) - cpdef is_response(self) + cpdef bint is_response(self) @cython.locals( off=cython.uint, diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 6a7451e7..fd5fafb6 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -80,19 +80,20 @@ class DNSIncoming: 'data', 'view', '_data_len', - 'name_cache', 
- 'questions', + '_name_cache', + '_questions', '_answers', 'id', - 'num_questions', - 'num_answers', - 'num_authorities', - 'num_additionals', + '_num_questions', + '_num_answers', + '_num_authorities', + '_num_additionals', 'valid', 'now', '_now_float', 'scope_id', 'source', + '_has_qu_question', ) def __init__( @@ -108,20 +109,21 @@ def __init__( self.data = data self.view = data self._data_len = len(data) - self.name_cache: Dict[int, List[str]] = {} - self.questions: List[DNSQuestion] = [] + self._name_cache: Dict[int, List[str]] = {} + self._questions: List[DNSQuestion] = [] self._answers: List[DNSRecord] = [] self.id = 0 - self.num_questions = 0 - self.num_answers = 0 - self.num_authorities = 0 - self.num_additionals = 0 + self._num_questions = 0 + self._num_answers = 0 + self._num_authorities = 0 + self._num_additionals = 0 self.valid = False self._did_read_others = False self.now = now or current_time_millis() self._now_float = self.now self.source = source self.scope_id = scope_id + self._has_qu_question = False try: self._initial_parse() except DECODE_EXCEPTIONS: @@ -142,24 +144,43 @@ def is_response(self) -> bool: def has_qu_question(self) -> bool: """Returns true if any question is a QU question.""" - if not self.num_questions: - return False - for question in self.questions: - # QU questions use the same bit as unique - if question.unique: - return True - return False + return self._has_qu_question @property def truncated(self) -> bool: """Returns true if this is a truncated.""" return (self.flags & _FLAGS_TC) == _FLAGS_TC + @property + def questions(self) -> List[DNSQuestion]: + """Questions in the packet.""" + return self._questions + + @property + def num_questions(self) -> int: + """Number of questions in the packet.""" + return self._num_questions + + @property + def num_answers(self) -> int: + """Number of answers in the packet.""" + return self._num_answers + + @property + def num_authorities(self) -> int: + """Number of authorities in the 
packet.""" + return self._num_authorities + + @property + def num_additionals(self) -> int: + """Number of additionals in the packet.""" + return self._num_additionals + def _initial_parse(self) -> None: """Parse the data needed to initalize the packet object.""" self._read_header() self._read_questions() - if not self.num_questions: + if not self._num_questions: self._read_others() self.valid = True @@ -190,7 +211,7 @@ def answers(self) -> List[DNSRecord]: def is_probe(self) -> bool: """Returns true if this is a probe.""" - return self.num_authorities > 0 + return self._num_authorities > 0 def __repr__(self) -> str: return '' % ', '.join( @@ -198,11 +219,11 @@ def __repr__(self) -> str: 'id=%s' % self.id, 'flags=%s' % self.flags, 'truncated=%s' % self.truncated, - 'n_q=%s' % self.num_questions, - 'n_ans=%s' % self.num_answers, - 'n_auth=%s' % self.num_authorities, - 'n_add=%s' % self.num_additionals, - 'questions=%s' % self.questions, + 'n_q=%s' % self._num_questions, + 'n_ans=%s' % self._num_answers, + 'n_auth=%s' % self._num_authorities, + 'n_add=%s' % self._num_additionals, + 'questions=%s' % self._questions, 'answers=%s' % self.answers(), ] ) @@ -212,21 +233,24 @@ def _read_header(self) -> None: ( self.id, self.flags, - self.num_questions, - self.num_answers, - self.num_authorities, - self.num_additionals, + self._num_questions, + self._num_answers, + self._num_authorities, + self._num_additionals, ) = UNPACK_6H(self.data) self.offset += 12 def _read_questions(self) -> None: """Reads questions section of packet""" - for _ in range(self.num_questions): + questions = self._questions + for _ in range(self._num_questions): name = self._read_name() type_, class_ = UNPACK_HH(self.data, self.offset) self.offset += 4 question = DNSQuestion(name, type_, class_) - self.questions.append(question) + if question.unique: # QU questions use the same bit as unique + self._has_qu_question = True + questions.append(question) def _read_character_string(self) -> str: """Reads a 
character string from the packet""" @@ -246,7 +270,7 @@ def _read_others(self) -> None: """Reads the answers, authorities and additionals section of the packet""" self._did_read_others = True - n = self.num_answers + self.num_authorities + self.num_additionals + n = self._num_answers + self._num_authorities + self._num_additionals for _ in range(n): domain = self._read_name() type_, class_, ttl, length = UNPACK_HHiH(self.data, self.offset) @@ -352,7 +376,7 @@ def _read_name(self) -> str: seen_pointers: Set[int] = set() original_offset = self.offset self.offset = self._decode_labels_at_offset(original_offset, labels, seen_pointers) - self.name_cache[original_offset] = labels + self._name_cache[original_offset] = labels name = ".".join(labels) + "." if len(name) > MAX_NAME_LENGTH: raise IncomingDecodeError( @@ -394,12 +418,12 @@ def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: raise IncomingDecodeError( f"DNS compression pointer at {off} was seen again from {self.source}" ) - linked_labels = self.name_cache.get(link_py_int) + linked_labels = self._name_cache.get(link_py_int) if not linked_labels: linked_labels = [] seen_pointers.add(link_py_int) self._decode_labels_at_offset(link, linked_labels, seen_pointers) - self.name_cache[link_py_int] = linked_labels + self._name_cache[link_py_int] = linked_labels labels.extend(linked_labels) if len(labels) > MAX_DNS_LABELS: raise IncomingDecodeError( diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index d3b47ae6..0438cc83 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -331,7 +331,7 @@ def _write_question(self, question: DNSQuestion_) -> bool: def _write_record_class(self, record: Union[DNSQuestion_, DNSRecord_]) -> None: """Write out the record class including the unique/unicast (QU) bit.""" class_ = record.class_ - if record.unique is True and self.multicast is True: + if record.unique is True and self.multicast: 
self.write_short(class_ | _CLASS_UNIQUE) else: self.write_short(class_) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 0cd0aeea..25c0f584 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -25,7 +25,7 @@ cdef class _DNSPointerOutgoingBucket: cpdef add(self, cython.uint max_compressed_size, DNSQuestion question, cython.set answers) -@cython.locals(cache=DNSCache, question_history=QuestionHistory, record=DNSRecord) +@cython.locals(cache=DNSCache, question_history=QuestionHistory, record=DNSRecord, qu_question=bint) cpdef generate_service_query( object zc, float now, diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index c69076f3..ca8c9aa5 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -177,7 +177,7 @@ def generate_service_query( known_answers = { record for record in cache.get_all_by_details(type_, _TYPE_PTR, _CLASS_IN) - if record.is_stale(now) is False + if not record.is_stale(now) } if not qu_question and question_history.suppresses(question, now, known_answers): log.debug("Asking %s was suppressed by the question history", question) @@ -187,7 +187,7 @@ def generate_service_query( else: pointer_known_answers = known_answers questions_with_known_answers[question] = pointer_known_answers - if qu_question is False: + if not qu_question: question_history.add_question_at_time(question, now, known_answers) return _group_ptr_queries_with_known_answers(now, multicast, questions_with_known_answers) @@ -440,7 +440,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record continue # If its expired or already exists in the cache it cannot be updated. 
- if old_record is not None or record.is_expired(now) is True: + if old_record is not None or record.is_expired(now): continue if record_type in _ADDRESS_RECORD_TYPES: diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 0461bf00..b7977466 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -61,7 +61,7 @@ cdef class ServiceInfo(RecordUpdateListener): cpdef async_update_records(self, object zc, cython.float now, cython.list records) @cython.locals(cache=DNSCache) - cpdef _load_from_cache(self, object zc, cython.float now) + cpdef bint _load_from_cache(self, object zc, cython.float now) cdef _unpack_text_into_properties(self) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index fab6b410..fbf28af2 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -420,7 +420,7 @@ def _get_ip_addresses_from_cache_lifo( """Set IPv6 addresses from the cache.""" address_list: List[Union[IPv4Address, IPv6Address]] = [] for record in self._get_address_records_from_cache_by_type(zc, type): - if record.is_expired(now) is True: + if record.is_expired(now): continue ip_addr = _cached_ip_addresses_wrapper(record.address) if ip_addr is not None: @@ -463,7 +463,7 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo Returns True if a new record was added. 
""" - if record.is_expired(now) is True: + if record.is_expired(now): return False record_key = record.key @@ -779,7 +779,7 @@ async def async_request( now = current_time_millis() - if self._load_from_cache(zc, now) is True: + if self._load_from_cache(zc, now): return True if TYPE_CHECKING: diff --git a/tests/test_protocol.py b/tests/test_protocol.py index a8593850..0a853104 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -316,6 +316,7 @@ def test_massive_probe_packet_split(self): parsed1 = r.DNSIncoming(packets[0]) assert parsed1.questions[0].unicast is True assert len(parsed1.questions) == 30 + assert parsed1.num_questions == 30 assert parsed1.num_authorities == 88 assert parsed1.truncated parsed2 = r.DNSIncoming(packets[1]) From 0d60b61538a5d4b6f44b2369333b6e916a0a55b4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 14 Nov 2023 23:41:35 -0600 Subject: [PATCH 178/434] feat: speed up incoming packet reader (#1314) --- src/zeroconf/_dns.py | 1 - src/zeroconf/_protocol/incoming.pxd | 48 +++++++++++----------- src/zeroconf/_protocol/incoming.py | 64 ++++++++++++++++------------- 3 files changed, 59 insertions(+), 54 deletions(-) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 3e9f074a..4ca429a8 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -244,7 +244,6 @@ def __init__( class_: int, ttl: int, address: bytes, - *, scope_id: Optional[int] = None, created: Optional[float] = None, ) -> None: diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index 3bfc57f2..07ae6e78 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -21,11 +21,6 @@ cdef cython.uint _FLAGS_TC cdef cython.uint _FLAGS_QR_QUERY cdef cython.uint _FLAGS_QR_RESPONSE -cdef object UNPACK_3H -cdef object UNPACK_6H -cdef object UNPACK_HH -cdef object UNPACK_HHiH - cdef object DECODE_EXCEPTIONS cdef object IncomingDecodeError @@ -62,7 +57,6 @@ cdef class DNSIncoming: cdef 
cython.uint _num_additionals cdef public bint valid cdef public object now - cdef cython.float _now_float cdef public object scope_id cdef public object source cdef bint _has_qu_question @@ -81,49 +75,53 @@ cdef class DNSIncoming: cpdef bint is_response(self) @cython.locals( - off=cython.uint, - label_idx=cython.uint, - length=cython.uint, - link=cython.uint, - link_data=cython.uint, + off="unsigned int", + label_idx="unsigned int", + length="unsigned int", + link="unsigned int", + link_data="unsigned int", link_py_int=object, linked_labels=cython.list ) - cdef cython.uint _decode_labels_at_offset(self, unsigned int off, cython.list labels, cython.set seen_pointers) + cdef unsigned int _decode_labels_at_offset(self, unsigned int off, cython.list labels, cython.set seen_pointers) + @cython.locals(offset="unsigned int") cdef _read_header(self) cdef _initial_parse(self) @cython.locals( - end=cython.uint, - length=cython.uint + end="unsigned int", + length="unsigned int", + offset="unsigned int" ) cdef _read_others(self) + @cython.locals(offset="unsigned int") cdef _read_questions(self) @cython.locals( - length=cython.uint, + length="unsigned int", ) cdef str _read_character_string(self) cdef bytes _read_string(self, unsigned int length) @cython.locals( - name_start=cython.uint + name_start="unsigned int", + offset="unsigned int" ) - cdef _read_record(self, object domain, unsigned int type_, object class_, object ttl, unsigned int length) + cdef _read_record(self, object domain, unsigned int type_, unsigned int class_, unsigned int ttl, unsigned int length) @cython.locals( - offset=cython.uint, - offset_plus_one=cython.uint, - offset_plus_two=cython.uint, - window=cython.uint, - bit=cython.uint, - byte=cython.uint, - i=cython.uint, - bitmap_length=cython.uint, + offset="unsigned int", + offset_plus_one="unsigned int", + offset_plus_two="unsigned int", + window="unsigned int", + bit="unsigned int", + byte="unsigned int", + i="unsigned int", + bitmap_length="unsigned 
int", ) cdef _read_bitmap(self, unsigned int end) diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index fd5fafb6..9e208b63 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -60,10 +60,6 @@ DECODE_EXCEPTIONS = (IndexError, struct.error, IncomingDecodeError) -UNPACK_3H = struct.Struct(b'!3H').unpack_from -UNPACK_6H = struct.Struct(b'!6H').unpack_from -UNPACK_HH = struct.Struct(b'!HH').unpack_from -UNPACK_HHiH = struct.Struct(b'!HHiH').unpack_from _seen_logs: Dict[str, Union[int, tuple]] = {} _str = str @@ -90,7 +86,6 @@ class DNSIncoming: '_num_additionals', 'valid', 'now', - '_now_float', 'scope_id', 'source', '_has_qu_question', @@ -120,7 +115,6 @@ def __init__( self.valid = False self._did_read_others = False self.now = now or current_time_millis() - self._now_float = self.now self.source = source self.scope_id = scope_id self._has_qu_question = False @@ -230,23 +224,28 @@ def __repr__(self) -> str: def _read_header(self) -> None: """Reads header portion of packet""" - ( - self.id, - self.flags, - self._num_questions, - self._num_answers, - self._num_authorities, - self._num_additionals, - ) = UNPACK_6H(self.data) + view = self.view + offset = self.offset self.offset += 12 + # The header has 6 unsigned shorts in network order + self.id = view[offset] << 8 | view[offset + 1] + self.flags = view[offset + 2] << 8 | view[offset + 3] + self._num_questions = view[offset + 4] << 8 | view[offset + 5] + self._num_answers = view[offset + 6] << 8 | view[offset + 7] + self._num_authorities = view[offset + 8] << 8 | view[offset + 9] + self._num_additionals = view[offset + 10] << 8 | view[offset + 11] def _read_questions(self) -> None: """Reads questions section of packet""" + view = self.view questions = self._questions for _ in range(self._num_questions): name = self._read_name() - type_, class_ = UNPACK_HH(self.data, self.offset) + offset = self.offset self.offset += 4 + # The question has 2 
unsigned shorts in network order + type_ = view[offset] << 8 | view[offset + 1] + class_ = view[offset + 2] << 8 | view[offset + 3] question = DNSQuestion(name, type_, class_) if question.unique: # QU questions use the same bit as unique self._has_qu_question = True @@ -270,11 +269,18 @@ def _read_others(self) -> None: """Reads the answers, authorities and additionals section of the packet""" self._did_read_others = True + view = self.view n = self._num_answers + self._num_authorities + self._num_additionals for _ in range(n): domain = self._read_name() - type_, class_, ttl, length = UNPACK_HHiH(self.data, self.offset) + offset = self.offset self.offset += 10 + # type_, class_ and length are unsigned shorts in network order + # ttl is an unsigned long in network order https://www.rfc-editor.org/errata/eid2130 + type_ = view[offset] << 8 | view[offset + 1] + class_ = view[offset + 2] << 8 | view[offset + 3] + ttl = view[offset + 4] << 24 | view[offset + 5] << 16 | view[offset + 6] << 8 | view[offset + 7] + length = view[offset + 8] << 8 | view[offset + 9] end = self.offset + length rec = None try: @@ -300,16 +306,19 @@ def _read_record( ) -> Optional[DNSRecord]: """Read known records types and skip unknown ones.""" if type_ == _TYPE_A: - dns_address = DNSAddress(domain, type_, class_, ttl, self._read_string(4)) - dns_address.created = self._now_float - return dns_address + return DNSAddress(domain, type_, class_, ttl, self._read_string(4), None, self.now) if type_ in (_TYPE_CNAME, _TYPE_PTR): return DNSPointer(domain, type_, class_, ttl, self._read_name(), self.now) if type_ == _TYPE_TXT: return DNSText(domain, type_, class_, ttl, self._read_string(length), self.now) if type_ == _TYPE_SRV: - priority, weight, port = UNPACK_3H(self.data, self.offset) + view = self.view + offset = self.offset self.offset += 6 + # The SRV record has 3 unsigned shorts in network order + priority = view[offset] << 8 | view[offset + 1] + weight = view[offset + 2] << 8 | view[offset + 3] + 
port = view[offset + 4] << 8 | view[offset + 5] return DNSService( domain, type_, @@ -332,10 +341,7 @@ def _read_record( self.now, ) if type_ == _TYPE_AAAA: - dns_address = DNSAddress(domain, type_, class_, ttl, self._read_string(16)) - dns_address.created = self._now_float - dns_address.scope_id = self.scope_id - return dns_address + return DNSAddress(domain, type_, class_, ttl, self._read_string(16), self.scope_id, self.now) if type_ == _TYPE_NSEC: name_start = self.offset return DNSNsec( @@ -356,12 +362,13 @@ def _read_record( def _read_bitmap(self, end: _int) -> List[int]: """Reads an NSEC bitmap from the packet.""" rdtypes = [] + view = self.view while self.offset < end: offset = self.offset offset_plus_one = offset + 1 offset_plus_two = offset + 2 - window = self.view[offset] - bitmap_length = self.view[offset_plus_one] + window = view[offset] + bitmap_length = view[offset_plus_one] bitmap_end = offset_plus_two + bitmap_length for i, byte in enumerate(self.data[offset_plus_two:bitmap_end]): for bit in range(0, 8): @@ -386,8 +393,9 @@ def _read_name(self) -> str: def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: Set[int]) -> int: # This is a tight loop that is called frequently, small optimizations can make a difference. + view = self.view while off < self._data_len: - length = self.view[off] + length = view[off] if length == 0: return off + DNS_COMPRESSION_HEADER_LEN @@ -403,7 +411,7 @@ def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: ) # We have a DNS compression pointer - link_data = self.view[off + 1] + link_data = view[off + 1] link = (length & 0x3F) * 256 + link_data link_py_int = link if link > self._data_len: From cd28476f6b0a6c2c733273fb24ddaac6c7bbdf65 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 15 Nov 2023 00:27:46 -0600 Subject: [PATCH 179/434] feat: small speed up to writing outgoing packets (#1316) --- src/zeroconf/_protocol/outgoing.pxd | 5 +++-- src/zeroconf/_protocol/outgoing.py | 10 +++++++++- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 0f757af8..52237f09 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -32,6 +32,7 @@ cdef object LOGGING_DEBUG cdef cython.tuple BYTE_TABLE cdef cython.tuple SHORT_LOOKUP +cdef cython.dict LONG_LOOKUP cdef class DNSOutgoing: @@ -70,7 +71,7 @@ cdef class DNSOutgoing: index=cython.uint, length=cython.uint ) - cdef cython.bint _write_record(self, DNSRecord record, object now) + cdef cython.bint _write_record(self, DNSRecord record, float now) @cython.locals(class_=cython.uint) cdef _write_record_class(self, DNSEntry record) @@ -91,7 +92,7 @@ cdef class DNSOutgoing: cdef bint _has_more_to_add(self, unsigned int questions_offset, unsigned int answer_offset, unsigned int authority_offset, unsigned int additional_offset) - cdef _write_ttl(self, DNSRecord record, object now) + cdef _write_ttl(self, DNSRecord record, float now) @cython.locals( labels=cython.list, diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index 0438cc83..e421681c 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -31,6 +31,8 @@ from .._logger import log from ..const import ( _CLASS_UNIQUE, + _DNS_HOST_TTL, + _DNS_OTHER_TTL, _DNS_PACKET_HEADER_LEN, _FLAGS_QR_MASK, _FLAGS_QR_QUERY, @@ -57,6 +59,7 @@ BYTE_TABLE = tuple(PACK_BYTE(i) for i in range(256)) SHORT_LOOKUP = tuple(PACK_SHORT(i) for i in range(SHORT_CACHE_MAX)) +LONG_LOOKUP = {i: PACK_LONG(i) for i in (_DNS_OTHER_TTL, _DNS_HOST_TTL, 0)} class State(enum.Enum): @@ -242,7 +245,12 @@ def write_short(self, value: int_) -> None: def _write_int(self, value: 
Union[float, int]) -> None: """Writes an unsigned integer to the packet""" - self.data.append(PACK_LONG(int(value))) + value_as_int = int(value) + long_bytes = LONG_LOOKUP.get(value_as_int) + if long_bytes is not None: + self.data.append(long_bytes) + else: + self.data.append(PACK_LONG(value_as_int)) self.size += 4 def write_string(self, value: bytes_) -> None: From 1b5cc2459359db9b08782ae7f75e7914ab0e1bf0 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 15 Nov 2023 06:36:48 +0000 Subject: [PATCH 180/434] 0.127.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 8 ++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c035860d..dba51f2c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,14 @@ +## v0.127.0 (2023-11-15) + +### Feature + +* Small speed up to writing outgoing packets ([#1316](https://github.com/python-zeroconf/python-zeroconf/issues/1316)) ([`cd28476`](https://github.com/python-zeroconf/python-zeroconf/commit/cd28476f6b0a6c2c733273fb24ddaac6c7bbdf65)) +* Speed up incoming packet reader ([#1314](https://github.com/python-zeroconf/python-zeroconf/issues/1314)) ([`0d60b61`](https://github.com/python-zeroconf/python-zeroconf/commit/0d60b61538a5d4b6f44b2369333b6e916a0a55b4)) +* Small speed up to processing incoming dns records ([#1315](https://github.com/python-zeroconf/python-zeroconf/issues/1315)) ([`bfe4c24`](https://github.com/python-zeroconf/python-zeroconf/commit/bfe4c24881a7259713425df5ab00ffe487518841)) + ## v0.126.0 (2023-11-13) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 50a29bbe..02990c46 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.126.0" +version = "0.127.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 0334de40..b04da841 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.126.0' +__version__ = '0.127.0' __license__ = 'LGPL' From 72fed787df295016f705ee43c6901a8277b43df7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Nov 2023 14:44:10 -0600 Subject: [PATCH 181/434] chore: add benchmark to create and destroy an instance (#1317) --- bench/create_destory.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 bench/create_destory.py diff --git a/bench/create_destory.py b/bench/create_destory.py new file mode 100644 index 00000000..f1941423 --- /dev/null +++ b/bench/create_destory.py @@ -0,0 +1,23 @@ +"""Benchmark for AsyncZeroconf.""" +import asyncio +import time + +from zeroconf.asyncio import AsyncZeroconf + +iterations = 10000 + + +async def _create_destroy(count: int) -> None: + for _ in range(count): + async with AsyncZeroconf() as zc: + await zc.zeroconf.async_wait_for_start() + + +async def _run() -> None: + start = time.perf_counter() + await _create_destroy(iterations) + duration = time.perf_counter() - start + print(f"Creating and destroying {iterations} Zeroconf instances took {duration} seconds") + + +asyncio.run(_run()) From a20084281e66bdb9c37183a5eb992435f5b866ac Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 2 Dec 2023 13:16:45 -1000 Subject: [PATCH 182/434] feat: speed up unpacking TXT record data in ServiceInfo (#1318) --- bench/txt_properties.py | 22 ++++++++++++++++++++++ src/zeroconf/_services/info.pxd | 3 ++- src/zeroconf/_services/info.py | 19 +++++++++---------- 3 files changed, 33 insertions(+), 11 deletions(-) create mode 100644 bench/txt_properties.py diff --git a/bench/txt_properties.py b/bench/txt_properties.py new file mode 100644 index 00000000..792d5312 --- /dev/null +++ b/bench/txt_properties.py @@ -0,0 +1,22 @@ +import timeit + +from zeroconf import ServiceInfo + +info = ServiceInfo( + "_test._tcp.local.", + "test._test._tcp.local.", + properties=( + b"\x19md=AlexanderHomeAssistant\x06pv=1.0\x14id=59:8A:0B:74:65:1D\x05" + b"c#=14\x04s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=ccZLPA==" + ), +) + + +def process_properties() -> None: + info._properties = None + info.properties + + +count = 100000 +time = timeit.Timer(process_properties).timeit(count) +print(f"Processing {count} properties took {time} seconds") diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index b7977466..3506c3a9 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -63,7 +63,8 @@ cdef class ServiceInfo(RecordUpdateListener): @cython.locals(cache=DNSCache) cpdef bint _load_from_cache(self, object zc, cython.float now) - cdef _unpack_text_into_properties(self) + @cython.locals(length="unsigned char", index="unsigned int", key_value=bytes, key_sep_value=tuple) + cdef void _unpack_text_into_properties(self) cdef _set_properties(self, cython.dict properties) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index fbf28af2..f363b55b 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -388,27 +388,26 @@ def _set_text(self, text: bytes) -> None: def _unpack_text_into_properties(self) -> None: """Unpacks the text field into properties""" text = self.text - 
if not text: + end = len(text) + if end == 0: # Properties should be set atomically # in case another thread is reading them self._properties = {} return index = 0 - pairs: List[bytes] = [] - end = len(text) + properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]] = {} while index < end: length = text[index] index += 1 - pairs.append(text[index : index + length]) + key_value = text[index : index + length] + key_sep_value = key_value.partition(b'=') + key = key_sep_value[0] + if key not in properties: + properties[key] = key_sep_value[2] or None index += length - # Reverse the list so that the first item in the list - # is the last item in the text field. This is important - # to preserve backwards compatibility where the first - # key always wins if the key is seen multiple times. - pairs.reverse() - self._properties = {key: value or None for key, _, value in (pair.partition(b'=') for pair in pairs)} + self._properties = properties def get_name(self) -> str: """Name accessor""" From 1c2f194dd265eeee8d41cc3e3aa6fcbbcff0b0c5 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 2 Dec 2023 23:25:05 +0000 Subject: [PATCH 183/434] 0.128.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dba51f2c..8233562a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.128.0 (2023-12-02) + +### Feature + +* Speed up unpacking TXT record data in ServiceInfo ([#1318](https://github.com/python-zeroconf/python-zeroconf/issues/1318)) ([`a200842`](https://github.com/python-zeroconf/python-zeroconf/commit/a20084281e66bdb9c37183a5eb992435f5b866ac)) + ## v0.127.0 (2023-11-15) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 02990c46..4bfe3a82 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.127.0" +version = 
"0.128.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b04da841..b994698b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.127.0' +__version__ = '0.128.0' __license__ = 'LGPL' From 1682991b985b1f7b2bf0cff1a7eb7793070e7cb1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 10 Dec 2023 10:38:27 -1000 Subject: [PATCH 184/434] fix: correct handling of IPv6 addresses with scope_id in ServiceInfo (#1322) --- examples/browser.py | 10 +++--- src/zeroconf/_services/info.pxd | 8 +++++ src/zeroconf/_services/info.py | 53 +++++++++++++++++++++-------- tests/services/test_info.py | 59 ++++++++++++++++++++++++++++++--- 4 files changed, 107 insertions(+), 23 deletions(-) diff --git a/examples/browser.py b/examples/browser.py index 60933e2a..a456a9eb 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -51,18 +51,18 @@ def on_service_state_change( parser.add_argument('--debug', action='store_true') parser.add_argument('--find', action='store_true', help='Browse all available services') version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--v6', action='store_true') version_group.add_argument('--v6-only', action='store_true') + version_group.add_argument('--v4-only', action='store_true') args = parser.parse_args() if args.debug: logging.getLogger('zeroconf').setLevel(logging.DEBUG) - if args.v6: - ip_version = IPVersion.All - elif args.v6_only: + if args.v6_only: ip_version = IPVersion.V6Only - else: + elif args.v4_only: ip_version = IPVersion.V4Only + else: + ip_version = IPVersion.All zeroconf = Zeroconf(ip_version=ip_version) diff --git a/src/zeroconf/_services/info.pxd 
b/src/zeroconf/_services/info.pxd index 3506c3a9..b7a2ee30 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -32,6 +32,14 @@ cdef object _IPVersion_V4Only_value cdef cython.set _ADDRESS_RECORD_TYPES cdef bint TYPE_CHECKING +cdef bint IPADDRESS_SUPPORTS_SCOPE_ID + +cdef _get_ip_address_object_from_record(DNSAddress record) + +@cython.locals(address_str=str) +cdef _str_without_scope_id(object addr) + +cdef _ip_bytes_and_scope_to_address(object addr, object scope_id) cdef class ServiceInfo(RecordUpdateListener): diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index f363b55b..e9e25763 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -22,6 +22,7 @@ import asyncio import random +import sys from functools import lru_cache from ipaddress import IPv4Address, IPv6Address, _BaseAddress, ip_address from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast @@ -78,12 +79,15 @@ # the A/AAAA/SRV records for a host. 
_AVOID_SYNC_DELAY_RANDOM_INTERVAL = (20, 120) +bytes_ = bytes float_ = float int_ = int DNS_QUESTION_TYPE_QU = DNSQuestionType.QU DNS_QUESTION_TYPE_QM = DNSQuestionType.QM +IPADDRESS_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) + if TYPE_CHECKING: from .._core import Zeroconf @@ -110,6 +114,29 @@ def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4 _cached_ip_addresses_wrapper = _cached_ip_addresses +def _get_ip_address_object_from_record(record: DNSAddress) -> Optional[Union[IPv4Address, IPv6Address]]: + """Get the IP address object from the record.""" + if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id is not None: + return _ip_bytes_and_scope_to_address(record.address, record.scope_id) + return _cached_ip_addresses_wrapper(record.address) + + +def _ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Union[IPv4Address, IPv6Address]]: + """Convert the bytes and scope to an IP address object.""" + base_address = _cached_ip_addresses_wrapper(address) + if base_address is not None and base_address.is_link_local: + return _cached_ip_addresses_wrapper(f"{base_address}%{scope}") + return base_address + + +def _str_without_scope_id(addr: Union[IPv4Address, IPv6Address]) -> str: + """Return the string representation of the address without the scope id.""" + if IPADDRESS_SUPPORTS_SCOPE_ID and addr.version == 6: + address_str = str(addr) + return address_str.partition('%')[0] + return str(addr) + + class ServiceInfo(RecordUpdateListener): """Service information. 
@@ -177,6 +204,7 @@ def __init__( raise TypeError("addresses and parsed_addresses cannot be provided together") if not type_.endswith(service_type_name(name, strict=False)): raise BadTypeInNameException + self.interface_index = interface_index self.text = b'' self.type = type_ self._name = name @@ -199,7 +227,6 @@ def __init__( self._set_properties(properties) self.host_ttl = host_ttl self.other_ttl = other_ttl - self.interface_index = interface_index self._new_records_futures: Optional[Set[asyncio.Future]] = None self._dns_address_cache: Optional[List[DNSAddress]] = None self._dns_pointer_cache: Optional[DNSPointer] = None @@ -243,7 +270,10 @@ def addresses(self, value: List[bytes]) -> None: self._get_address_and_nsec_records_cache = None for address in value: - addr = _cached_ip_addresses_wrapper(address) + if IPADDRESS_SUPPORTS_SCOPE_ID and len(address) == 16 and self.interface_index is not None: + addr = _ip_bytes_and_scope_to_address(address, self.interface_index) + else: + addr = _cached_ip_addresses_wrapper(address) if addr is None: raise TypeError( "Addresses must either be IPv4 or IPv6 strings, bytes, or integers;" @@ -322,10 +352,10 @@ def ip_addresses_by_version( def _ip_addresses_by_version_value( self, version_value: int_ - ) -> Union[List[IPv4Address], List[IPv6Address], List[_BaseAddress]]: + ) -> Union[List[IPv4Address], List[IPv6Address]]: """Backend for addresses_by_version that uses the raw value.""" if version_value == _IPVersion_All_value: - return [*self._ipv4_addresses, *self._ipv6_addresses] + return [*self._ipv4_addresses, *self._ipv6_addresses] # type: ignore[return-value] if version_value == _IPVersion_V4Only_value: return self._ipv4_addresses return self._ipv6_addresses @@ -339,7 +369,7 @@ def parsed_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: This means the first address will always be the most recently added address of the given IP version. 
""" - return [str(addr) for addr in self._ip_addresses_by_version_value(version.value)] + return [_str_without_scope_id(addr) for addr in self._ip_addresses_by_version_value(version.value)] def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: """Equivalent to parsed_addresses, with the exception that IPv6 Link-Local @@ -351,12 +381,7 @@ def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[st This means the first address will always be the most recently added address of the given IP version. """ - if self.interface_index is None: - return self.parsed_addresses(version) - return [ - f"{addr}%{self.interface_index}" if addr.version == 6 and addr.is_link_local else str(addr) - for addr in self._ip_addresses_by_version_value(version.value) - ] + return [str(addr) for addr in self._ip_addresses_by_version_value(version.value)] def _set_properties(self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]]) -> None: """Sets properties and text of this info from a dictionary""" @@ -421,8 +446,8 @@ def _get_ip_addresses_from_cache_lifo( for record in self._get_address_records_from_cache_by_type(zc, type): if record.is_expired(now): continue - ip_addr = _cached_ip_addresses_wrapper(record.address) - if ip_addr is not None: + ip_addr = _get_ip_address_object_from_record(record) + if ip_addr is not None and ip_addr not in address_list: address_list.append(ip_addr) address_list.reverse() # Reverse to get LIFO order return address_list @@ -471,7 +496,7 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo dns_address_record = record if TYPE_CHECKING: assert isinstance(dns_address_record, DNSAddress) - ip_addr = _cached_ip_addresses_wrapper(dns_address_record.address) + ip_addr = _get_ip_address_object_from_record(dns_address_record) if ip_addr is None: log.warning( "Encountered invalid address while processing %s: %s", diff --git a/tests/services/test_info.py 
b/tests/services/test_info.py index 7d437d23..482b3b0c 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -7,6 +7,7 @@ import logging import os import socket +import sys import threading import unittest from ipaddress import ip_address @@ -538,6 +539,7 @@ def test_multiple_addresses(): assert info.addresses == [address, address] assert info.parsed_addresses() == [address_parsed, address_parsed] assert info.parsed_scoped_addresses() == [address_parsed, address_parsed] + ipaddress_supports_scope_id = sys.version_info >= (3, 9, 0) if has_working_ipv6() and not os.environ.get('SKIP_IPV6'): address_v6_parsed = "2001:db8::1" @@ -576,14 +578,18 @@ def test_multiple_addresses(): assert info.ip_addresses_by_version(r.IPVersion.All) == [ ip_address(address), ip_address(address_v6), - ip_address(address_v6_ll), + ip_address(address_v6_ll_scoped_parsed) + if ipaddress_supports_scope_id + else ip_address(address_v6_ll), ] assert info.addresses_by_version(r.IPVersion.V4Only) == [address] assert info.ip_addresses_by_version(r.IPVersion.V4Only) == [ip_address(address)] assert info.addresses_by_version(r.IPVersion.V6Only) == [address_v6, address_v6_ll] assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ ip_address(address_v6), - ip_address(address_v6_ll), + ip_address(address_v6_ll_scoped_parsed) + if ipaddress_supports_scope_id + else ip_address(address_v6_ll), ] assert info.parsed_addresses() == [address_parsed, address_v6_parsed, address_v6_ll_parsed] assert info.parsed_addresses(r.IPVersion.V4Only) == [address_parsed] @@ -591,15 +597,60 @@ def test_multiple_addresses(): assert info.parsed_scoped_addresses() == [ address_parsed, address_v6_parsed, - address_v6_ll_scoped_parsed, + address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, ] assert info.parsed_scoped_addresses(r.IPVersion.V4Only) == [address_parsed] assert info.parsed_scoped_addresses(r.IPVersion.V6Only) == [ address_v6_parsed, - 
address_v6_ll_scoped_parsed, + address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, ] +@unittest.skipIf(sys.version_info < (3, 9, 0), 'Requires newer python') +def test_scoped_addresses_from_cache(): + type_ = "_http._tcp.local." + registration_name = f"scoped.{type_}" + zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) + host = "scoped.local." + + zeroconf.cache.async_add_records( + [ + r.DNSPointer( + type_, + const._TYPE_PTR, + const._CLASS_IN | const._CLASS_UNIQUE, + 120, + registration_name, + ), + r.DNSService( + registration_name, + const._TYPE_SRV, + const._CLASS_IN | const._CLASS_UNIQUE, + 120, + 0, + 0, + 80, + host, + ), + r.DNSAddress( + host, + const._TYPE_AAAA, + const._CLASS_IN | const._CLASS_UNIQUE, + 120, + socket.inet_pton(socket.AF_INET6, "fe80::52e:c2f2:bc5f:e9c6"), + scope_id=12, + ), + ] + ) + + # New kwarg way + info = ServiceInfo(type_, registration_name) + info.load_from_cache(zeroconf) + assert info.parsed_scoped_addresses() == ["fe80::52e:c2f2:bc5f:e9c6%12"] + assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ip_address("fe80::52e:c2f2:bc5f:e9c6%12")] + zeroconf.close() + + # This test uses asyncio because it needs to access the cache directly # which is not threadsafe @pytest.mark.asyncio From 46e5351661f1eb8e95a2dab97025fa8f33a3b63b Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 10 Dec 2023 20:46:41 +0000 Subject: [PATCH 185/434] 0.128.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8233562a..944bb8b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.128.1 (2023-12-10) + +### Fix + +* Correct handling of IPv6 addresses with scope_id in ServiceInfo ([#1322](https://github.com/python-zeroconf/python-zeroconf/issues/1322)) 
([`1682991`](https://github.com/python-zeroconf/python-zeroconf/commit/1682991b985b1f7b2bf0cff1a7eb7793070e7cb1)) + ## v0.128.0 (2023-12-02) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 4bfe3a82..8bce0b51 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.128.0" +version = "0.128.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b994698b..64cd0033 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.128.0' +__version__ = '0.128.1' __license__ = 'LGPL' From a0dac46c01202b3d5a0823ac1928fc1d75332522 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 10 Dec 2023 11:04:16 -1000 Subject: [PATCH 186/434] fix: match cython version for dev deps to build deps (#1325) --- poetry.lock | 305 +++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 158 insertions(+), 149 deletions(-) diff --git a/poetry.lock b/poetry.lock index ebcbf373..71c5d27c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,15 +1,14 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "async-timeout" -version = "4.0.2" +version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] [package.dependencies] @@ -19,7 +18,6 @@ typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -29,63 +27,71 @@ files = [ [[package]] name = "coverage" -version = "7.2.3" +version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, - {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, - {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, - {file = 
"coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, - {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, - {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, - {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, - {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, - {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", 
hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, - {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, - {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, - {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, - {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, - {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, - {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, - {file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, - {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash 
= "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, - {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, - {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, - {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, - {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, - {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, - {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, - {file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, - {file = 
"coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, - {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, - {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, - {file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, - {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, - {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, - {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", 
hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = 
"coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [package.dependencies] @@ -96,64 +102,80 @@ toml = ["tomli"] [[package]] name = "cython" -version = "0.29.34" -description = "The Cython compiler for writing C extensions for the Python language." -category = "dev" +version = "3.0.6" +description = "The Cython compiler for writing C extensions in the Python language." 
optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "Cython-0.29.34-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:742544024ddb74314e2d597accdb747ed76bd126e61fcf49940a5b5be0a8f381"}, - {file = "Cython-0.29.34-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03daae07f8cbf797506446adae512c3dd86e7f27a62a541fa1ee254baf43e32c"}, - {file = "Cython-0.29.34-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5a8de3e793a576e40ca9b4f5518610cd416273c7dc5e254115656b6e4ec70663"}, - {file = "Cython-0.29.34-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:60969d38e6a456a67e7ef8ae20668eff54e32ba439d4068ccf2854a44275a30f"}, - {file = "Cython-0.29.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:21b88200620d80cfe193d199b259cdad2b9af56f916f0f7f474b5a3631ca0caa"}, - {file = "Cython-0.29.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:308c8f1e58bf5e6e8a1c4dcf8abbd2d13d0f9b1e582f4d9ae8b89857342d8bb5"}, - {file = "Cython-0.29.34-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:d8f822fb6ecd5d88c42136561f82960612421154fc5bf23c57103a367bb91356"}, - {file = "Cython-0.29.34-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56866323f1660cecb4d5ff3a1fba92a56b91b7cfae0a8253777aa4bdb3bdf9a8"}, - {file = "Cython-0.29.34-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e971db8aeb12e7c0697cefafe65eefcc33ff1224ae3d8c7f83346cbc42c6c270"}, - {file = "Cython-0.29.34-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4401270b0dc464c23671e2e9d52a60985f988318febaf51b047190e855bbe7d"}, - {file = 
"Cython-0.29.34-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:dce0a36d163c05ae8b21200059511217d79b47baf2b7b0f926e8367bd7a3cc24"}, - {file = "Cython-0.29.34-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dbd79221869ee9a6ccc4953b2c8838bb6ae08ab4d50ea4b60d7894f03739417b"}, - {file = "Cython-0.29.34-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a0f4229df10bc4545ebbeaaf96ebb706011d8b333e54ed202beb03f2bee0a50e"}, - {file = "Cython-0.29.34-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fd1ea21f1cebf33ae288caa0f3e9b5563a709f4df8925d53bad99be693fc0d9b"}, - {file = "Cython-0.29.34-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d7ef5f68f4c5baa93349ea54a352f8716d18bee9a37f3e93eff38a5d4e9b7262"}, - {file = "Cython-0.29.34-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:459994d1de0f99bb18fad9f2325f760c4b392b1324aef37bcc1cd94922dfce41"}, - {file = "Cython-0.29.34-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:1d6c809e2f9ce5950bbc52a1d2352ef3d4fc56186b64cb0d50c8c5a3c1d17661"}, - {file = "Cython-0.29.34-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f674ceb5f722d364395f180fbac273072fc1a266aab924acc9cfd5afc645aae1"}, - {file = "Cython-0.29.34-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9489de5b2044dcdfd9d6ca8242a02d560137b3c41b1f5ae1c4f6707d66d6e44d"}, - {file = "Cython-0.29.34-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5c121dc185040f4333bfded68963b4529698e1b6d994da56be32c97a90c896b6"}, - {file = "Cython-0.29.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:b6149f7cc5b31bccb158c5b968e5a8d374fdc629792e7b928a9b66e08b03fca5"}, - {file = "Cython-0.29.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:0ab3cbf3d62b0354631a45dc93cfcdf79098663b1c65a6033af4a452b52217a7"}, - {file = "Cython-0.29.34-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:4a2723447d1334484681d5aede34184f2da66317891f94b80e693a2f96a8f1a7"}, - {file = "Cython-0.29.34-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e40cf86aadc29ecd1cb6de67b0d9488705865deea4fc185c7ad56d7a6fc78703"}, - {file = "Cython-0.29.34-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8c3cd8bb8e880a3346f5685601004d96e0a2221e73edcaeea57ea848618b4ac6"}, - {file = "Cython-0.29.34-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0e9032cd650b0cb1d2c2ef2623f5714c14d14c28d7647d589c3eeed0baf7428e"}, - {file = "Cython-0.29.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bdb3285660e3068438791ace7dd7b1efd6b442a10b5c8d7a4f0c9d184d08c8ed"}, - {file = "Cython-0.29.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a8ad755f9364e720f10a36734a1c7a5ced5c679446718b589259261438a517c9"}, - {file = "Cython-0.29.34-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:7595d29eaee95633dd8060f50f0e54b27472d01587659557ebcfe39da3ea946b"}, - {file = "Cython-0.29.34-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e6ef7879668214d80ea3914c17e7d4e1ebf4242e0dd4dabe95ca5ccbe75589a5"}, - {file = "Cython-0.29.34-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ccb223b5f0fd95d8d27561efc0c14502c0945f1a32274835831efa5d5baddfc1"}, - {file = "Cython-0.29.34-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11b1b278b8edef215caaa5250ad65a10023bfa0b5a93c776552248fc6f60098d"}, - {file = "Cython-0.29.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5718319a01489688fdd22ddebb8e2fcbbd60be5f30de4336ea7063c3ae29fbe5"}, - {file = 
"Cython-0.29.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:cfb2302ef617d647ee590a4c0a00ba3c2da05f301dcefe7721125565d2e51351"}, - {file = "Cython-0.29.34-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:67b850cf46b861bc27226d31e1d87c0e69869a02f8d3cc5d5bef549764029879"}, - {file = "Cython-0.29.34-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0963266dad685812c1dbb758fcd4de78290e3adc7db271c8664dcde27380b13e"}, - {file = "Cython-0.29.34-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7879992487d9060a61393eeefe00d299210256928dce44d887b6be313d342bac"}, - {file = "Cython-0.29.34-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:44733366f1604b0c327613b6918469284878d2f5084297d10d26072fc6948d51"}, - {file = "Cython-0.29.34-py2.py3-none-any.whl", hash = "sha256:be4f6b7be75a201c290c8611c0978549c60353890204573078e865423dbe3c83"}, - {file = "Cython-0.29.34.tar.gz", hash = "sha256:1909688f5d7b521a60c396d20bba9e47a1b2d2784bfb085401e1e1e7d29a29a8"}, + {file = "Cython-3.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fcdfbf6fc7d0bd683d55e617c3d5a5f25b28ce8b405bc1e89054fc7c52a97e5"}, + {file = "Cython-3.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccbee314f8d15ee8ddbe270859dda427e1187123f2c7c41526d1f260eee6c8f7"}, + {file = "Cython-3.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b992f36ffa1294921fca5f6488ea192fadd75770dc64fa25975379382551e9"}, + {file = "Cython-3.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2e90a75d405070f3c41e701bb8005892f14d42322f1d8fd00a61d660bbae7"}, + {file = "Cython-3.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4121c1160bc1bd8828546e8ce45906bd9ff27799d14747ce3fbbc9d67efbb1b8"}, + {file = "Cython-3.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:519814b8f80869ee5f9ee2cb2363e5c310067c0298cbea291c556b22da1ef6ae"}, + {file = "Cython-3.0.6-cp310-cp310-win32.whl", hash = "sha256:b029d8c754ef867ab4d67fc2477dde9782bf0409cb8e4024a7d29cf5aff37530"}, + {file = "Cython-3.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:2262390f453eedf600e084b074144286576ed2a56bb7fbfe15ad8d9499eceb52"}, + {file = "Cython-3.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfe8c7ac60363769ed8d91fca26398aaa9640368ab999a79b0ccb5e788d3bcf8"}, + {file = "Cython-3.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e31a9b18ec6ce57eb3479df920e6093596fe4ba8010dcc372720040386b4bdb"}, + {file = "Cython-3.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca2542f1f34f0141475b13777df040c31f2073a055097734a0a793ac3a4fb72"}, + {file = "Cython-3.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b24c1c38dad4bd85e142ccbe2f88122807f8d5a75352321e1e4baf2b293df7c6"}, + {file = "Cython-3.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dc4b4e76c1414584bb55465dfb6f41dd6bd27fd53fb41ddfcaca9edf00c1f80e"}, + {file = "Cython-3.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:805a2c532feee09aeed064eaeb7b6ee35cbab650569d0a3756975f3cc4f246cf"}, + {file = "Cython-3.0.6-cp311-cp311-win32.whl", hash = "sha256:dcdb9a177c7c385fe0c0709a9a6790b6508847d67dcac76bb65a2c7ea447efe5"}, + {file = "Cython-3.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:b8640b7f6503292c358cef925df5a69adf230045719893ffe20ad98024fdf7ae"}, + {file = "Cython-3.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:16b3b02cc7b3bc42ee1a0118b1465ca46b0f3fb32d003e6f1a3a352a819bb9a3"}, + {file = "Cython-3.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11e1d9b153573c425846b627bef52b3b99cb73d4fbfbb136e500a878d4b5e803"}, + {file = "Cython-3.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:85a7a406f78c2f297bf82136ff5deac3150288446005ed1e56552a9e3ac1469f"}, + {file = "Cython-3.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88be4fbc760de8f313df89ca8256098c0963c9ec72f3aa88538384b80ef1a6ef"}, + {file = "Cython-3.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ea2e5a7c503b41618bfb10e4bc610f780ab1c729280531b5cabb24e05aa21cf2"}, + {file = "Cython-3.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d296b48e1410cab50220a28a834167f2d7ac6c0e7de12834d66e42248a1b0f6"}, + {file = "Cython-3.0.6-cp312-cp312-win32.whl", hash = "sha256:7f19e99c6e334e9e30dfa844c3ca4ac09931b94dbba406c646bde54687aed758"}, + {file = "Cython-3.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:9cae02e26967ffb6503c6e91b77010acbadfb7189a5a11d6158d634fb0f73679"}, + {file = "Cython-3.0.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cb6a54543869a5b0ad009d86eb0ebc0879fab838392bfd253ad6d4f5e0f17d84"}, + {file = "Cython-3.0.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d2d9e53bf021cc7a5c7b6b537b5b5a7ba466ba7348d498aa17499d0ad12637e"}, + {file = "Cython-3.0.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05d15854b2b363b35c755d22015c1c2fc590b8128202f8c9eb85578461101d9c"}, + {file = "Cython-3.0.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5548316497a3b8b2d9da575ea143476472db90dee73c67def061621940f78ae"}, + {file = "Cython-3.0.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9b853e0855e4b3d164c05b24718e5e2df369e5af54f47cb8d923c4f497dfc92c"}, + {file = "Cython-3.0.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2c77f97f462a40a319dda7e28c1669370cb26f9175f3e8f9bab99d2f8f3f2f09"}, + {file = "Cython-3.0.6-cp36-cp36m-win32.whl", hash = "sha256:3ac8b6734f2cad5640f2da21cd33cf88323547d07e445fb7453ab38ec5033b1f"}, + {file = "Cython-3.0.6-cp36-cp36m-win_amd64.whl", hash = 
"sha256:8dd5f5f3587909ff71f0562f50e00d4b836c948e56e8f74897b12f38a29e41b9"}, + {file = "Cython-3.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9c0472c6394750469062deb2c166125b10411636f63a0418b5c36a60d0c9a96a"}, + {file = "Cython-3.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97081932c8810bb99cb26b4b0402202a1764b58ee287c8b306071d2848148c24"}, + {file = "Cython-3.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e781b3880dfd0d4d37983c9d414bfd5f26c2141f6d763d20ef1964a0a4cb2405"}, + {file = "Cython-3.0.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef88c46e91e21772a5d3b6b1e70a6da5fe098154ad4768888129b1c05e93bba7"}, + {file = "Cython-3.0.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a38b9e7a252ec27dbc21ee8f00f09a896e88285eebb6ed99207b2ff1ea6af28e"}, + {file = "Cython-3.0.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4975cdaf720d29288ec225b76b4f4471ff03f4f8b51841ba85d6587699ab2ad5"}, + {file = "Cython-3.0.6-cp37-cp37m-win32.whl", hash = "sha256:9b89463ea330318461ca47d3e49b5f606e7e82446b6f37e5c19b60392439674c"}, + {file = "Cython-3.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:0ca8f379b47417bfad98faeb14bf8a3966fc92cf69f8aaf7635cf6885e50d001"}, + {file = "Cython-3.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b3dda1e80eb577b9563cee6cf31923a7b88836b9f9be0043ec545b138b95d8e8"}, + {file = "Cython-3.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e34e9a96f98c379100ef4192994a311678fb5c9af34c83ba5230223577581"}, + {file = "Cython-3.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:345d9112fde4ae0347d656f58591fd52017c61a19779c95423bb38735fe4a401"}, + {file = "Cython-3.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25da0e51331ac12ff16cd858d1d836e092c984e1dc45d338166081d3802297c0"}, + {file = 
"Cython-3.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:eebbf09089b4988b9f398ed46f168892e32fcfeec346b15954fdd818aa103456"}, + {file = "Cython-3.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e3ed0c125556324fa49b9e92bea13be7b158fcae6f72599d63c8733688257788"}, + {file = "Cython-3.0.6-cp38-cp38-win32.whl", hash = "sha256:86e1e5a5c9157a547d0a769de59c98a1fc5e46cfad976f32f60423cc6de11052"}, + {file = "Cython-3.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:0d45a84a315bd84d1515cd3571415a0ee0709eb4e2cd4b13668ede928af344a7"}, + {file = "Cython-3.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a8e788e64b659bb8fe980bc37da3118e1f7285dec40c5fb293adabc74d4205f2"}, + {file = "Cython-3.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a77a174c7fb13d80754c8bf9912efd3f3696d13285b2f568eca17324263b3f7"}, + {file = "Cython-3.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074e84752cd0daf3226823ddbc37cca8bc45f61c94a1db2a34e641f2b9b0797"}, + {file = "Cython-3.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49d5cae02d56e151e1481e614a1af9a0fe659358f2aa5eca7a18f05aa641db61"}, + {file = "Cython-3.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b94610fa49e36db068446cfd149a42e3246f38a4256bbe818512ac181446b4b"}, + {file = "Cython-3.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fabb2d14dd71add618a7892c40ffec584d1dae1e477caa193778e52e06821d83"}, + {file = "Cython-3.0.6-cp39-cp39-win32.whl", hash = "sha256:ce442c0be72ab014c305399d955b78c3d1e69d5a5ce24398122b605691b69078"}, + {file = "Cython-3.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:8a05f79a0761fc76c42e945e5a9cb5d7986aa9e8e526fdf52bd9ca61a12d4567"}, + {file = "Cython-3.0.6-py2.py3-none-any.whl", hash = "sha256:5921a175ea20779d4443ef99276cfa9a1a47de0e32d593be7679be741c9ed93b"}, + {file = "Cython-3.0.6.tar.gz", hash = 
"sha256:399d185672c667b26eabbdca420c98564583798af3bc47670a8a09e9f19dd660"}, ] [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -163,7 +185,6 @@ test = ["pytest (>=6)"] name = "ifaddr" version = "0.2.0" description = "Cross-platform network interface and IP address enumeration library" -category = "main" optional = false python-versions = "*" files = [ @@ -173,14 +194,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.7.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, ] [package.dependencies] @@ -190,13 +210,12 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", 
"sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -206,26 +225,24 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.2.0-py3-none-any.whl", hash = 
"sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [package.dependencies] @@ -237,14 +254,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pytest" -version = "7.3.1" +version = "7.4.3" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"}, - {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] @@ -257,13 +273,12 @@ pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" version = "0.20.3" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -281,14 +296,13 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-cov" -version = "4.0.0" +version = "4.1.0" description = "Pytest plugin for measuring coverage." 
-category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, - {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] @@ -300,14 +314,13 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-timeout" -version = "2.1.0" +version = "2.2.0" description = "pytest plugin to abort hanging tests" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, - {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, + {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, + {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, ] [package.dependencies] @@ -317,7 +330,6 @@ pytest = ">=5.0.0" name = "setuptools" version = "65.7.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -334,7 +346,6 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -344,21 +355,19 @@ files 
= [ [[package]] name = "typing-extensions" -version = "4.5.0" +version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -373,4 +382,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "1b871ae566e35d2aa05a22a4ff564eaec72807a4c37a012e41f8287831435b74" +content-hash = "5d7b707a062b320ee2930929c2b948e1e542f16eba9363175eaa09f09b111a02" diff --git a/pyproject.toml b/pyproject.toml index 8bce0b51..06215b5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ ifaddr = ">=0.1.7" pytest = "^7.2.0" pytest-cov = "^4.0.0" pytest-asyncio = "^0.20.3" -cython = "^0.29.32" +cython = "^3.0.5" setuptools = "^65.6.3" pytest-timeout = "^2.1.0" From ecea4e4217892ca8cf763074ac3e5d1b898acd21 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 10 Dec 2023 11:10:16 -1000 Subject: [PATCH 187/434] fix: timestamps missing double precision (#1324) --- src/zeroconf/_cache.pxd | 6 +++--- src/zeroconf/_cache.py | 4 ++-- src/zeroconf/_dns.pxd | 14 +++++++------- src/zeroconf/_handlers/answers.pxd | 4 ++-- .../_handlers/multicast_outgoing_queue.pxd | 2 +- src/zeroconf/_handlers/record_manager.pxd | 2 +- src/zeroconf/_handlers/record_manager.py | 6 +++--- src/zeroconf/_listener.pxd | 6 +++--- src/zeroconf/_protocol/outgoing.pxd | 6 +++--- src/zeroconf/_protocol/outgoing.py | 4 ++-- src/zeroconf/_services/browser.pxd | 4 ++-- src/zeroconf/_services/info.pxd | 12 ++++++------ src/zeroconf/_updates.pxd | 2 +- src/zeroconf/_utils/time.pxd | 2 +- 14 files changed, 37 insertions(+), 37 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index ef1c1353..84107957 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -36,7 +36,7 @@ cdef class DNSCache: @cython.locals( record=DNSRecord, ) - cpdef async_expire(self, float now) + cpdef async_expire(self, double now) @cython.locals( records=cython.dict, @@ -68,6 +68,6 @@ cdef class DNSCache: @cython.locals( record=DNSRecord, - created_float=cython.float, + created_double=double, ) - cpdef async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, float now) + cpdef async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, double now) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index 83206e79..35a13cf6 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -243,7 +243,7 @@ def async_mark_unique_records_older_than_1s_to_expire( answers_rrset = set(answers) for name, type_, class_ in unique_types: for record in self.async_all_by_details(name, type_, class_): - created_float = record.created - if (now - created_float > _ONE_SECOND) and record not in answers_rrset: + created_double = record.created + if (now - 
created_double > _ONE_SECOND) and record not in answers_rrset: # Expire in 1s record.set_created_ttl(now, 1) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 255181f8..d4116a66 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -43,7 +43,7 @@ cdef class DNSQuestion(DNSEntry): cdef class DNSRecord(DNSEntry): cdef public cython.float ttl - cdef public cython.float created + cdef public double created cdef bint _suppressed_by_answer(self, DNSRecord answer) @@ -52,19 +52,19 @@ cdef class DNSRecord(DNSEntry): ) cpdef bint suppressed_by(self, object msg) - cpdef get_remaining_ttl(self, cython.float now) + cpdef get_remaining_ttl(self, double now) - cpdef get_expiration_time(self, cython.uint percent) + cpdef double get_expiration_time(self, cython.uint percent) - cpdef bint is_expired(self, cython.float now) + cpdef bint is_expired(self, double now) - cpdef bint is_stale(self, cython.float now) + cpdef bint is_stale(self, double now) - cpdef bint is_recent(self, cython.float now) + cpdef bint is_recent(self, double now) cpdef reset_ttl(self, DNSRecord other) - cpdef set_created_ttl(self, cython.float now, cython.float ttl) + cpdef set_created_ttl(self, double now, cython.float ttl) cdef class DNSAddress(DNSRecord): diff --git a/src/zeroconf/_handlers/answers.pxd b/src/zeroconf/_handlers/answers.pxd index 7efc45c7..5a3010ad 100644 --- a/src/zeroconf/_handlers/answers.pxd +++ b/src/zeroconf/_handlers/answers.pxd @@ -15,8 +15,8 @@ cdef class QuestionAnswers: cdef class AnswerGroup: - cdef public float send_after - cdef public float send_before + cdef public double send_after + cdef public double send_before cdef public cython.dict answers diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd index 59a4fb2a..1a8d6741 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd @@ -19,7 +19,7 @@ cdef class 
MulticastOutgoingQueue: cdef object _aggregation_delay @cython.locals(last_group=AnswerGroup, random_int=cython.uint) - cpdef async_add(self, float now, cython.dict answers) + cpdef async_add(self, double now, cython.dict answers) @cython.locals(pending=AnswerGroup) cdef _remove_answers_from_queue(self, cython.dict answers) diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index 8775108b..0f543aff 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -31,7 +31,7 @@ cdef class RecordManager: record=DNSRecord, answers=cython.list, maybe_entry=DNSRecord, - now_float=cython.float + now_double=double ) cpdef async_updates_from_response(self, DNSIncoming msg) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index cbf88abd..129acd0b 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -84,7 +84,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: other_adds: List[DNSRecord] = [] removes: Set[DNSRecord] = set() now = msg.now - now_float = now + now_double = now unique_types: Set[Tuple[str, int, int]] = set() cache = self.cache answers = msg.answers() @@ -113,7 +113,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: record = cast(_UniqueRecordsType, record) maybe_entry = cache.async_get_unique(record) - if not record.is_expired(now_float): + if not record.is_expired(now_double): if maybe_entry is not None: maybe_entry.reset_ttl(record) else: @@ -129,7 +129,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: removes.add(record) if unique_types: - cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now_float) + cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now_double) if updates: self.async_updates(now, updates) diff --git a/src/zeroconf/_listener.pxd 
b/src/zeroconf/_listener.pxd index 729e0de6..8b144653 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -22,18 +22,18 @@ cdef class AsyncListener: cdef ServiceRegistry _registry cdef RecordManager _record_manager cdef public cython.bytes data - cdef public cython.float last_time + cdef public double last_time cdef public DNSIncoming last_message cdef public object transport cdef public object sock_description cdef public cython.dict _deferred cdef public cython.dict _timers - @cython.locals(now=cython.float, debug=cython.bint) + @cython.locals(now=double, debug=cython.bint) cpdef datagram_received(self, cython.bytes bytes, cython.tuple addrs) @cython.locals(msg=DNSIncoming) - cpdef _process_datagram_at_time(self, bint debug, cython.uint data_len, cython.float now, bytes data, cython.tuple addrs) + cpdef _process_datagram_at_time(self, bint debug, cython.uint data_len, double now, bytes data, cython.tuple addrs) cdef _cancel_any_timers_for_addr(self, object addr) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 52237f09..3460f0c7 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -71,7 +71,7 @@ cdef class DNSOutgoing: index=cython.uint, length=cython.uint ) - cdef cython.bint _write_record(self, DNSRecord record, float now) + cdef cython.bint _write_record(self, DNSRecord record, double now) @cython.locals(class_=cython.uint) cdef _write_record_class(self, DNSEntry record) @@ -92,7 +92,7 @@ cdef class DNSOutgoing: cdef bint _has_more_to_add(self, unsigned int questions_offset, unsigned int answer_offset, unsigned int authority_offset, unsigned int additional_offset) - cdef _write_ttl(self, DNSRecord record, float now) + cdef _write_ttl(self, DNSRecord record, double now) @cython.locals( labels=cython.list, @@ -135,7 +135,7 @@ cdef class DNSOutgoing: cpdef add_answer(self, DNSIncoming inp, DNSRecord record) - @cython.locals(now_float=cython.float) + 
@cython.locals(now_double=double) cpdef add_answer_at_time(self, DNSRecord record, object now) cpdef add_authorative_answer(self, DNSPointer record) diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index e421681c..e94cd0d2 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -152,8 +152,8 @@ def add_answer(self, inp: DNSIncoming, record: DNSRecord) -> None: def add_answer_at_time(self, record: Optional[DNSRecord], now: Union[float, int]) -> None: """Adds an answer if it does not expire by a certain time""" - now_float = now - if record is not None and (now_float == 0 or not record.is_expired(now_float)): + now_double = now + if record is not None and (now_double == 0 or not record.is_expired(now_double)): self.answers.append((record, now)) def add_authorative_answer(self, record: DNSPointer) -> None: diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 25c0f584..a1d79b08 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -28,7 +28,7 @@ cdef class _DNSPointerOutgoingBucket: @cython.locals(cache=DNSCache, question_history=QuestionHistory, record=DNSRecord, qu_question=bint) cpdef generate_service_query( object zc, - float now, + double now, list type_, bint multicast, object question_type @@ -73,7 +73,7 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef _enqueue_callback(self, object state_change, object type_, object name) @cython.locals(record_update=RecordUpdate, record=DNSRecord, cache=DNSCache, service=DNSRecord, pointer=DNSPointer) - cpdef async_update_records(self, object zc, cython.float now, cython.list records) + cpdef async_update_records(self, object zc, double now, cython.list records) cpdef cython.list _names_matching_types(self, object types) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index b7a2ee30..ae24c769 100644 --- a/src/zeroconf/_services/info.pxd 
+++ b/src/zeroconf/_services/info.pxd @@ -66,10 +66,10 @@ cdef class ServiceInfo(RecordUpdateListener): cdef public cython.set _get_address_and_nsec_records_cache @cython.locals(record_update=RecordUpdate, update=bint, cache=DNSCache) - cpdef async_update_records(self, object zc, cython.float now, cython.list records) + cpdef async_update_records(self, object zc, double now, cython.list records) @cython.locals(cache=DNSCache) - cpdef bint _load_from_cache(self, object zc, cython.float now) + cpdef bint _load_from_cache(self, object zc, double now) @cython.locals(length="unsigned char", index="unsigned int", key_value=bytes, key_sep_value=tuple) cdef void _unpack_text_into_properties(self) @@ -79,21 +79,21 @@ cdef class ServiceInfo(RecordUpdateListener): cdef _set_text(self, cython.bytes text) @cython.locals(record=DNSAddress) - cdef _get_ip_addresses_from_cache_lifo(self, object zc, cython.float now, object type) + cdef _get_ip_addresses_from_cache_lifo(self, object zc, double now, object type) @cython.locals( dns_service_record=DNSService, dns_text_record=DNSText, dns_address_record=DNSAddress ) - cdef bint _process_record_threadsafe(self, object zc, DNSRecord record, cython.float now) + cdef bint _process_record_threadsafe(self, object zc, DNSRecord record, double now) @cython.locals(cache=DNSCache) cdef cython.list _get_address_records_from_cache_by_type(self, object zc, object _type) - cdef _set_ipv4_addresses_from_cache(self, object zc, object now) + cdef _set_ipv4_addresses_from_cache(self, object zc, double now) - cdef _set_ipv6_addresses_from_cache(self, object zc, object now) + cdef _set_ipv6_addresses_from_cache(self, object zc, double now) cdef cython.list _ip_addresses_by_version_value(self, object version_value) diff --git a/src/zeroconf/_updates.pxd b/src/zeroconf/_updates.pxd index 23edf643..e1b44a12 100644 --- a/src/zeroconf/_updates.pxd +++ b/src/zeroconf/_updates.pxd @@ -4,6 +4,6 @@ import cython cdef class RecordUpdateListener: - cpdef 
async_update_records(self, object zc, cython.float now, cython.list records) + cpdef async_update_records(self, object zc, double now, cython.list records) cpdef async_update_records_complete(self) diff --git a/src/zeroconf/_utils/time.pxd b/src/zeroconf/_utils/time.pxd index 367f39b6..a9600286 100644 --- a/src/zeroconf/_utils/time.pxd +++ b/src/zeroconf/_utils/time.pxd @@ -1,4 +1,4 @@ -cpdef current_time_millis() +cpdef double current_time_millis() cpdef millis_to_seconds(object millis) From 868c2551d82a6279a2351de519b0091cc08b714a Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 10 Dec 2023 21:21:02 +0000 Subject: [PATCH 188/434] 0.128.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 944bb8b7..1ccf586a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.128.2 (2023-12-10) + +### Fix + +* Timestamps missing double precision ([#1324](https://github.com/python-zeroconf/python-zeroconf/issues/1324)) ([`ecea4e4`](https://github.com/python-zeroconf/python-zeroconf/commit/ecea4e4217892ca8cf763074ac3e5d1b898acd21)) +* Match cython version for dev deps to build deps ([#1325](https://github.com/python-zeroconf/python-zeroconf/issues/1325)) ([`a0dac46`](https://github.com/python-zeroconf/python-zeroconf/commit/a0dac46c01202b3d5a0823ac1928fc1d75332522)) + ## v0.128.1 (2023-12-10) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 06215b5d..0f389bc7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.128.1" +version = "0.128.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 64cd0033..ab3ec633 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.128.1' +__version__ = '0.128.2' __license__ = 'LGPL' From cd7a16a32c37b2f7a2e90d3c749525a5393bad57 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 10 Dec 2023 13:04:13 -1000 Subject: [PATCH 189/434] fix: correct nsec record writing (#1326) --- src/zeroconf/_dns.py | 16 ++++++++++------ tests/test_protocol.py | 37 ++++++++++++++++++++++++++++++++++--- 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 4ca429a8..66fb5b86 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -480,17 +480,21 @@ def __init__( def write(self, out: 'DNSOutgoing') -> None: """Used in constructing an outgoing packet.""" bitmap = bytearray(b'\0' * 32) + total_octets = 0 for rdtype in self.rdtypes: if rdtype > 255: # mDNS only supports window 0 - continue - offset = rdtype % 256 - byte = offset // 8 + raise ValueError(f"rdtype {rdtype} is too large for NSEC") + byte = rdtype // 8 total_octets = byte + 1 - bitmap[byte] |= 0x80 >> (offset % 8) + bitmap[byte] |= 0x80 >> (rdtype % 8) + if total_octets == 0: + # NSEC must have at least one rdtype + # Writing an empty bitmap is not allowed + raise ValueError("NSEC must have at least one rdtype") out_bytes = bytes(bitmap[0:total_octets]) out.write_name(self.next_name) - out.write_short(0) - out.write_short(len(out_bytes)) + out._write_byte(0) # Always window 0 + out._write_byte(len(out_bytes)) out.write_string(out_bytes) def __eq__(self, other: Any) -> bool: diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 0a853104..c830b6c3 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -12,6 +12,8 @@ import unittest.mock from typing import 
cast +import pytest + import zeroconf as r from zeroconf import DNSHinfo, DNSIncoming, DNSText, const, current_time_millis @@ -65,7 +67,22 @@ def test_parse_own_packet_nsec(self): parsed = r.DNSIncoming(generated.packets()[0]) assert answer in parsed.answers() - # Types > 255 should be ignored + # Now with the higher RD type first + answer = r.DNSNsec( + 'eufy HomeBase2-2464._hap._tcp.local.', + const._TYPE_NSEC, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + 'eufy HomeBase2-2464._hap._tcp.local.', + [const._TYPE_SRV, const._TYPE_TXT], + ) + + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + generated.add_answer_at_time(answer, 0) + parsed = r.DNSIncoming(generated.packets()[0]) + assert answer in parsed.answers() + + # Types > 255 should raise an exception answer_invalid_types = r.DNSNsec( 'eufy HomeBase2-2464._hap._tcp.local.', const._TYPE_NSEC, @@ -76,8 +93,22 @@ def test_parse_own_packet_nsec(self): ) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time(answer_invalid_types, 0) - parsed = r.DNSIncoming(generated.packets()[0]) - assert answer in parsed.answers() + with pytest.raises(ValueError, match='rdtype 1000 is too large for NSEC'): + generated.packets() + + # Empty rdtypes are not allowed + answer_invalid_types = r.DNSNsec( + 'eufy HomeBase2-2464._hap._tcp.local.', + const._TYPE_NSEC, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + 'eufy HomeBase2-2464._hap._tcp.local.', + [], + ) + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + generated.add_answer_at_time(answer_invalid_types, 0) + with pytest.raises(ValueError, match='NSEC must have at least one rdtype'): + generated.packets() def test_parse_own_packet_response(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) From 816c0917c24875bd2e7a7fae54e0b3cc80c5794f Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 10 Dec 2023 23:13:09 +0000 Subject: [PATCH 190/434] 0.128.3 Automatically generated by 
python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ccf586a..e25f2880 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.128.3 (2023-12-10) + +### Fix + +* Correct nsec record writing ([#1326](https://github.com/python-zeroconf/python-zeroconf/issues/1326)) ([`cd7a16a`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7a16a32c37b2f7a2e90d3c749525a5393bad57)) + ## v0.128.2 (2023-12-10) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 0f389bc7..78c6d73c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.128.2" +version = "0.128.3" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ab3ec633..8e5526cd 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.128.2' +__version__ = '0.128.3' __license__ = 'LGPL' From 39c40051d7a63bdc63a3e2dfa20bd944fee4e761 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 10 Dec 2023 13:31:19 -1000 Subject: [PATCH 191/434] fix: re-expose ServiceInfo._set_properties for backwards compat (#1327) --- src/zeroconf/_services/info.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index ae24c769..1f71daa5 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -74,7 +74,7 @@ cdef class ServiceInfo(RecordUpdateListener): @cython.locals(length="unsigned char", index="unsigned int", key_value=bytes, key_sep_value=tuple) cdef void _unpack_text_into_properties(self) - cdef _set_properties(self, cython.dict properties) + cpdef _set_properties(self, cython.dict properties) cdef _set_text(self, cython.bytes text) From 878a726b302530cd904b2e8fd0d48dfce6b165d3 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 10 Dec 2023 23:41:00 +0000 Subject: [PATCH 192/434] 0.128.4 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e25f2880..d8b6d01f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.128.4 (2023-12-10) + +### Fix + +* Re-expose ServiceInfo._set_properties for backwards compat ([#1327](https://github.com/python-zeroconf/python-zeroconf/issues/1327)) ([`39c4005`](https://github.com/python-zeroconf/python-zeroconf/commit/39c40051d7a63bdc63a3e2dfa20bd944fee4e761)) + ## v0.128.3 (2023-12-10) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 78c6d73c..f6230672 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.128.3" +version = "0.128.4" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 8e5526cd..7cdb30f5 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.128.3' +__version__ = '0.128.4' __license__ = 'LGPL' From e2f9f81dbc54c3dd527eeb3298897d63f99d33f4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 13 Dec 2023 11:07:42 -1000 Subject: [PATCH 193/434] fix: performance regression with ServiceInfo IPv6Addresses (#1330) --- build_ext.py | 1 + src/zeroconf/_services/info.pxd | 14 ++-- src/zeroconf/_services/info.py | 57 ++++---------- src/zeroconf/_utils/ipaddress.pxd | 14 ++++ src/zeroconf/_utils/ipaddress.py | 121 ++++++++++++++++++++++++++++++ tests/utils/test_ipaddress.py | 24 ++++++ 6 files changed, 180 insertions(+), 51 deletions(-) create mode 100644 src/zeroconf/_utils/ipaddress.pxd create mode 100644 src/zeroconf/_utils/ipaddress.py create mode 100644 tests/utils/test_ipaddress.py diff --git a/build_ext.py b/build_ext.py index d2f32685..0f02f53a 100644 --- a/build_ext.py +++ b/build_ext.py @@ -39,6 +39,7 @@ def build(setup_kwargs: Any) -> None: "src/zeroconf/_services/info.py", "src/zeroconf/_services/registry.py", "src/zeroconf/_updates.py", + "src/zeroconf/_utils/ipaddress.py", "src/zeroconf/_utils/time.py", ], compiler_directives={"language_level": "3"}, # Python 3 diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 1f71daa5..ec19fcc6 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -6,11 +6,15 @@ from .._dns cimport DNSAddress, DNSNsec, DNSPointer, DNSRecord, DNSService, DNST from .._protocol.outgoing cimport DNSOutgoing from .._record_update cimport RecordUpdate from .._updates cimport RecordUpdateListener +from .._utils.ipaddress cimport ( + get_ip_address_object_from_record, + ip_bytes_and_scope_to_address, + 
str_without_scope_id, +) from .._utils.time cimport current_time_millis cdef object _resolve_all_futures_to_none -cdef object _cached_ip_addresses_wrapper cdef object _TYPE_SRV cdef object _TYPE_TXT @@ -33,13 +37,7 @@ cdef cython.set _ADDRESS_RECORD_TYPES cdef bint TYPE_CHECKING cdef bint IPADDRESS_SUPPORTS_SCOPE_ID - -cdef _get_ip_address_object_from_record(DNSAddress record) - -@cython.locals(address_str=str) -cdef _str_without_scope_id(object addr) - -cdef _ip_bytes_and_scope_to_address(object addr, object scope_id) +cdef object cached_ip_addresses cdef class ServiceInfo(RecordUpdateListener): diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index e9e25763..704c46b6 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -23,8 +23,7 @@ import asyncio import random import sys -from functools import lru_cache -from ipaddress import IPv4Address, IPv6Address, _BaseAddress, ip_address +from ipaddress import IPv4Address, IPv6Address, _BaseAddress from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast from .._dns import ( @@ -47,6 +46,12 @@ run_coro_with_timeout, wait_for_future_set_or_timeout, ) +from .._utils.ipaddress import ( + cached_ip_addresses, + get_ip_address_object_from_record, + ip_bytes_and_scope_to_address, + str_without_scope_id, +) from .._utils.name import service_type_name from .._utils.net import IPVersion, _encode_address from .._utils.time import current_time_millis @@ -67,6 +72,8 @@ _TYPE_TXT, ) +IPADDRESS_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) + _IPVersion_All_value = IPVersion.All.value _IPVersion_V4Only_value = IPVersion.V4Only.value # https://datatracker.ietf.org/doc/html/rfc6762#section-5.2 @@ -86,7 +93,6 @@ DNS_QUESTION_TYPE_QU = DNSQuestionType.QU DNS_QUESTION_TYPE_QM = DNSQuestionType.QM -IPADDRESS_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) if TYPE_CHECKING: from .._core import Zeroconf @@ -102,41 +108,6 @@ def instance_name_from_service_info(info: 
"ServiceInfo", strict: bool = True) -> return info.name[: -len(service_name) - 1] -@lru_cache(maxsize=512) -def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4Address, IPv6Address]]: - """Cache IP addresses.""" - try: - return ip_address(address) - except ValueError: - return None - - -_cached_ip_addresses_wrapper = _cached_ip_addresses - - -def _get_ip_address_object_from_record(record: DNSAddress) -> Optional[Union[IPv4Address, IPv6Address]]: - """Get the IP address object from the record.""" - if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id is not None: - return _ip_bytes_and_scope_to_address(record.address, record.scope_id) - return _cached_ip_addresses_wrapper(record.address) - - -def _ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Union[IPv4Address, IPv6Address]]: - """Convert the bytes and scope to an IP address object.""" - base_address = _cached_ip_addresses_wrapper(address) - if base_address is not None and base_address.is_link_local: - return _cached_ip_addresses_wrapper(f"{base_address}%{scope}") - return base_address - - -def _str_without_scope_id(addr: Union[IPv4Address, IPv6Address]) -> str: - """Return the string representation of the address without the scope id.""" - if IPADDRESS_SUPPORTS_SCOPE_ID and addr.version == 6: - address_str = str(addr) - return address_str.partition('%')[0] - return str(addr) - - class ServiceInfo(RecordUpdateListener): """Service information. 
@@ -271,9 +242,9 @@ def addresses(self, value: List[bytes]) -> None: for address in value: if IPADDRESS_SUPPORTS_SCOPE_ID and len(address) == 16 and self.interface_index is not None: - addr = _ip_bytes_and_scope_to_address(address, self.interface_index) + addr = ip_bytes_and_scope_to_address(address, self.interface_index) else: - addr = _cached_ip_addresses_wrapper(address) + addr = cached_ip_addresses(address) if addr is None: raise TypeError( "Addresses must either be IPv4 or IPv6 strings, bytes, or integers;" @@ -369,7 +340,7 @@ def parsed_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: This means the first address will always be the most recently added address of the given IP version. """ - return [_str_without_scope_id(addr) for addr in self._ip_addresses_by_version_value(version.value)] + return [str_without_scope_id(addr) for addr in self._ip_addresses_by_version_value(version.value)] def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: """Equivalent to parsed_addresses, with the exception that IPv6 Link-Local @@ -446,7 +417,7 @@ def _get_ip_addresses_from_cache_lifo( for record in self._get_address_records_from_cache_by_type(zc, type): if record.is_expired(now): continue - ip_addr = _get_ip_address_object_from_record(record) + ip_addr = get_ip_address_object_from_record(record) if ip_addr is not None and ip_addr not in address_list: address_list.append(ip_addr) address_list.reverse() # Reverse to get LIFO order @@ -496,7 +467,7 @@ def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: flo dns_address_record = record if TYPE_CHECKING: assert isinstance(dns_address_record, DNSAddress) - ip_addr = _get_ip_address_object_from_record(dns_address_record) + ip_addr = get_ip_address_object_from_record(dns_address_record) if ip_addr is None: log.warning( "Encountered invalid address while processing %s: %s", diff --git a/src/zeroconf/_utils/ipaddress.pxd b/src/zeroconf/_utils/ipaddress.pxd new 
file mode 100644 index 00000000..098c6ff9 --- /dev/null +++ b/src/zeroconf/_utils/ipaddress.pxd @@ -0,0 +1,14 @@ +cdef bint TYPE_CHECKING +cdef bint IPADDRESS_SUPPORTS_SCOPE_ID + +from .._dns cimport DNSAddress + + +cpdef get_ip_address_object_from_record(DNSAddress record) + +@cython.locals(address_str=str) +cpdef str_without_scope_id(object addr) + +cpdef ip_bytes_and_scope_to_address(object addr, object scope_id) + +cdef object cached_ip_addresses_wrapper diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py new file mode 100644 index 00000000..b946efb5 --- /dev/null +++ b/src/zeroconf/_utils/ipaddress.py @@ -0,0 +1,121 @@ +""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine + Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 + USA +""" +import sys +from functools import lru_cache +from ipaddress import AddressValueError, IPv4Address, IPv6Address, NetmaskValueError +from typing import Any, Optional, Union + +from .._dns import DNSAddress +from ..const import _TYPE_AAAA + +bytes_ = bytes +int_ = int +IPADDRESS_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) + + +class ZeroconfIPv4Address(IPv4Address): + + __slots__ = ("_str", "_is_link_local") + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Initialize a new IPv4 address.""" + super().__init__(*args, **kwargs) + self._str = super().__str__() + self._is_link_local = super().is_link_local + + def __str__(self) -> str: + """Return the string representation of the IPv4 address.""" + return self._str + + @property + def is_link_local(self) -> bool: + """Return True if this is a link-local address.""" + return self._is_link_local + + +class ZeroconfIPv6Address(IPv6Address): + + __slots__ = ("_str", "_is_link_local") + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Initialize a new IPv6 address.""" + super().__init__(*args, **kwargs) + self._str = super().__str__() + self._is_link_local = super().is_link_local + + def __str__(self) -> str: + """Return the string representation of the IPv6 address.""" + return self._str + + @property + def is_link_local(self) -> bool: + """Return True if this is a link-local address.""" + return self._is_link_local + + +@lru_cache(maxsize=512) +def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4Address, IPv6Address]]: + """Cache IP addresses.""" + try: + return ZeroconfIPv4Address(address) + except (AddressValueError, NetmaskValueError): + pass + + try: + return ZeroconfIPv6Address(address) + except (AddressValueError, NetmaskValueError): + 
return None + + +cached_ip_addresses_wrapper = _cached_ip_addresses +cached_ip_addresses = cached_ip_addresses_wrapper + + +def get_ip_address_object_from_record(record: DNSAddress) -> Optional[Union[IPv4Address, IPv6Address]]: + """Get the IP address object from the record.""" + if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id is not None: + return ip_bytes_and_scope_to_address(record.address, record.scope_id) + return cached_ip_addresses_wrapper(record.address) + + +def ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Union[IPv4Address, IPv6Address]]: + """Convert the bytes and scope to an IP address object.""" + base_address = cached_ip_addresses_wrapper(address) + if base_address is not None and base_address.is_link_local: + return cached_ip_addresses_wrapper(f"{base_address}%{scope}") + return base_address + + +def str_without_scope_id(addr: Union[IPv4Address, IPv6Address]) -> str: + """Return the string representation of the address without the scope id.""" + if IPADDRESS_SUPPORTS_SCOPE_ID and addr.version == 6: + address_str = str(addr) + return address_str.partition('%')[0] + return str(addr) + + +__all__ = ( + "cached_ip_addresses", + "get_ip_address_object_from_record", + "ip_bytes_and_scope_to_address", + "str_without_scope_id", +) diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py new file mode 100644 index 00000000..9dd558f2 --- /dev/null +++ b/tests/utils/test_ipaddress.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python + +"""Unit tests for zeroconf._utils.ipaddress.""" + +from zeroconf._utils import ipaddress + + +def test_cached_ip_addresses_wrapper(): + """Test the cached_ip_addresses_wrapper.""" + assert ipaddress.cached_ip_addresses('') is None + assert ipaddress.cached_ip_addresses('foo') is None + assert ( + str(ipaddress.cached_ip_addresses(b'&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F')) + == '2606:2800:220:1:248:1893:25c8:1946' + ) + assert 
ipaddress.cached_ip_addresses('::1') == ipaddress.IPv6Address('::1') + + ipv4 = ipaddress.cached_ip_addresses('169.254.0.0') + assert ipv4 is not None + assert ipv4.is_link_local is True + + ipv6 = ipaddress.cached_ip_addresses('fe80::1') + assert ipv6 is not None + assert ipv6.is_link_local is True From ede0a2a153ec28536905217cdc801cba9cdcacf7 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 13 Dec 2023 21:17:09 +0000 Subject: [PATCH 194/434] 0.128.5 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d8b6d01f..4d59b097 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.128.5 (2023-12-13) + +### Fix + +* Performance regression with ServiceInfo IPv6Addresses ([#1330](https://github.com/python-zeroconf/python-zeroconf/issues/1330)) ([`e2f9f81`](https://github.com/python-zeroconf/python-zeroconf/commit/e2f9f81dbc54c3dd527eeb3298897d63f99d33f4)) + ## v0.128.4 (2023-12-10) ### Fix diff --git a/pyproject.toml b/pyproject.toml index f6230672..0fea63f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.128.4" +version = "0.128.5" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 7cdb30f5..7199ca50 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.128.4' +__version__ = '0.128.5' __license__ = 'LGPL' From a1c84dc6adeebd155faec1a647c0f70d70de2945 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 13 Dec 2023 12:47:33 -1000 Subject: [PATCH 195/434] feat: cache is_unspecified for zeroconf ip address objects (#1331) --- src/zeroconf/_utils/ipaddress.py | 16 ++++++++++++++-- tests/utils/test_ipaddress.py | 12 ++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index b946efb5..abb1306f 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -34,13 +34,14 @@ class ZeroconfIPv4Address(IPv4Address): - __slots__ = ("_str", "_is_link_local") + __slots__ = ("_str", "_is_link_local", "_is_unspecified") def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a new IPv4 address.""" super().__init__(*args, **kwargs) self._str = super().__str__() self._is_link_local = super().is_link_local + self._is_unspecified = super().is_unspecified def __str__(self) -> str: """Return the string representation of the IPv4 address.""" @@ -51,16 +52,22 @@ def is_link_local(self) -> bool: """Return True if this is a link-local address.""" return self._is_link_local + @property + def is_unspecified(self) -> bool: + """Return True if this is an unspecified address.""" + return self._is_unspecified + class ZeroconfIPv6Address(IPv6Address): - __slots__ = ("_str", "_is_link_local") + __slots__ = ("_str", "_is_link_local", "_is_unspecified") def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a new IPv6 address.""" super().__init__(*args, **kwargs) self._str = super().__str__() self._is_link_local = super().is_link_local + self._is_unspecified = super().is_unspecified def __str__(self) -> str: """Return the string representation of the IPv6 address.""" @@ -71,6 +78,11 @@ def is_link_local(self) -> bool: """Return True if this is a link-local address.""" return self._is_link_local + @property + def is_unspecified(self) -> bool: + """Return True if this is an unspecified address.""" + return self._is_unspecified + 
@lru_cache(maxsize=512) def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4Address, IPv6Address]]: diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index 9dd558f2..3ec1a9a7 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ -18,7 +18,19 @@ def test_cached_ip_addresses_wrapper(): ipv4 = ipaddress.cached_ip_addresses('169.254.0.0') assert ipv4 is not None assert ipv4.is_link_local is True + assert ipv4.is_unspecified is False + + ipv4 = ipaddress.cached_ip_addresses('0.0.0.0') + assert ipv4 is not None + assert ipv4.is_link_local is False + assert ipv4.is_unspecified is True ipv6 = ipaddress.cached_ip_addresses('fe80::1') assert ipv6 is not None assert ipv6.is_link_local is True + assert ipv6.is_unspecified is False + + ipv6 = ipaddress.cached_ip_addresses('0:0:0:0:0:0:0:0') + assert ipv6 is not None + assert ipv6.is_link_local is False + assert ipv6.is_unspecified is True From d29553ab7de6b7af70769ddb804fe2aaf492f320 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 13 Dec 2023 13:05:57 -1000 Subject: [PATCH 196/434] feat: ensure ServiceInfo.properties always returns bytes (#1333) --- src/zeroconf/_services/info.pxd | 1 + src/zeroconf/_services/info.py | 26 +++++++++++++++----------- tests/test_services.py | 1 + 3 files changed, 17 insertions(+), 11 deletions(-) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index ec19fcc6..6ab77424 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -72,6 +72,7 @@ cdef class ServiceInfo(RecordUpdateListener): @cython.locals(length="unsigned char", index="unsigned int", key_value=bytes, key_sep_value=tuple) cdef void _unpack_text_into_properties(self) + @cython.locals(properties_contain_str=bint) cpdef _set_properties(self, cython.dict properties) cdef _set_text(self, cython.bytes text) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 704c46b6..1397dcec 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -191,7 +191,7 @@ def __init__( self.priority = priority self.server = server if server else None self.server_key = server.lower() if server else None - self._properties: Optional[Dict[Union[str, bytes], Optional[Union[str, bytes]]]] = None + self._properties: Optional[Dict[bytes, Optional[bytes]]] = None if isinstance(properties, bytes): self._set_text(properties) else: @@ -260,14 +260,8 @@ def addresses(self, value: List[bytes]) -> None: self._ipv6_addresses.append(addr) @property - def properties(self) -> Dict[Union[str, bytes], Optional[Union[str, bytes]]]: - """If properties were set in the constructor this property returns the original dictionary - of type `Dict[Union[bytes, str], Any]`. - - If properties are coming from the network, after decoding a TXT record, the keys are always - bytes and the values are either bytes, if there was a value, even empty, or `None`, if there - was none. No further decoding is attempted. 
The type returned is `Dict[bytes, Optional[bytes]]`. - """ + def properties(self) -> Dict[bytes, Optional[bytes]]: + """Return properties as bytes.""" if self._properties is None: self._unpack_text_into_properties() if TYPE_CHECKING: @@ -356,21 +350,31 @@ def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[st def _set_properties(self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]]) -> None: """Sets properties and text of this info from a dictionary""" - self._properties = properties list_: List[bytes] = [] + properties_contain_str = False result = b'' for key, value in properties.items(): if isinstance(key, str): key = key.encode('utf-8') + properties_contain_str = True record = key if value is not None: if not isinstance(value, bytes): value = str(value).encode('utf-8') + properties_contain_str = True record += b'=' + value list_.append(record) for item in list_: result = b''.join((result, bytes((len(item),)), item)) + if not properties_contain_str: + # If there are no str keys or values, we can use the properties + # as-is, without decoding them, otherwise calling + # self.properties will lazy decode them, which is expensive. 
+ if TYPE_CHECKING: + self._properties = cast("Dict[bytes, Optional[bytes]]", properties) + else: + self._properties = properties self.text = result def _set_text(self, text: bytes) -> None: @@ -392,7 +396,7 @@ def _unpack_text_into_properties(self) -> None: return index = 0 - properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]] = {} + properties: Dict[bytes, Optional[bytes]] = {} while index < end: length = text[index] index += 1 diff --git a/tests/test_services.py b/tests/test_services.py index e21c23d9..87bb6fc9 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -133,6 +133,7 @@ def update_service(self, zeroconf, type, name): assert info.properties[b'prop_blank'] == properties['prop_blank'] assert info.properties[b'prop_true'] == b'1' assert info.properties[b'prop_false'] == b'0' + assert info.addresses == addresses[:1] # no V6 by default assert set(info.addresses_by_version(r.IPVersion.All)) == set(addresses) From 9b595a1dcacf109c699953219d70fe36296c7318 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 13 Dec 2023 13:28:13 -1000 Subject: [PATCH 197/434] feat: add decoded_properties method to ServiceInfo (#1332) --- src/zeroconf/_services/info.pxd | 4 ++++ src/zeroconf/_services/info.py | 19 +++++++++++++++++++ tests/test_services.py | 9 +++++++++ 3 files changed, 32 insertions(+) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 6ab77424..c53342cb 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -53,6 +53,7 @@ cdef class ServiceInfo(RecordUpdateListener): cdef public str server cdef public str server_key cdef public cython.dict _properties + cdef public cython.dict _decoded_properties cdef public object host_ttl cdef public object other_ttl cdef public object interface_index @@ -72,6 +73,9 @@ cdef class ServiceInfo(RecordUpdateListener): @cython.locals(length="unsigned char", index="unsigned int", key_value=bytes, key_sep_value=tuple) cdef void _unpack_text_into_properties(self) + @cython.locals(k=bytes, v=bytes) + cdef void _generate_decoded_properties(self) + @cython.locals(properties_contain_str=bint) cpdef _set_properties(self, cython.dict properties) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 1397dcec..962e76bf 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -143,6 +143,7 @@ class ServiceInfo(RecordUpdateListener): "server", "server_key", "_properties", + "_decoded_properties", "host_ttl", "other_ttl", "interface_index", @@ -192,6 +193,7 @@ def __init__( self.server = server if server else None self.server_key = server.lower() if server else None self._properties: Optional[Dict[bytes, Optional[bytes]]] = None + self._decoded_properties: Optional[Dict[str, Optional[str]]] = None if isinstance(properties, bytes): self._set_text(properties) else: @@ -268,6 +270,15 @@ def properties(self) -> Dict[bytes, Optional[bytes]]: assert self._properties is not None return self._properties + @property + 
def decoded_properties(self) -> Dict[str, Optional[str]]: + """Return properties as strings.""" + if self._decoded_properties is None: + self._generate_decoded_properties() + if TYPE_CHECKING: + assert self._decoded_properties is not None + return self._decoded_properties + def async_clear_cache(self) -> None: """Clear the cache for this service info.""" self._dns_address_cache = None @@ -384,6 +395,14 @@ def _set_text(self, text: bytes) -> None: self.text = text # Clear the properties cache self._properties = None + self._decoded_properties = None + + def _generate_decoded_properties(self) -> None: + """Generates decoded properties from the properties""" + self._decoded_properties = { + k.decode("ascii", "replace"): None if v is None else v.decode("utf-8", "replace") + for k, v in self.properties.items() + } def _unpack_text_into_properties(self) -> None: """Unpacks the text field into properties""" diff --git a/tests/test_services.py b/tests/test_services.py index 87bb6fc9..b7bebfa9 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -134,6 +134,13 @@ def update_service(self, zeroconf, type, name): assert info.properties[b'prop_true'] == b'1' assert info.properties[b'prop_false'] == b'0' + assert info.decoded_properties['prop_none'] is None + assert info.decoded_properties['prop_string'] == b'a_prop'.decode('utf-8') + assert info.decoded_properties['prop_float'] == '1.0' + assert info.decoded_properties['prop_blank'] == b'a blanked string'.decode('utf-8') + assert info.decoded_properties['prop_true'] == '1' + assert info.decoded_properties['prop_false'] == '0' + assert info.addresses == addresses[:1] # no V6 by default assert set(info.addresses_by_version(r.IPVersion.All)) == set(addresses) @@ -194,11 +201,13 @@ def update_service(self, zeroconf, type, name): info = zeroconf_browser.get_service_info(type_, registration_name) assert info is not None assert info.properties[b'prop_blank'] == properties['prop_blank'] + assert 
info.decoded_properties['prop_blank'] == b'an updated string'.decode('utf-8') cached_info = ServiceInfo(subtype, registration_name) cached_info.load_from_cache(zeroconf_browser) assert cached_info.properties is not None assert cached_info.properties[b'prop_blank'] == properties['prop_blank'] + assert cached_info.decoded_properties['prop_blank'] == b'an updated string'.decode('utf-8') zeroconf_registrar.unregister_service(info_service) service_removed.wait(1) From 9cd3e24ab625f993e4f48e399fa2e27aa4d26f93 Mon Sep 17 00:00:00 2001 From: github-actions Date: Wed, 13 Dec 2023 23:36:42 +0000 Subject: [PATCH 198/434] 0.129.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 8 ++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4d59b097..af0da45e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,14 @@ +## v0.129.0 (2023-12-13) + +### Feature + +* Add decoded_properties method to ServiceInfo ([#1332](https://github.com/python-zeroconf/python-zeroconf/issues/1332)) ([`9b595a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9b595a1dcacf109c699953219d70fe36296c7318)) +* Ensure ServiceInfo.properties always returns bytes ([#1333](https://github.com/python-zeroconf/python-zeroconf/issues/1333)) ([`d29553a`](https://github.com/python-zeroconf/python-zeroconf/commit/d29553ab7de6b7af70769ddb804fe2aaf492f320)) +* Cache is_unspecified for zeroconf ip address objects ([#1331](https://github.com/python-zeroconf/python-zeroconf/issues/1331)) ([`a1c84dc`](https://github.com/python-zeroconf/python-zeroconf/commit/a1c84dc6adeebd155faec1a647c0f70d70de2945)) + ## v0.128.5 (2023-12-13) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 0fea63f9..c30d5ba2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.128.5" +version = "0.129.0" description = "A pure python implementation of 
multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 7199ca50..b2f0da53 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.128.5' +__version__ = '0.129.0' __license__ = 'LGPL' From 6c2d6e63dffac3be7465a0a917efde14f742d677 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 13 Dec 2023 13:41:40 -1000 Subject: [PATCH 199/434] chore: ensure properties change is mentioned in the CHANGELOG.md file (#1334) --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index af0da45e..32e70bff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,10 @@ * Ensure ServiceInfo.properties always returns bytes ([#1333](https://github.com/python-zeroconf/python-zeroconf/issues/1333)) ([`d29553a`](https://github.com/python-zeroconf/python-zeroconf/commit/d29553ab7de6b7af70769ddb804fe2aaf492f320)) * Cache is_unspecified for zeroconf ip address objects ([#1331](https://github.com/python-zeroconf/python-zeroconf/issues/1331)) ([`a1c84dc`](https://github.com/python-zeroconf/python-zeroconf/commit/a1c84dc6adeebd155faec1a647c0f70d70de2945)) +### Technically breaking change + +* `ServiceInfo.properties` always returns a dictionary with type `dict[bytes, bytes | None]` instead of a mix `str` and `bytes`. It was only possible to get a mixed dictionary if it was manually passed in when `ServiceInfo` was constructed. + ## v0.128.5 (2023-12-13) ### Fix From f78a196db632c4fe017a34f1af8a58903c15a575 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 14 Dec 2023 11:06:26 -1000 Subject: [PATCH 200/434] fix: ensure IPv6 scoped address construction uses the string cache (#1336) --- src/zeroconf/_utils/ipaddress.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index abb1306f..b0b551ff 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -113,7 +113,8 @@ def ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Unio """Convert the bytes and scope to an IP address object.""" base_address = cached_ip_addresses_wrapper(address) if base_address is not None and base_address.is_link_local: - return cached_ip_addresses_wrapper(f"{base_address}%{scope}") + # Avoid expensive __format__ call by using PyUnicode_Join + return cached_ip_addresses_wrapper("".join((str(base_address), "%", str(scope)))) return base_address From 6560fad584e0d392962c9a9248759f17c416620e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 15 Dec 2023 08:35:22 -1000 Subject: [PATCH 201/434] fix: microsecond precision loss in the query handler (#1339) --- src/zeroconf/_handlers/query_handler.pxd | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index 8c42144c..3e726a53 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -39,7 +39,7 @@ cdef class _QueryResponse: cdef bint _is_probe cdef cython.list _questions - cdef float _now + cdef double _now cdef DNSCache _cache cdef cython.dict _additionals cdef cython.set _ucast @@ -91,7 +91,7 @@ cdef class QueryHandler: known_answers_set=cython.set, is_unicast=bint, is_probe=object, - now=float + now=double ) cpdef async_response(self, cython.list msgs, cython.bint unicast_source) From 157185f28bf1e83e6811e2a5cd1fa9b38966f780 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 15 Dec 2023 08:35:57 -1000 Subject: [PATCH 202/434] feat: small performance improvement constructing outgoing questions (#1340) --- src/zeroconf/_protocol/outgoing.pxd | 6 +++--- src/zeroconf/_protocol/outgoing.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 3460f0c7..4353757a 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -127,16 +127,16 @@ cdef class DNSOutgoing: ) cpdef packets(self) - cpdef add_question_or_all_cache(self, DNSCache cache, object now, str name, object type_, object class_) + cpdef add_question_or_all_cache(self, DNSCache cache, double now, str name, object type_, object class_) - cpdef add_question_or_one_cache(self, DNSCache cache, object now, str name, object type_, object class_) + cpdef add_question_or_one_cache(self, DNSCache cache, double now, str name, object type_, object class_) cpdef add_question(self, DNSQuestion question) cpdef add_answer(self, DNSIncoming inp, DNSRecord record) @cython.locals(now_double=double) - cpdef add_answer_at_time(self, DNSRecord record, object now) + cpdef add_answer_at_time(self, DNSRecord record, double now) cpdef add_authorative_answer(self, DNSPointer record) diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index e94cd0d2..57f98169 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -148,9 +148,9 @@ def add_question(self, record: DNSQuestion) -> None: def add_answer(self, inp: DNSIncoming, record: DNSRecord) -> None: """Adds an answer""" if not record.suppressed_by(inp): - self.add_answer_at_time(record, 0) + self.add_answer_at_time(record, 0.0) - def add_answer_at_time(self, record: Optional[DNSRecord], now: Union[float, int]) -> None: + def add_answer_at_time(self, record: Optional[DNSRecord], now: float_) -> None: """Adds an answer if it does not expire 
by a certain time""" now_double = now if record is not None and (now_double == 0 or not record.is_expired(now_double)): From 810a3093c5a9411ee97740b468bd706bdf4a95de Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 15 Dec 2023 08:36:39 -1000 Subject: [PATCH 203/434] feat: small performance improvement for ServiceInfo asking questions (#1341) --- src/zeroconf/_services/info.pxd | 2 +- src/zeroconf/_services/info.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index c53342cb..ecc2a534 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -124,4 +124,4 @@ cdef class ServiceInfo(RecordUpdateListener): cpdef async_clear_cache(self) @cython.locals(cache=DNSCache) - cdef _generate_request_query(self, object zc, object now, object question_type) + cdef _generate_request_query(self, object zc, double now, object question_type) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 962e76bf..3a27e10a 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -845,7 +845,7 @@ def _generate_request_query( out.add_question_or_one_cache(cache, now, name, _TYPE_TXT, _CLASS_IN) out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_A, _CLASS_IN) out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_AAAA, _CLASS_IN) - if question_type == DNS_QUESTION_TYPE_QU: + if question_type is DNS_QUESTION_TYPE_QU: for question in out.questions: question.unicast = True return out From 73d3ab90dd3b59caab771235dd6dbedf05bfe0b3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 15 Dec 2023 08:41:01 -1000 Subject: [PATCH 204/434] feat: small performance improvement for converting time (#1342) --- src/zeroconf/_utils/time.pxd | 2 +- src/zeroconf/_utils/time.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/zeroconf/_utils/time.pxd b/src/zeroconf/_utils/time.pxd index a9600286..f6e70fe7 100644 --- a/src/zeroconf/_utils/time.pxd +++ b/src/zeroconf/_utils/time.pxd @@ -1,4 +1,4 @@ cpdef double current_time_millis() -cpdef millis_to_seconds(object millis) +cpdef millis_to_seconds(double millis) diff --git a/src/zeroconf/_utils/time.py b/src/zeroconf/_utils/time.py index c6811585..600d9028 100644 --- a/src/zeroconf/_utils/time.py +++ b/src/zeroconf/_utils/time.py @@ -26,15 +26,17 @@ _float = float -def current_time_millis() -> float: +def current_time_millis() -> _float: """Current time in milliseconds. The current implemention uses `time.monotonic` but may change in the future. + + The design requires the time to match asyncio.loop.time() """ return time.monotonic() * 1000 -def millis_to_seconds(millis: _float) -> float: +def millis_to_seconds(millis: _float) -> _float: """Convert milliseconds to seconds.""" return millis / 1000.0 From 6f23656576daa04e3de44e100f3ddd60ee4c560d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 15 Dec 2023 08:45:41 -1000 Subject: [PATCH 205/434] fix: ensure question history suppresses duplicates (#1338) --- src/zeroconf/_history.pxd | 10 +++++----- tests/test_history.py | 9 +++++++-- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/src/zeroconf/_history.pxd b/src/zeroconf/_history.pxd index c1ff7619..02a0fc9e 100644 --- a/src/zeroconf/_history.pxd +++ b/src/zeroconf/_history.pxd @@ -9,10 +9,10 @@ cdef class QuestionHistory: cdef cython.dict _history - cpdef add_question_at_time(self, DNSQuestion question, float now, cython.set known_answers) + cpdef add_question_at_time(self, DNSQuestion question, double now, cython.set known_answers) - @cython.locals(than=cython.double, previous_question=cython.tuple, previous_known_answers=cython.set) - cpdef bint suppresses(self, DNSQuestion question, cython.double now, cython.set known_answers) + @cython.locals(than=double, previous_question=cython.tuple, previous_known_answers=cython.set) + cpdef bint suppresses(self, DNSQuestion question, double now, cython.set known_answers) - @cython.locals(than=cython.double, now_known_answers=cython.tuple) - cpdef async_expire(self, cython.double now) + @cython.locals(than=double, now_known_answers=cython.tuple) + cpdef async_expire(self, double now) diff --git a/tests/test_history.py b/tests/test_history.py index a8b8ae14..fca57be2 100644 --- a/tests/test_history.py +++ b/tests/test_history.py @@ -47,11 +47,16 @@ def test_question_suppression(): def test_question_expire(): history = QuestionHistory() - question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) now = r.current_time_millis() + question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) other_known_answers: Set[r.DNSRecord] = { r.DNSPointer( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'known-to-other._hap._tcp.local.' 
+ "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + 10000, + 'known-to-other._hap._tcp.local.', + created=now, ) } history.add_question_at_time(question, now, other_known_answers) From 7a24b88ee2a7f9d65a4fa6a636d79fdc757b6ce5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 15 Dec 2023 09:26:12 -1000 Subject: [PATCH 206/434] chore: add get_percentage_remaining_ttl helper to DNSRecord (#1343) --- src/zeroconf/_dns.pxd | 2 ++ src/zeroconf/_dns.py | 5 +++++ tests/test_dns.py | 20 ++++++++++++++++---- 3 files changed, 23 insertions(+), 4 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index d4116a66..72080517 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -54,6 +54,8 @@ cdef class DNSRecord(DNSEntry): cpdef get_remaining_ttl(self, double now) + cpdef unsigned int get_percentage_remaining_ttl(self, double now) + cpdef double get_expiration_time(self, cython.uint percent) cpdef bint is_expired(self, double now) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 66fb5b86..262dbb5f 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -193,6 +193,11 @@ def get_expiration_time(self, percent: _int) -> float: by a certain percentage.""" return self.created + (percent * self.ttl * 10) + def get_percentage_remaining_ttl(self, now: _float) -> _int: + """Returns the percentage remaining of the ttl between 0-100.""" + remain = (self.created + (_EXPIRE_FULL_TIME_MS * self.ttl) - now) / self.ttl / 10 + return 0 if remain <= 0 else round(remain) + # TODO: Switch to just int here def get_remaining_ttl(self, now: _float) -> Union[int, float]: """Returns the remaining TTL in seconds.""" diff --git a/tests/test_dns.py b/tests/test_dns.py index 0eac568d..b7e5a879 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -6,7 +6,6 @@ import logging import os import socket -import time import unittest import unittest.mock @@ -86,19 +85,32 @@ def test_dns_record_abc(self): record.write(None) # type: 
ignore[arg-type] def test_dns_record_reset_ttl(self): - record = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL) - time.sleep(1) - record2 = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL) + start = r.current_time_millis() + record = r.DNSRecord( + 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, created=start + ) + later = start + 1000 + record2 = r.DNSRecord( + 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, created=later + ) now = r.current_time_millis() assert record.created != record2.created assert record.get_remaining_ttl(now) != record2.get_remaining_ttl(now) + assert record.get_percentage_remaining_ttl(now) != record2.get_percentage_remaining_ttl(now) + assert record2.get_percentage_remaining_ttl(later) == 100 + assert record2.get_percentage_remaining_ttl(later + (const._DNS_HOST_TTL * 1000 / 2)) == 50 record.reset_ttl(record2) assert record.ttl == record2.ttl assert record.created == record2.created assert record.get_remaining_ttl(now) == record2.get_remaining_ttl(now) + assert record.get_percentage_remaining_ttl(now) == record2.get_percentage_remaining_ttl(now) + assert record.get_percentage_remaining_ttl(later) == 100 + assert record2.get_percentage_remaining_ttl(later) == 100 + assert record.get_percentage_remaining_ttl(later + (const._DNS_HOST_TTL * 1000 / 2)) == 50 + assert record2.get_percentage_remaining_ttl(later + (const._DNS_HOST_TTL * 1000 / 2)) == 50 def test_service_info_dunder(self): type_ = "_test-srvc-type._tcp.local." From a0b8aed93c898aff685483bd0202e59e04c6c63c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 15 Dec 2023 10:09:31 -1000 Subject: [PATCH 207/434] chore: partially revert add get_percentage_remaining_ttl helper to DNSRecord (#1344) --- src/zeroconf/_dns.pxd | 2 -- src/zeroconf/_dns.py | 5 ----- tests/test_dns.py | 8 -------- 3 files changed, 15 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 72080517..d4116a66 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -54,8 +54,6 @@ cdef class DNSRecord(DNSEntry): cpdef get_remaining_ttl(self, double now) - cpdef unsigned int get_percentage_remaining_ttl(self, double now) - cpdef double get_expiration_time(self, cython.uint percent) cpdef bint is_expired(self, double now) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 262dbb5f..66fb5b86 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -193,11 +193,6 @@ def get_expiration_time(self, percent: _int) -> float: by a certain percentage.""" return self.created + (percent * self.ttl * 10) - def get_percentage_remaining_ttl(self, now: _float) -> _int: - """Returns the percentage remaining of the ttl between 0-100.""" - remain = (self.created + (_EXPIRE_FULL_TIME_MS * self.ttl) - now) / self.ttl / 10 - return 0 if remain <= 0 else round(remain) - # TODO: Switch to just int here def get_remaining_ttl(self, now: _float) -> Union[int, float]: """Returns the remaining TTL in seconds.""" diff --git a/tests/test_dns.py b/tests/test_dns.py index b7e5a879..05562135 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -97,20 +97,12 @@ def test_dns_record_reset_ttl(self): assert record.created != record2.created assert record.get_remaining_ttl(now) != record2.get_remaining_ttl(now) - assert record.get_percentage_remaining_ttl(now) != record2.get_percentage_remaining_ttl(now) - assert record2.get_percentage_remaining_ttl(later) == 100 - assert record2.get_percentage_remaining_ttl(later + (const._DNS_HOST_TTL * 1000 / 2)) == 50 record.reset_ttl(record2) assert record.ttl == 
record2.ttl assert record.created == record2.created assert record.get_remaining_ttl(now) == record2.get_remaining_ttl(now) - assert record.get_percentage_remaining_ttl(now) == record2.get_percentage_remaining_ttl(now) - assert record.get_percentage_remaining_ttl(later) == 100 - assert record2.get_percentage_remaining_ttl(later) == 100 - assert record.get_percentage_remaining_ttl(later + (const._DNS_HOST_TTL * 1000 / 2)) == 50 - assert record2.get_percentage_remaining_ttl(later + (const._DNS_HOST_TTL * 1000 / 2)) == 50 def test_service_info_dunder(self): type_ = "_test-srvc-type._tcp.local." From 7de655b6f05012f20a3671e0bcdd44a1913d7b52 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 15 Dec 2023 13:59:17 -1000 Subject: [PATCH 208/434] feat: small speed up to processing incoming records (#1345) --- src/zeroconf/_services/browser.pxd | 6 +++--- src/zeroconf/_services/info.pxd | 2 +- src/zeroconf/_updates.pxd | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index a1d79b08..a2d55acf 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -70,10 +70,10 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef _generate_ready_queries(self, object first_request, object now) - cpdef _enqueue_callback(self, object state_change, object type_, object name) + cpdef void _enqueue_callback(self, object state_change, object type_, object name) @cython.locals(record_update=RecordUpdate, record=DNSRecord, cache=DNSCache, service=DNSRecord, pointer=DNSPointer) - cpdef async_update_records(self, object zc, double now, cython.list records) + cpdef void async_update_records(self, object zc, double now, cython.list records) cpdef cython.list _names_matching_types(self, object types) @@ -89,4 +89,4 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef _cancel_send_timer(self) - cpdef async_update_records_complete(self) + cpdef void 
async_update_records_complete(self) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index ecc2a534..c17723eb 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -65,7 +65,7 @@ cdef class ServiceInfo(RecordUpdateListener): cdef public cython.set _get_address_and_nsec_records_cache @cython.locals(record_update=RecordUpdate, update=bint, cache=DNSCache) - cpdef async_update_records(self, object zc, double now, cython.list records) + cpdef void async_update_records(self, object zc, double now, cython.list records) @cython.locals(cache=DNSCache) cpdef bint _load_from_cache(self, object zc, double now) diff --git a/src/zeroconf/_updates.pxd b/src/zeroconf/_updates.pxd index e1b44a12..3547d729 100644 --- a/src/zeroconf/_updates.pxd +++ b/src/zeroconf/_updates.pxd @@ -4,6 +4,6 @@ import cython cdef class RecordUpdateListener: - cpdef async_update_records(self, object zc, double now, cython.list records) + cpdef void async_update_records(self, object zc, double now, cython.list records) - cpdef async_update_records_complete(self) + cpdef void async_update_records_complete(self) From c65d869aec731b803484871e9d242a984f9f5848 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 15 Dec 2023 16:48:31 -1000 Subject: [PATCH 209/434] feat: significantly improve efficiency of the ServiceBrowser scheduler (#1335) --- examples/browser.py | 2 +- src/zeroconf/_services/browser.pxd | 87 ++-- src/zeroconf/_services/browser.py | 441 +++++++++++----- src/zeroconf/const.py | 7 +- tests/__init__.py | 31 +- tests/services/test_browser.py | 804 +++++++++++++++++++++-------- tests/test_asyncio.py | 190 +++---- 7 files changed, 1083 insertions(+), 479 deletions(-) diff --git a/examples/browser.py b/examples/browser.py index a456a9eb..237de013 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -66,7 +66,7 @@ def on_service_state_change( zeroconf = Zeroconf(ip_version=ip_version) - services = ["_http._tcp.local.", "_hap._tcp.local."] + services = ["_http._tcp.local.", "_hap._tcp.local.", "_esphomelib._tcp.local.", "_airplay._tcp.local."] if args.find: services = list(ZeroconfServiceTypes.find(zc=zeroconf)) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index a2d55acf..88a5321d 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -14,41 +14,86 @@ cdef bint TYPE_CHECKING cdef object cached_possible_types cdef cython.uint _EXPIRE_REFRESH_TIME_PERCENT, _MAX_MSG_TYPICAL, _DNS_PACKET_HEADER_LEN cdef cython.uint _TYPE_PTR +cdef object _CLASS_IN cdef object SERVICE_STATE_CHANGE_ADDED, SERVICE_STATE_CHANGE_REMOVED, SERVICE_STATE_CHANGE_UPDATED cdef cython.set _ADDRESS_RECORD_TYPES +cdef float RESCUE_RECORD_RETRY_TTL_PERCENTAGE + +cdef object _MDNS_PORT, _BROWSER_TIME + +cdef object QU_QUESTION + +cdef object _FLAGS_QR_QUERY + +cdef object heappop, heappush + +cdef class _ScheduledPTRQuery: + + cdef public str alias + cdef public str name + cdef public unsigned int ttl + cdef public bint cancelled + cdef public double expire_time_millis + cdef public double when_millis cdef class _DNSPointerOutgoingBucket: - cdef public object now + cdef public double 
now_millis cdef public DNSOutgoing out cdef public cython.uint bytes cpdef add(self, cython.uint max_compressed_size, DNSQuestion question, cython.set answers) @cython.locals(cache=DNSCache, question_history=QuestionHistory, record=DNSRecord, qu_question=bint) -cpdef generate_service_query( +cpdef list generate_service_query( object zc, - double now, - list type_, + double now_millis, + set types_, bint multicast, object question_type ) @cython.locals(answer=DNSPointer, query_buckets=list, question=DNSQuestion, max_compressed_size=cython.uint, max_bucket_size=cython.uint, query_bucket=_DNSPointerOutgoingBucket) -cdef _group_ptr_queries_with_known_answers(object now, object multicast, cython.dict question_with_known_answers) +cdef list _group_ptr_queries_with_known_answers(double now_millis, bint multicast, cython.dict question_with_known_answers) cdef class QueryScheduler: - cdef cython.set _types - cdef cython.dict _next_time - cdef object _first_random_delay_interval - cdef cython.dict _delay + cdef object _zc + cdef set _types + cdef str _addr + cdef int _port + cdef bint _multicast + cdef tuple _first_random_delay_interval + cdef double _min_time_between_queries_millis + cdef object _loop + cdef unsigned int _startup_queries_sent + cdef public dict _next_scheduled_for_alias + cdef public list _query_heap + cdef object _next_run + cdef double _clock_resolution_millis + cdef object _question_type + + cpdef void schedule_ptr_first_refresh(self, DNSPointer pointer) + + cdef void _schedule_ptr_refresh(self, DNSPointer pointer, double expire_time_millis, double refresh_time_millis) + + cdef void _schedule_ptr_query(self, _ScheduledPTRQuery scheduled_query) - cpdef millis_to_wait(self, object now) + @cython.locals(scheduled=_ScheduledPTRQuery) + cpdef void cancel_ptr_refresh(self, DNSPointer pointer) - cpdef reschedule_type(self, object type_, object next_time) + @cython.locals(current=_ScheduledPTRQuery, expire_time=double) + cpdef void 
reschedule_ptr_first_refresh(self, DNSPointer pointer) - cpdef process_ready_types(self, object now) + @cython.locals(ttl_millis='unsigned int', additional_wait=double, next_query_time=double) + cpdef void schedule_rescue_query(self, _ScheduledPTRQuery query, double now_millis, float additional_percentage) + + cpdef void _process_startup_queries(self) + + @cython.locals(query=_ScheduledPTRQuery, next_scheduled=_ScheduledPTRQuery, next_when=double) + cpdef void _process_ready_types(self) + + cpdef void async_send_ready_queries(self, bint first_request, double now_millis, set ready_types) cdef class _ServiceBrowserBase(RecordUpdateListener): @@ -56,20 +101,12 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cdef public object zc cdef DNSCache _cache cdef object _loop - cdef public object addr - cdef public object port - cdef public object multicast - cdef public object question_type cdef public cython.dict _pending_handlers cdef public object _service_state_changed cdef public QueryScheduler query_scheduler cdef public bint done - cdef public object _first_request - cdef public object _next_send_timer cdef public object _query_sender_task - cpdef _generate_ready_queries(self, object first_request, object now) - cpdef void _enqueue_callback(self, object state_change, object type_, object name) @cython.locals(record_update=RecordUpdate, record=DNSRecord, cache=DNSCache, service=DNSRecord, pointer=DNSPointer) @@ -77,16 +114,6 @@ cdef class _ServiceBrowserBase(RecordUpdateListener): cpdef cython.list _names_matching_types(self, object types) - cpdef reschedule_type(self, object type_, object now, object next_time) - cpdef _fire_service_state_changed_event(self, cython.tuple event) - cpdef _async_send_ready_queries_schedule_next(self) - - cpdef _async_schedule_next(self, object now) - - cpdef _async_send_ready_queries(self, object now) - - cpdef _cancel_send_timer(self) - cpdef void async_update_records_complete(self) diff --git a/src/zeroconf/_services/browser.py 
b/src/zeroconf/_services/browser.py index ca8c9aa5..4d7646a2 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -21,14 +21,17 @@ """ import asyncio +import heapq import queue import random import threading +import time import warnings from functools import partial from types import TracebackType # noqa # used in type hints from typing import ( TYPE_CHECKING, + Any, Callable, Dict, Iterable, @@ -56,7 +59,6 @@ from .._utils.time import current_time_millis, millis_to_seconds from ..const import ( _ADDRESS_RECORD_TYPES, - _BROWSER_BACKOFF_LIMIT, _BROWSER_TIME, _CLASS_IN, _DNS_PACKET_HEADER_LEN, @@ -82,6 +84,12 @@ SERVICE_STATE_CHANGE_REMOVED = ServiceStateChange.Removed SERVICE_STATE_CHANGE_UPDATED = ServiceStateChange.Updated +QU_QUESTION = DNSQuestionType.QU + +STARTUP_QUERIES = 4 + +RESCUE_RECORD_RETRY_TTL_PERCENTAGE = 0.1 + if TYPE_CHECKING: from .._core import Zeroconf @@ -92,23 +100,97 @@ _QuestionWithKnownAnswers = Dict[DNSQuestion, Set[DNSPointer]] +heappop = heapq.heappop +heappush = heapq.heappush + + +class _ScheduledPTRQuery: + + __slots__ = ('alias', 'name', 'ttl', 'cancelled', 'expire_time_millis', 'when_millis') + + def __init__( + self, alias: str, name: str, ttl: int, expire_time_millis: float, when_millis: float + ) -> None: + """Create a scheduled query.""" + self.alias = alias + self.name = name + self.ttl = ttl + # Since queries are stored in a heap we need to track if they are cancelled + # so we can remove them from the heap when they are cancelled as it would + # be too expensive to search the heap for the record to remove and instead + # we just mark it as cancelled and ignore it when we pop it off the heap + # when the query is due. 
+ self.cancelled = False + # Expire time millis is the actual millisecond time the record will expire + self.expire_time_millis = expire_time_millis + # When millis is the millisecond time the query should be sent + # For the first query this is the refresh time which is 75% of the TTL + # + # For subsequent queries we increase the time by 10% of the TTL + # until we reach the expire time and then we stop because it means + # we failed to rescue the record. + self.when_millis = when_millis + + def __repr__(self) -> str: + """Return a string representation of the scheduled query.""" + return ( + f"<{self.__class__.__name__} " + f"alias={self.alias} " + f"name={self.name} " + f"ttl={self.ttl} " + f"cancelled={self.cancelled} " + f"expire_time_millis={self.expire_time_millis} " + f"when_millis={self.when_millis}" + ">" + ) + + def __lt__(self, other: '_ScheduledPTRQuery') -> bool: + """Compare two scheduled queries.""" + if type(other) is _ScheduledPTRQuery: + return self.when_millis < other.when_millis + return NotImplemented + + def __le__(self, other: '_ScheduledPTRQuery') -> bool: + """Compare two scheduled queries.""" + if type(other) is _ScheduledPTRQuery: + return self.when_millis < other.when_millis or self.__eq__(other) + return NotImplemented + + def __eq__(self, other: Any) -> bool: + """Compare two scheduled queries.""" + if type(other) is _ScheduledPTRQuery: + return self.when_millis == other.when_millis + return NotImplemented + + def __ge__(self, other: '_ScheduledPTRQuery') -> bool: + """Compare two scheduled queries.""" + if type(other) is _ScheduledPTRQuery: + return self.when_millis > other.when_millis or self.__eq__(other) + return NotImplemented + + def __gt__(self, other: '_ScheduledPTRQuery') -> bool: + """Compare two scheduled queries.""" + if type(other) is _ScheduledPTRQuery: + return self.when_millis > other.when_millis + return NotImplemented + class _DNSPointerOutgoingBucket: """A DNSOutgoing bucket.""" - __slots__ = ('now', 'out', 
'bytes') + __slots__ = ('now_millis', 'out', 'bytes') - def __init__(self, now: float, multicast: bool) -> None: - """Create a bucke to wrap a DNSOutgoing.""" - self.now = now - self.out = DNSOutgoing(_FLAGS_QR_QUERY, multicast=multicast) + def __init__(self, now_millis: float, multicast: bool) -> None: + """Create a bucket to wrap a DNSOutgoing.""" + self.now_millis = now_millis + self.out = DNSOutgoing(_FLAGS_QR_QUERY, multicast) self.bytes = 0 def add(self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNSPointer]) -> None: """Add a new set of questions and known answers to the outgoing.""" self.out.add_question(question) for answer in answers: - self.out.add_answer_at_time(answer, self.now) + self.out.add_answer_at_time(answer, self.now_millis) self.bytes += max_compressed_size @@ -127,7 +209,7 @@ def group_ptr_queries_with_known_answers( def _group_ptr_queries_with_known_answers( - now: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers + now_millis: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers ) -> List[DNSOutgoing]: """Inner wrapper for group_ptr_queries_with_known_answers.""" # This is the maximum size the query + known answers can be with name compression. 
@@ -156,7 +238,7 @@ def _group_ptr_queries_with_known_answers( # If a single question and known answers won't fit in a packet # we will end up generating multiple packets, but there will never # be multiple questions - query_bucket = _DNSPointerOutgoingBucket(now, multicast) + query_bucket = _DNSPointerOutgoingBucket(now_millis, multicast) query_bucket.add(max_compressed_size, question, answers) query_buckets.append(query_bucket) @@ -164,11 +246,15 @@ def _group_ptr_queries_with_known_answers( def generate_service_query( - zc: 'Zeroconf', now: float_, types_: List[str], multicast: bool, question_type: Optional[DNSQuestionType] + zc: 'Zeroconf', + now_millis: float_, + types_: Set[str], + multicast: bool, + question_type: Optional[DNSQuestionType], ) -> List[DNSOutgoing]: """Generate a service query for sending with zeroconf.send.""" questions_with_known_answers: _QuestionWithKnownAnswers = {} - qu_question = not multicast if question_type is None else question_type == DNSQuestionType.QU + qu_question = not multicast if question_type is None else question_type is QU_QUESTION question_history = zc.question_history cache = zc.cache for type_ in types_: @@ -177,9 +263,9 @@ def generate_service_query( known_answers = { record for record in cache.get_all_by_details(type_, _TYPE_PTR, _CLASS_IN) - if not record.is_stale(now) + if not record.is_stale(now_millis) } - if not qu_question and question_history.suppresses(question, now, known_answers): + if not qu_question and question_history.suppresses(question, now_millis, known_answers): log.debug("Asking %s was suppressed by the question history", question) continue if TYPE_CHECKING: @@ -188,9 +274,9 @@ def generate_service_query( pointer_known_answers = known_answers questions_with_known_answers[question] = pointer_known_answers if not qu_question: - question_history.add_question_at_time(question, now, known_answers) + question_history.add_question_at_time(question, now_millis, known_answers) - return 
_group_ptr_queries_with_known_answers(now, multicast, questions_with_known_answers) + return _group_ptr_queries_with_known_answers(now_millis, multicast, questions_with_known_answers) def _on_change_dispatcher( @@ -223,25 +309,51 @@ class QueryScheduler: """ - __slots__ = ('_types', '_next_time', '_first_random_delay_interval', '_delay') + __slots__ = ( + '_zc', + '_types', + '_addr', + '_port', + '_multicast', + '_first_random_delay_interval', + '_min_time_between_queries_millis', + '_loop', + '_startup_queries_sent', + '_next_scheduled_for_alias', + '_query_heap', + '_next_run', + '_clock_resolution_millis', + '_question_type', + ) def __init__( self, + zc: "Zeroconf", types: Set[str], + addr: Optional[str], + port: int, + multicast: bool, delay: int, first_random_delay_interval: Tuple[int, int], + question_type: Optional[DNSQuestionType], ) -> None: + self._zc = zc self._types = types - self._next_time: Dict[str, float] = {} + self._addr = addr + self._port = port + self._multicast = multicast self._first_random_delay_interval = first_random_delay_interval - self._delay: Dict[str, float] = {check_type_: delay for check_type_ in self._types} - - def start(self, now: float_) -> None: - """Start the scheduler.""" - self._generate_first_next_time(now) - - def _generate_first_next_time(self, now: float_) -> None: - """Generate the initial next query times. + self._min_time_between_queries_millis = delay + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._startup_queries_sent = 0 + self._next_scheduled_for_alias: Dict[str, _ScheduledPTRQuery] = {} + self._query_heap: list[_ScheduledPTRQuery] = [] + self._next_run: Optional[asyncio.TimerHandle] = None + self._clock_resolution_millis = time.get_clock_info('monotonic').resolution * 1000 + self._question_type = question_type + + def start(self, loop: asyncio.AbstractEventLoop) -> None: + """Start the scheduler. 
https://datatracker.ietf.org/doc/html/rfc6762#section-5.2 To avoid accidental synchronization when, for some reason, multiple @@ -250,43 +362,173 @@ def _generate_first_next_time(self, now: float_) -> None: also delay the first query of the series by a randomly chosen amount in the range 20-120 ms. """ - delay = millis_to_seconds(random.randint(*self._first_random_delay_interval)) - next_time = now + delay - self._next_time = {check_type_: next_time for check_type_ in self._types} - - def millis_to_wait(self, now: float_) -> float: - """Returns the number of milliseconds to wait for the next event.""" - # Wait for the type has the smallest next time - next_time = min(self._next_time.values()) - return 0 if next_time <= now else next_time - now - - def reschedule_type(self, type_: str_, next_time: float_) -> bool: - """Reschedule the query for a type to happen sooner.""" - if next_time >= self._next_time[type_]: - return False - self._next_time[type_] = next_time - return True - - def _force_reschedule_type(self, type_: str_, next_time: float_) -> None: - """Force a reschedule of a type.""" - self._next_time[type_] = next_time - - def process_ready_types(self, now: float_) -> List[str]: - """Generate a list of ready types that is due and schedule the next time.""" - if self.millis_to_wait(now): - return [] + start_delay = millis_to_seconds(random.randint(*self._first_random_delay_interval)) + self._loop = loop + self._next_run = loop.call_later(start_delay, self._process_startup_queries) + + def stop(self) -> None: + """Stop the scheduler.""" + if self._next_run is not None: + self._next_run.cancel() + self._next_run = None + self._next_scheduled_for_alias.clear() + self._query_heap.clear() + + def schedule_ptr_first_refresh(self, pointer: DNSPointer) -> None: + """Schedule a query for a pointer.""" + expire_time_millis = pointer.get_expiration_time(100) + refresh_time_millis = pointer.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) + 
self._schedule_ptr_refresh(pointer, expire_time_millis, refresh_time_millis) + + def _schedule_ptr_refresh( + self, pointer: DNSPointer, expire_time_millis: float_, refresh_time_millis: float_ + ) -> None: + """Schedule a query for a pointer.""" + ttl = int(pointer.ttl) if isinstance(pointer.ttl, float) else pointer.ttl + scheduled_ptr_query = _ScheduledPTRQuery( + pointer.alias, pointer.name, ttl, expire_time_millis, refresh_time_millis + ) + self._schedule_ptr_query(scheduled_ptr_query) + + def _schedule_ptr_query(self, scheduled_query: _ScheduledPTRQuery) -> None: + """Schedule a query for a pointer.""" + self._next_scheduled_for_alias[scheduled_query.alias] = scheduled_query + heappush(self._query_heap, scheduled_query) + + def cancel_ptr_refresh(self, pointer: DNSPointer) -> None: + """Cancel a query for a pointer.""" + scheduled = self._next_scheduled_for_alias.pop(pointer.alias, None) + if scheduled: + scheduled.cancelled = True + + def reschedule_ptr_first_refresh(self, pointer: DNSPointer) -> None: + """Reschedule a query for a pointer.""" + current = self._next_scheduled_for_alias.get(pointer.alias) + refresh_time_millis = pointer.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) + if current is not None: + # If the expire time is within self._min_time_between_queries_millis + # of the current scheduled time avoid churn by not rescheduling + if ( + -self._min_time_between_queries_millis + <= refresh_time_millis - current.when_millis + <= self._min_time_between_queries_millis + ): + return + current.cancelled = True + expire_time_millis = pointer.get_expiration_time(100) + self._schedule_ptr_refresh(pointer, expire_time_millis, refresh_time_millis) + + def schedule_rescue_query( + self, query: _ScheduledPTRQuery, now_millis: float_, additional_percentage: float_ + ) -> None: + """Reschedule a query for a pointer at an additional percentage of expiration.""" + ttl_millis = query.ttl * 1000 + additional_wait = ttl_millis * additional_percentage + 
next_query_time = now_millis + additional_wait + if next_query_time >= query.expire_time_millis: + # If we would schedule past the expire time + # there is no point in scheduling as we already + # tried to rescue the record and failed + return + scheduled_ptr_query = _ScheduledPTRQuery( + query.alias, query.name, query.ttl, query.expire_time_millis, next_query_time + ) + self._schedule_ptr_query(scheduled_ptr_query) + + def _process_startup_queries(self) -> None: + if TYPE_CHECKING: + assert self._loop is not None + # This is a safety to ensure we stop sending queries if Zeroconf instance + # is stopped without the browser being cancelled + if self._zc.done: + return + + now_millis = current_time_millis() + + # At first we will send STARTUP_QUERIES queries to get the cache populated + self.async_send_ready_queries(self._startup_queries_sent == 0, now_millis, self._types) + self._startup_queries_sent += 1 + + # Once we finish sending the initial queries we will + # switch to a strategy of sending queries only when we + # need to refresh records that are about to expire + if self._startup_queries_sent >= STARTUP_QUERIES: + self._next_run = self._loop.call_at( + millis_to_seconds(now_millis + self._min_time_between_queries_millis), + self._process_ready_types, + ) + return + + self._next_run = self._loop.call_later(self._startup_queries_sent**2, self._process_startup_queries) - ready_types: List[str] = [] + def _process_ready_types(self) -> None: + """Generate a list of ready types that is due and schedule the next time.""" + if TYPE_CHECKING: + assert self._loop is not None + # This is a safety to ensure we stop sending queries if Zeroconf instance + # is stopped without the browser being cancelled + if self._zc.done: + return - for type_, due in self._next_time.items(): - if due > now: + now_millis = current_time_millis() + # Refresh records that are about to expire (aka + # _EXPIRE_REFRESH_TIME_PERCENT which is currently 75% of the TTL) and + # additional rescue 
queries if the 75% query failed to refresh the record + # with a minimum time between queries of _min_time_between_queries + # which defaults to 10s + + ready_types: Set[str] = set() + next_scheduled: Optional[_ScheduledPTRQuery] = None + end_time_millis = now_millis + self._clock_resolution_millis + schedule_rescue: List[_ScheduledPTRQuery] = [] + + while self._query_heap: + query = self._query_heap[0] + if query.cancelled: + heappop(self._query_heap) continue + if query.when_millis > end_time_millis: + next_scheduled = query + break + + ready_types.add(query.name) - ready_types.append(type_) - self._next_time[type_] = now + self._delay[type_] - self._delay[type_] = min(_BROWSER_BACKOFF_LIMIT * 1000, self._delay[type_] * 2) + heappop(self._query_heap) + del self._next_scheduled_for_alias[query.alias] + # If there is still more than 10% of the TTL remaining + # schedule a query again to try to rescue the record + # from expiring. If the record is refreshed before + # the query, the query will get cancelled. + schedule_rescue.append(query) - return ready_types + for query in schedule_rescue: + self.schedule_rescue_query(query, now_millis, RESCUE_RECORD_RETRY_TTL_PERCENTAGE) + + if ready_types: + self.async_send_ready_queries(False, now_millis, ready_types) + + next_time_millis = now_millis + self._min_time_between_queries_millis + + if next_scheduled is not None and next_scheduled.when_millis > next_time_millis: + next_when_millis = next_scheduled.when_millis + else: + next_when_millis = next_time_millis + + self._next_run = self._loop.call_at(millis_to_seconds(next_when_millis), self._process_ready_types) + + def async_send_ready_queries( + self, first_request: bool, now_millis: float_, ready_types: Set[str] + ) -> None: + """Send any ready queries.""" + # If they did not specify and this is the first request, ask QU questions + # https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 since we are + # just starting up and we know our cache is likely empty. 
This ensures + # the next outgoing will be sent with the known answers list. + question_type = QU_QUESTION if self._question_type is None and first_request else self._question_type + outs = generate_service_query(self._zc, now_millis, ready_types, self._multicast, question_type) + if outs: + for out in outs: + self._zc.async_send(out, self._addr, self._port) class _ServiceBrowserBase(RecordUpdateListener): @@ -297,16 +539,10 @@ class _ServiceBrowserBase(RecordUpdateListener): 'zc', '_cache', '_loop', - 'addr', - 'port', - 'multicast', - 'question_type', '_pending_handlers', '_service_state_changed', 'query_scheduler', 'done', - '_first_request', - '_next_send_timer', '_query_sender_task', ) @@ -347,16 +583,19 @@ def __init__( self._cache = zc.cache assert zc.loop is not None self._loop = zc.loop - self.addr = addr - self.port = port - self.multicast = self.addr in (None, _MDNS_ADDR, _MDNS_ADDR6) - self.question_type = question_type self._pending_handlers: Dict[Tuple[str, str], ServiceStateChange] = {} self._service_state_changed = Signal() - self.query_scheduler = QueryScheduler(self.types, delay, _FIRST_QUERY_DELAY_RANDOM_INTERVAL) + self.query_scheduler = QueryScheduler( + zc, + self.types, + addr, + port, + addr in (None, _MDNS_ADDR, _MDNS_ADDR6), + delay, + _FIRST_QUERY_DELAY_RANDOM_INTERVAL, + question_type, + ) self.done = False - self._first_request: bool = True - self._next_send_timer: Optional[asyncio.TimerHandle] = None self._query_sender_task: Optional[asyncio.Task] = None if hasattr(handlers, 'add_service'): @@ -377,7 +616,6 @@ def _async_start(self) -> None: Must be called by uses of this base class after they have finished setting their properties. 
""" - self.query_scheduler.start(current_time_millis()) self.zc.async_add_listener(self, [DNSQuestion(type_, _TYPE_PTR, _CLASS_IN) for type_ in self.types]) # Only start queries after the listener is installed self._query_sender_task = asyncio.ensure_future(self._async_start_query_sender()) @@ -432,11 +670,12 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record for type_ in self.types.intersection(cached_possible_types(pointer.name)): if old_record is None: self._enqueue_callback(SERVICE_STATE_CHANGE_ADDED, type_, pointer.alias) + self.query_scheduler.schedule_ptr_first_refresh(pointer) elif pointer.is_expired(now): self._enqueue_callback(SERVICE_STATE_CHANGE_REMOVED, type_, pointer.alias) + self.query_scheduler.cancel_ptr_refresh(pointer) else: - expire_time = pointer.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) - self.reschedule_type(type_, now, expire_time) + self.query_scheduler.reschedule_ptr_first_refresh(pointer) continue # If its expired or already exists in the cache it cannot be updated. @@ -487,67 +726,17 @@ def _fire_service_state_changed_event(self, event: Tuple[Tuple[str, str], Servic def _async_cancel(self) -> None: """Cancel the browser.""" self.done = True - self._cancel_send_timer() + self.query_scheduler.stop() self.zc.async_remove_listener(self) assert self._query_sender_task is not None, "Attempted to cancel a browser that was not started" self._query_sender_task.cancel() - - def _generate_ready_queries(self, first_request: bool_, now: float_) -> List[DNSOutgoing]: - """Generate the service browser query for any type that is due.""" - ready_types = self.query_scheduler.process_ready_types(now) - if not ready_types: - return [] - - # If they did not specify and this is the first request, ask QU questions - # https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 since we are - # just starting up and we know our cache is likely empty. 
This ensures - # the next outgoing will be sent with the known answers list. - question_type = DNSQuestionType.QU if not self.question_type and first_request else self.question_type - return generate_service_query(self.zc, now, ready_types, self.multicast, question_type) + self._query_sender_task = None async def _async_start_query_sender(self) -> None: """Start scheduling queries.""" if not self.zc.started: await self.zc.async_wait_for_start() - self._async_send_ready_queries_schedule_next() - - def _cancel_send_timer(self) -> None: - """Cancel the next send.""" - if self._next_send_timer: - self._next_send_timer.cancel() - self._next_send_timer = None - - def reschedule_type(self, type_: str_, now: float_, next_time: float_) -> None: - """Reschedule a type to be refreshed in the future.""" - if self.query_scheduler.reschedule_type(type_, next_time): - # We need to send the queries before rescheduling the next one - # otherwise we may be scheduling a query to go out in the next - # iteration of the event loop which should be sent now. 
- if now >= next_time: - self._async_send_ready_queries(now) - self._cancel_send_timer() - self._async_schedule_next(now) - - def _async_send_ready_queries(self, now: float_) -> None: - """Send any ready queries.""" - outs = self._generate_ready_queries(self._first_request, now) - if outs: - self._first_request = False - for out in outs: - self.zc.async_send(out, addr=self.addr, port=self.port) - - def _async_send_ready_queries_schedule_next(self) -> None: - """Send ready queries and schedule next one checking for done first.""" - if self.done or self.zc.done: - return - now = current_time_millis() - self._async_send_ready_queries(now) - self._async_schedule_next(now) - - def _async_schedule_next(self, now: float_) -> None: - """Scheule the next time.""" - delay = millis_to_seconds(self.query_scheduler.millis_to_wait(now)) - self._next_send_timer = self._loop.call_later(delay, self._async_send_ready_queries_schedule_next) + self.query_scheduler.start(self._loop) class ServiceBrowser(_ServiceBrowserBase, threading.Thread): diff --git a/src/zeroconf/const.py b/src/zeroconf/const.py index ca199df5..aa64306e 100644 --- a/src/zeroconf/const.py +++ b/src/zeroconf/const.py @@ -29,10 +29,9 @@ _CHECK_TIME = 175 # ms _REGISTER_TIME = 225 # ms _LISTENER_TIME = 200 # ms -_BROWSER_TIME = 1000 # ms -_DUPLICATE_QUESTION_INTERVAL = _BROWSER_TIME - 1 # ms -_DUPLICATE_PACKET_SUPPRESSION_INTERVAL = 1000 -_BROWSER_BACKOFF_LIMIT = 3600 # s +_BROWSER_TIME = 10000 # ms +_DUPLICATE_PACKET_SUPPRESSION_INTERVAL = 1000 # ms +_DUPLICATE_QUESTION_INTERVAL = 999 # ms # Must be 1ms less than _DUPLICATE_PACKET_SUPPRESSION_INTERVAL _CACHE_CLEANUP_INTERVAL = 10 # s _LOADED_SYSTEM_TIMEOUT = 10 # s _STARTUP_TIMEOUT = 9 # s must be lower than _LOADED_SYSTEM_TIMEOUT diff --git a/tests/__init__.py b/tests/__init__.py index 98cd901c..cbba6073 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -19,17 +19,20 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ - import 
asyncio import socket +import time from functools import lru_cache -from typing import List, Set +from typing import List, Optional, Set +from unittest import mock import ifaddr from zeroconf import DNSIncoming, DNSQuestion, DNSRecord, Zeroconf from zeroconf._history import QuestionHistory +_MONOTONIC_RESOLUTION = time.get_clock_info("monotonic").resolution + class QuestionHistoryWithoutSuppression(QuestionHistory): def suppresses(self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord]) -> bool: @@ -84,3 +87,27 @@ def has_working_ipv6(): def _clear_cache(zc: Zeroconf) -> None: zc.cache.cache.clear() zc.question_history.clear() + + +def time_changed_millis(millis: Optional[float] = None) -> None: + """Call all scheduled events for a time.""" + loop = asyncio.get_running_loop() + loop_time = loop.time() + if millis is not None: + mock_seconds_into_future = millis / 1000 + else: + mock_seconds_into_future = loop_time + + with mock.patch("time.monotonic", return_value=mock_seconds_into_future): + + for task in list(loop._scheduled): # type: ignore[attr-defined] + if not isinstance(task, asyncio.TimerHandle): + continue + if task.cancelled(): + continue + + future_seconds = task.when() - (loop_time + _MONOTONIC_RESOLUTION) + + if mock_seconds_into_future >= future_seconds: + task._run() + task.cancel() diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index a658ded9..6a3bd398 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -10,7 +10,7 @@ import time import unittest from threading import Event -from typing import Iterable, Set, cast +from typing import Iterable, List, Set, cast from unittest.mock import patch import pytest @@ -27,15 +27,16 @@ millis_to_seconds, ) from zeroconf._services import ServiceStateChange -from zeroconf._services.browser import ServiceBrowser +from zeroconf._services.browser import ServiceBrowser, _ScheduledPTRQuery from zeroconf._services.info import ServiceInfo -from 
zeroconf.asyncio import AsyncZeroconf +from zeroconf.asyncio import AsyncServiceBrowser, AsyncZeroconf from .. import ( QuestionHistoryWithoutSuppression, _inject_response, _wait_for_start, has_working_ipv6, + time_changed_millis, ) log = logging.getLogger('zeroconf') @@ -53,6 +54,13 @@ def teardown_module(): log.setLevel(original_logging_level) +def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + for record in records: + generated.add_answer_at_time(record, 0) + return r.DNSIncoming(generated.packets()[0]) + + def test_service_browser_cancel_multiple_times(): """Test we can cancel a ServiceBrowser multiple times before close.""" @@ -213,7 +221,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de assert service_info.server.lower() == service_server.lower() service_updated_event.set() - def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncoming: + def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncoming: generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) assert generated.is_response() is True @@ -291,7 +299,7 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi wait_time = 3 # service added - _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Added)) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Added)) service_add_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 0 @@ -300,7 +308,7 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi # service SRV updated service_updated_event.clear() service_server = 'ash-2.local.' 
- _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Updated)) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 1 @@ -309,7 +317,7 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi # service TXT updated service_updated_event.clear() service_text = b'path=/~matt2/' - _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Updated)) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 2 @@ -318,7 +326,7 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi # service TXT updated - duplicate update should not trigger another service_updated service_updated_event.clear() service_text = b'path=/~matt2/' - _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Updated)) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 2 @@ -329,7 +337,7 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi service_address = '10.0.1.3' # Verify we match on uppercase service_server = service_server.upper() - _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Updated)) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 3 @@ -340,14 +348,14 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi service_server = 'ash-3.local.' 
service_text = b'path=/~matt3/' service_address = '10.0.1.3' - _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Updated)) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 4 assert service_removed_count == 0 # service removed - _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Removed)) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Removed)) service_removed_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 4 @@ -385,7 +393,7 @@ def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de if service_removed_count == 3: service_removed_event.set() - def mock_incoming_msg( + def mock_record_update_incoming_msg( service_state_change: r.ServiceStateChange, service_type: str, service_name: str, ttl: int ) -> r.DNSIncoming: generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -403,11 +411,15 @@ def mock_incoming_msg( # all three services added _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Added, service_types[0], service_names[0], 120), + mock_record_update_incoming_msg( + r.ServiceStateChange.Added, service_types[0], service_names[0], 120 + ), ) _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Added, service_types[1], service_names[1], 120), + mock_record_update_incoming_msg( + r.ServiceStateChange.Added, service_types[1], service_names[1], 120 + ), ) time.sleep(0.1) @@ -424,14 +436,18 @@ def _mock_get_expiration_time(self, percent): with patch("zeroconf.DNSRecord.get_expiration_time", new=_mock_get_expiration_time): _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Added, service_types[0], service_names[0], 120), + mock_record_update_incoming_msg( + r.ServiceStateChange.Added, service_types[0], service_names[0], 120 + ), ) # Add the last record 
after updating the first one # to ensure the service_add_event only gets set # after the update _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Added, service_types[2], service_names[2], 120), + mock_record_update_incoming_msg( + r.ServiceStateChange.Added, service_types[2], service_names[2], 120 + ), ) service_add_event.wait(wait_time) assert called_with_refresh_time_check is True @@ -440,21 +456,29 @@ def _mock_get_expiration_time(self, percent): _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Updated, service_types[0], service_names[0], 0), + mock_record_update_incoming_msg( + r.ServiceStateChange.Updated, service_types[0], service_names[0], 0 + ), ) # all three services removed _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Removed, service_types[0], service_names[0], 0), + mock_record_update_incoming_msg( + r.ServiceStateChange.Removed, service_types[0], service_names[0], 0 + ), ) _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Removed, service_types[1], service_names[1], 0), + mock_record_update_incoming_msg( + r.ServiceStateChange.Removed, service_types[1], service_names[1], 0 + ), ) _inject_response( zeroconf, - mock_incoming_msg(r.ServiceStateChange.Removed, service_types[2], service_names[2], 0), + mock_record_update_incoming_msg( + r.ServiceStateChange.Removed, service_types[2], service_names[2], 0 + ), ) service_removed_event.wait(wait_time) assert service_added_count == 3 @@ -472,93 +496,6 @@ def _mock_get_expiration_time(self, percent): zeroconf.close() -def test_backoff(): - got_query = Event() - - type_ = "_http._tcp.local." 
- zeroconf_browser = Zeroconf(interfaces=['127.0.0.1']) - _wait_for_start(zeroconf_browser) - zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() - - # we are going to patch the zeroconf send to check query transmission - old_send = zeroconf_browser.async_send - - time_offset = 0.0 - start_time = time.monotonic() * 1000 - initial_query_interval = _services_browser._BROWSER_TIME / 1000 - - def _current_time_millis(): - """Current system time in milliseconds""" - return start_time + time_offset * 1000 - - def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): - """Sends an outgoing packet.""" - got_query.set() - old_send(out, addr=addr, port=port, v6_flow_scope=v6_flow_scope) - - class ServiceBrowserWithPatchedTime(_services_browser.ServiceBrowser): - def _async_start(self) -> None: - """Generate the next time and setup listeners. - - Must be called by uses of this base class after they - have finished setting their properties. - """ - super()._async_start() - self.query_scheduler.start(_current_time_millis()) - - def _async_send_ready_queries_schedule_next(self): - if self.done or self.zc.done: - return - now = _current_time_millis() - self._async_send_ready_queries(now) - self._async_schedule_next(now) - - # patch the zeroconf send - # patch the zeroconf current_time_millis - # patch the backoff limit to prevent test running forever - with patch.object(zeroconf_browser, "async_send", send), patch.object( - _services_browser, "_BROWSER_BACKOFF_LIMIT", 10 - ), patch.object(_services_browser, "_FIRST_QUERY_DELAY_RANDOM_INTERVAL", (0, 0)): - # dummy service callback - def on_service_state_change(zeroconf, service_type, state_change, name): - pass - - browser = ServiceBrowserWithPatchedTime(zeroconf_browser, type_, [on_service_state_change]) - - try: - # Test that queries are sent at increasing intervals - sleep_count = 0 - next_query_interval = 0.0 - expected_query_time = 0.0 - while True: - sleep_count += 1 - 
got_query.wait(0.1) - if time_offset == expected_query_time: - assert got_query.is_set() - got_query.clear() - if next_query_interval == _services_browser._BROWSER_BACKOFF_LIMIT: - # Only need to test up to the point where we've seen a query - # after the backoff limit has been hit - break - elif next_query_interval == 0: - next_query_interval = initial_query_interval - expected_query_time = initial_query_interval - else: - next_query_interval = min( - 2 * next_query_interval, _services_browser._BROWSER_BACKOFF_LIMIT - ) - expected_query_time += next_query_interval - else: - assert not got_query.is_set() - time_offset += initial_query_interval - assert zeroconf_browser.loop is not None - zeroconf_browser.loop.call_soon_threadsafe(browser._async_send_ready_queries_schedule_next) - - finally: - browser.cancel() - zeroconf_browser.close() - - def test_first_query_delay(): """Verify the first query is delayed. @@ -598,48 +535,225 @@ def on_service_state_change(zeroconf, service_type, state_change, name): zeroconf_browser.close() -def test_asking_default_is_asking_qm_questions_after_the_first_qu(): - """Verify the service browser's first question is QU and subsequent ones are QM questions.""" - type_ = "_quservice._tcp.local." - zeroconf_browser = Zeroconf(interfaces=['127.0.0.1']) +@pytest.mark.asyncio +async def test_asking_default_is_asking_qm_questions_after_the_first_qu(): + """Verify the service browser's first questions are QU and refresh queries are QM.""" + service_added = asyncio.Event() + service_removed = asyncio.Event() + unexpected_ttl = asyncio.Event() + got_query = asyncio.Event() - # we are going to patch the zeroconf send to check query transmission + type_ = "_http._tcp.local." 
+ registration_name = "xxxyyy.%s" % type_ + + def on_service_state_change(zeroconf, service_type, state_change, name): + if name == registration_name: + if state_change is ServiceStateChange.Added: + service_added.set() + elif state_change is ServiceStateChange.Removed: + service_removed.set() + + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_browser = aiozc.zeroconf + zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() + await zeroconf_browser.async_wait_for_start() + + # we are going to patch the zeroconf send to check packet sizes old_send = zeroconf_browser.async_send - first_outgoing = None - second_outgoing = None + expected_ttl = const._DNS_OTHER_TTL + questions: List[List[DNSQuestion]] = [] - def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): + def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" - nonlocal first_outgoing - nonlocal second_outgoing - if first_outgoing is not None and second_outgoing is None: # type: ignore[unreachable] - second_outgoing = out # type: ignore[unreachable] - if first_outgoing is None: - first_outgoing = out - old_send(out, addr=addr, port=port) + pout = r.DNSIncoming(out.packets()[0]) + questions.append(pout.questions) + got_query.set() + old_send(out, addr=addr, port=port, v6_flow_scope=v6_flow_scope) - # patch the zeroconf send + assert len(zeroconf_browser.engine.protocols) == 2 + + aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_registrar = aio_zeroconf_registrar.zeroconf + await aio_zeroconf_registrar.zeroconf.async_wait_for_start() + + assert len(zeroconf_registrar.engine.protocols) == 2 + # patch the zeroconf send so we can capture what is being sent with patch.object(zeroconf_browser, "async_send", send): - # dummy service callback - def on_service_state_change(zeroconf, service_type, state_change, name): - pass + service_added = asyncio.Event() + service_removed = asyncio.Event() + + browser 
= AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + info = ServiceInfo( + type_, + registration_name, + 80, + 0, + 0, + {'path': '/~paulsm/'}, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], + ) + task = await aio_zeroconf_registrar.async_register_service(info) + await task + loop = asyncio.get_running_loop() + try: + await asyncio.wait_for(service_added.wait(), 1) + assert service_added.is_set() + # Make sure the startup queries are sent + original_now = loop.time() + now_millis = original_now * 1000 + for query_count in range(_services_browser.STARTUP_QUERIES): + now_millis += (2**query_count) * 1000 + time_changed_millis(now_millis) + + got_query.clear() + now_millis = original_now * 1000 + assert not unexpected_ttl.is_set() + # Move time forward past when the TTL is no longer + # fresh (AKA 75% of the TTL) + now_millis += (expected_ttl * 1000) * 0.80 + time_changed_millis(now_millis) + + await asyncio.wait_for(got_query.wait(), 1) + assert not unexpected_ttl.is_set() + + assert len(questions) == _services_browser.STARTUP_QUERIES + 1 + # The first question should be QU to try to + # populate the known answers and limit the impact + # of the QM questions that follow. 
We still + # have to ask QM questions for the startup queries + # because some devices will not respond to QU + assert questions[0][0].unicast is True + # The remaining questions should be QM questions + for question in questions[1:]: + assert question[0].unicast is False + # Don't remove service, allow close() to cleanup + finally: + await aio_zeroconf_registrar.async_close() + await asyncio.wait_for(service_removed.wait(), 1) + assert service_removed.is_set() + await browser.async_cancel() + await aiozc.async_close() + + +@pytest.mark.asyncio +async def test_ttl_refresh_cancelled_rescue_query(): + """Verify seeing a name again cancels the rescue query.""" + service_added = asyncio.Event() + service_removed = asyncio.Event() + unexpected_ttl = asyncio.Event() + got_query = asyncio.Event() - browser = ServiceBrowser(zeroconf_browser, type_, [on_service_state_change], delay=5) - time.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 120 + 50)) + type_ = "_http._tcp.local." 
+ registration_name = "xxxyyy.%s" % type_ + + def on_service_state_change(zeroconf, service_type, state_change, name): + if name == registration_name: + if state_change is ServiceStateChange.Added: + service_added.set() + elif state_change is ServiceStateChange.Removed: + service_removed.set() + + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_browser = aiozc.zeroconf + zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() + await zeroconf_browser.async_wait_for_start() + + # we are going to patch the zeroconf send to check packet sizes + old_send = zeroconf_browser.async_send + + expected_ttl = const._DNS_OTHER_TTL + packets = [] + + def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): + """Sends an outgoing packet.""" + pout = r.DNSIncoming(out.packets()[0]) + packets.append(pout) + got_query.set() + old_send(out, addr=addr, port=port, v6_flow_scope=v6_flow_scope) + + assert len(zeroconf_browser.engine.protocols) == 2 + + aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_registrar = aio_zeroconf_registrar.zeroconf + await aio_zeroconf_registrar.zeroconf.async_wait_for_start() + + assert len(zeroconf_registrar.engine.protocols) == 2 + # patch the zeroconf send so we can capture what is being sent + with patch.object(zeroconf_browser, "async_send", send): + service_added = asyncio.Event() + service_removed = asyncio.Event() + + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + info = ServiceInfo( + type_, + registration_name, + 80, + 0, + 0, + {'path': '/~paulsm/'}, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], + ) + task = await aio_zeroconf_registrar.async_register_service(info) + await task + loop = asyncio.get_running_loop() try: - assert first_outgoing.questions[0].unicast is True # type: ignore[union-attr] - assert second_outgoing.questions[0].unicast is False # type: ignore[attr-defined] + await 
asyncio.wait_for(service_added.wait(), 1) + assert service_added.is_set() + # Make sure the startup queries are sent + original_now = loop.time() + now_millis = original_now * 1000 + for query_count in range(_services_browser.STARTUP_QUERIES): + now_millis += (2**query_count) * 1000 + time_changed_millis(now_millis) + + now_millis = original_now * 1000 + assert not unexpected_ttl.is_set() + await asyncio.wait_for(got_query.wait(), 1) + got_query.clear() + assert len(packets) == _services_browser.STARTUP_QUERIES + packets.clear() + + # Move time forward past when the TTL is no longer + # fresh (AKA 75% of the TTL) + now_millis += (expected_ttl * 1000) * 0.80 + # Inject a response that will reschedule + # the rescue query so it does not happen + with patch("time.monotonic", return_value=now_millis / 1000): + zeroconf_browser.record_manager.async_updates_from_response( + mock_incoming_msg([info.dns_pointer()]), + ) + + time_changed_millis(now_millis) + await asyncio.sleep(0) + + # Verify we did not send a rescue query + assert not packets + + # We should still get a rescue query once the rescheduled + # query time is reached + now_millis += (expected_ttl * 1000) * 0.76 + time_changed_millis(now_millis) + await asyncio.wait_for(got_query.wait(), 1) + assert len(packets) == 1 + # Don't remove service, allow close() to cleanup finally: - browser.cancel() - zeroconf_browser.close() + await aio_zeroconf_registrar.async_close() + await asyncio.wait_for(service_removed.wait(), 1) + assert service_removed.is_set() + await browser.async_cancel() + await aiozc.async_close() -def test_asking_qm_questions(): +@pytest.mark.asyncio +async def test_asking_qm_questions(): """Verify explictly asking QM questions.""" type_ = "_quservice._tcp.local." 
- zeroconf_browser = Zeroconf(interfaces=['127.0.0.1']) - + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_browser = aiozc.zeroconf + await zeroconf_browser.async_wait_for_start() # we are going to patch the zeroconf send to check query transmission old_send = zeroconf_browser.async_send @@ -658,21 +772,24 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): def on_service_state_change(zeroconf, service_type, state_change, name): pass - browser = ServiceBrowser( + browser = AsyncServiceBrowser( zeroconf_browser, type_, [on_service_state_change], question_type=r.DNSQuestionType.QM ) - time.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) + await asyncio.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) try: assert first_outgoing.questions[0].unicast is False # type: ignore[union-attr] finally: - browser.cancel() - zeroconf_browser.close() + await browser.async_cancel() + await aiozc.async_close() -def test_asking_qu_questions(): +@pytest.mark.asyncio +async def test_asking_qu_questions(): """Verify the service browser can ask QU questions.""" type_ = "_quservice._tcp.local." 
- zeroconf_browser = Zeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_browser = aiozc.zeroconf + await zeroconf_browser.async_wait_for_start() # we are going to patch the zeroconf send to check query transmission old_send = zeroconf_browser.async_send @@ -692,15 +809,15 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): def on_service_state_change(zeroconf, service_type, state_change, name): pass - browser = ServiceBrowser( + browser = AsyncServiceBrowser( zeroconf_browser, type_, [on_service_state_change], question_type=r.DNSQuestionType.QU ) - time.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) + await asyncio.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) try: assert first_outgoing.questions[0].unicast is True # type: ignore[union-attr] finally: - browser.cancel() - zeroconf_browser.close() + await browser.async_cancel() + await aiozc.async_close() def test_legacy_record_update_listener(): @@ -788,12 +905,6 @@ def on_service_state_change(zeroconf, service_type, state_change, name): address = socket.inet_aton(address_parsed) info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) - def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - for record in records: - generated.add_answer_at_time(record, 0) - return r.DNSIncoming(generated.packets()[0]) - _inject_response( zc, mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), @@ -861,12 +972,6 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de address = socket.inet_aton(address_parsed) info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) - def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: - generated = 
r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - for record in records: - generated.add_answer_at_time(record, 0) - return r.DNSIncoming(generated.packets()[0]) - _inject_response( zc, mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), @@ -920,12 +1025,6 @@ def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de address = socket.inet_aton(address_parsed) info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) - def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - for record in records: - generated.add_answer_at_time(record, 0) - return r.DNSIncoming(generated.packets()[0]) - _inject_response( zc, mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), @@ -948,7 +1047,7 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: zc.close() -def test_servicebrowser_uses_non_strict_names(): +def test_service_browser_uses_non_strict_names(): """Verify we can look for technically invalid names as we cannot change what others do.""" # dummy service callback @@ -1010,34 +1109,34 @@ async def test_generate_service_query_suppress_duplicate_questions(): assert zc.question_history.suppresses(question, now, other_known_answers) # The known answer list is different, do not suppress - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert outs zc.cache.async_add_records([answer]) # The known answer list contains all the asked questions in the history # we should suppress - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert not 
outs # We do not suppress once the question history expires outs = _services_browser.generate_service_query( - zc, now + 1000, [name], multicast=True, question_type=None + zc, now + 1000, {name}, multicast=True, question_type=None ) assert outs # We do not suppress QU queries ever - outs = _services_browser.generate_service_query(zc, now, [name], multicast=False, question_type=None) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=False, question_type=None) assert outs zc.question_history.async_expire(now + 2000) # No suppression after clearing the history - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert outs # The previous query we just sent is still remembered and # the next one is suppressed - outs = _services_browser.generate_service_query(zc, now, [name], multicast=True, question_type=None) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert not outs await aiozc.async_close() @@ -1047,47 +1146,162 @@ async def test_generate_service_query_suppress_duplicate_questions(): async def test_query_scheduler(): delay = const._BROWSER_TIME types_ = {"_hap._tcp.local.", "_http._tcp.local."} - query_scheduler = _services_browser.QueryScheduler(types_, delay, (0, 0)) + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + await aiozc.zeroconf.async_wait_for_start() + zc = aiozc.zeroconf + sends: List[r.DNSIncoming] = [] - now = current_time_millis() - query_scheduler.start(now) + def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): + """Sends an outgoing packet.""" + pout = r.DNSIncoming(out.packets()[0]) + sends.append(pout) - # Test query interval is increasing - assert query_scheduler.millis_to_wait(now - 1) == 1 - assert query_scheduler.millis_to_wait(now) == 0 - assert 
query_scheduler.millis_to_wait(now + 1) == 0 + query_scheduler = _services_browser.QueryScheduler(zc, types_, None, 0, True, delay, (0, 0), None) + loop = asyncio.get_running_loop() - assert set(query_scheduler.process_ready_types(now)) == types_ - assert set(query_scheduler.process_ready_types(now)) == set() - assert query_scheduler.millis_to_wait(now) == pytest.approx(delay, 0.00001) + # patch the zeroconf send so we can capture what is being sent + with patch.object(zc, "async_send", send): - assert set(query_scheduler.process_ready_types(now + delay)) == types_ - assert set(query_scheduler.process_ready_types(now + delay)) == set() - assert query_scheduler.millis_to_wait(now) == pytest.approx(delay * 3, 0.00001) + query_scheduler.start(loop) - assert set(query_scheduler.process_ready_types(now + delay * 3)) == types_ - assert set(query_scheduler.process_ready_types(now + delay * 3)) == set() - assert query_scheduler.millis_to_wait(now) == pytest.approx(delay * 7, 0.00001) + original_now = loop.time() + now_millis = original_now * 1000 + for query_count in range(_services_browser.STARTUP_QUERIES): + now_millis += (2**query_count) * 1000 + time_changed_millis(now_millis) - assert set(query_scheduler.process_ready_types(now + delay * 7)) == types_ - assert set(query_scheduler.process_ready_types(now + delay * 7)) == set() - assert query_scheduler.millis_to_wait(now) == pytest.approx(delay * 15, 0.00001) + ptr_record = r.DNSPointer( + "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "zoomer._hap._tcp.local.", + ) + ptr2_record = r.DNSPointer( + "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "disappear._hap._tcp.local.", + ) - assert set(query_scheduler.process_ready_types(now + delay * 15)) == types_ - assert set(query_scheduler.process_ready_types(now + delay * 15)) == set() + query_scheduler.schedule_ptr_first_refresh(ptr_record) + expected_when_time = 
ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) + expected_expire_time = ptr_record.get_expiration_time(100) + ptr_query = _ScheduledPTRQuery( + ptr_record.alias, ptr_record.name, int(ptr_record.ttl), expected_expire_time, expected_when_time + ) + assert query_scheduler._query_heap == [ptr_query] + + query_scheduler.schedule_ptr_first_refresh(ptr2_record) + expected_when_time = ptr2_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) + expected_expire_time = ptr2_record.get_expiration_time(100) + ptr2_query = _ScheduledPTRQuery( + ptr2_record.alias, + ptr2_record.name, + int(ptr2_record.ttl), + expected_expire_time, + expected_when_time, + ) + + assert query_scheduler._query_heap == [ptr_query, ptr2_query] + + # Simulate PTR one goodbye - # Test if we reschedule 1 second later, the millis_to_wait goes up by 1 - query_scheduler.reschedule_type("_hap._tcp.local.", now + delay * 16) - assert query_scheduler.millis_to_wait(now) == pytest.approx(delay * 16, 0.00001) + query_scheduler.cancel_ptr_refresh(ptr_record) + ptr_query.cancelled = True - assert set(query_scheduler.process_ready_types(now + delay * 15)) == set() + assert query_scheduler._query_heap == [ptr_query, ptr2_query] + assert query_scheduler._query_heap[0].cancelled is True + assert query_scheduler._query_heap[1].cancelled is False - # Test if we reschedule 1 second later... 
and its ready for processing - assert set(query_scheduler.process_ready_types(now + delay * 16)) == {"_hap._tcp.local."} - assert query_scheduler.millis_to_wait(now) == pytest.approx(delay * 31, 0.00001) - assert set(query_scheduler.process_ready_types(now + delay * 20)) == set() + # Move time forward past when the TTL is no longer + # fresh (AKA 75% of the TTL) + now_millis += (ptr2_record.ttl * 1000) * 0.80 + time_changed_millis(now_millis) + assert len(query_scheduler._query_heap) == 1 + first_heap = query_scheduler._query_heap[0] + assert first_heap.cancelled is False + assert first_heap.alias == ptr2_record.alias + + # Move time forward past when the record expires + now_millis += (ptr2_record.ttl * 1000) * 0.20 + time_changed_millis(now_millis) + assert len(query_scheduler._query_heap) == 0 + + await aiozc.async_close() - assert set(query_scheduler.process_ready_types(now + delay * 31)) == {"_http._tcp.local."} + +@pytest.mark.asyncio +async def test_query_scheduler_rescue_records(): + delay = const._BROWSER_TIME + types_ = {"_hap._tcp.local.", "_http._tcp.local."} + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + await aiozc.zeroconf.async_wait_for_start() + zc = aiozc.zeroconf + sends: List[r.DNSIncoming] = [] + + def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): + """Sends an outgoing packet.""" + pout = r.DNSIncoming(out.packets()[0]) + sends.append(pout) + + query_scheduler = _services_browser.QueryScheduler(zc, types_, None, 0, True, delay, (0, 0), None) + loop = asyncio.get_running_loop() + + # patch the zeroconf send so we can capture what is being sent + with patch.object(zc, "async_send", send): + + query_scheduler.start(loop) + + original_now = loop.time() + now_millis = original_now * 1000 + for query_count in range(_services_browser.STARTUP_QUERIES): + now_millis += (2**query_count) * 1000 + time_changed_millis(now_millis) + + ptr_record = r.DNSPointer( + "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + 
const._DNS_OTHER_TTL, + "zoomer._hap._tcp.local.", + ) + + query_scheduler.schedule_ptr_first_refresh(ptr_record) + expected_when_time = ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) + expected_expire_time = ptr_record.get_expiration_time(100) + ptr_query = _ScheduledPTRQuery( + ptr_record.alias, ptr_record.name, int(ptr_record.ttl), expected_expire_time, expected_when_time + ) + assert query_scheduler._query_heap == [ptr_query] + assert query_scheduler._query_heap[0].cancelled is False + + # Move time forward past when the TTL is no longer + # fresh (AKA 75% of the TTL) + now_millis += (ptr_record.ttl * 1000) * 0.76 + time_changed_millis(now_millis) + assert len(query_scheduler._query_heap) == 1 + new_when = query_scheduler._query_heap[0].when_millis + assert query_scheduler._query_heap[0].cancelled is False + assert new_when >= expected_when_time + + # Move time forward again, but not enough to expire the + # record to make sure we try to rescue it + now_millis += (ptr_record.ttl * 1000) * 0.11 + time_changed_millis(now_millis) + assert len(query_scheduler._query_heap) == 1 + second_new_when = query_scheduler._query_heap[0].when_millis + assert query_scheduler._query_heap[0].cancelled is False + assert second_new_when >= new_when + + # Move time forward again, enough that we will no longer + # try to rescue the record + now_millis += (ptr_record.ttl * 1000) * 0.11 + time_changed_millis(now_millis) + assert len(query_scheduler._query_heap) == 0 + + await aiozc.async_close() def test_service_browser_matching(): @@ -1130,12 +1344,6 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de not_match_type_, not_match_registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] ) - def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - for record in records: - generated.add_answer_at_time(record, 0) - return 
r.DNSIncoming(generated.packets()[0]) - _inject_response( zc, mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), @@ -1221,12 +1429,6 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de addresses=[address], ) - def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: - generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - for record in records: - generated.add_answer_at_time(record, 0) - return r.DNSIncoming(generated.packets()[0]) - _inject_response( zc, mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), @@ -1269,3 +1471,181 @@ def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: browser.cancel() zc.close() + + +def test_scheduled_ptr_query_dunder_methods(): + query75 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 75) + query80 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 80) + query75_2 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 140, 75) + other = object() + stringified = str(query75) + assert "zoomy._hap._tcp.local." 
in stringified + assert "120" in stringified + assert "75" in stringified + assert "ScheduledPTRQuery" in stringified + + assert query75 == query75 + assert query75 != query80 + assert query75 == query75_2 + assert query75 < query80 + assert query75 <= query80 + assert query80 > query75 + assert query80 >= query75 + + assert query75 != other + with pytest.raises(TypeError): + query75 < other # type: ignore[operator] + with pytest.raises(TypeError): + query75 <= other # type: ignore[operator] + with pytest.raises(TypeError): + query75 > other # type: ignore[operator] + with pytest.raises(TypeError): + query75 >= other # type: ignore[operator] + + +@pytest.mark.asyncio +async def test_close_zeroconf_without_browser_before_start_up_queries(): + """Test that we stop sending startup queries if zeroconf is closed out from under the browser.""" + service_added = asyncio.Event() + type_ = "_http._tcp.local." + registration_name = "xxxyyy.%s" % type_ + + def on_service_state_change(zeroconf, service_type, state_change, name): + if name == registration_name: + if state_change is ServiceStateChange.Added: + service_added.set() + + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_browser = aiozc.zeroconf + zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() + await zeroconf_browser.async_wait_for_start() + + sends: list[r.DNSIncoming] = [] + + def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): + """Sends an outgoing packet.""" + pout = r.DNSIncoming(out.packets()[0]) + sends.append(pout) + + assert len(zeroconf_browser.engine.protocols) == 2 + + aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_registrar = aio_zeroconf_registrar.zeroconf + await aio_zeroconf_registrar.zeroconf.async_wait_for_start() + + assert len(zeroconf_registrar.engine.protocols) == 2 + # patch the zeroconf send so we can capture what is being sent + with patch.object(zeroconf_browser, "async_send", send): + service_added = 
asyncio.Event() + + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + info = ServiceInfo( + type_, + registration_name, + 80, + 0, + 0, + {'path': '/~paulsm/'}, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], + ) + task = await aio_zeroconf_registrar.async_register_service(info) + await task + loop = asyncio.get_running_loop() + try: + await asyncio.wait_for(service_added.wait(), 1) + assert service_added.is_set() + await aiozc.async_close() + sends.clear() + # Make sure the startup queries are sent + original_now = loop.time() + now_millis = original_now * 1000 + for query_count in range(_services_browser.STARTUP_QUERIES): + now_millis += (2**query_count) * 1000 + time_changed_millis(now_millis) + + # We should not send any queries after close + assert not sends + finally: + await aio_zeroconf_registrar.async_close() + await browser.async_cancel() + + +@pytest.mark.asyncio +async def test_close_zeroconf_without_browser_after_start_up_queries(): + """Test that we stop sending rescue queries if zeroconf is closed out from under the browser.""" + service_added = asyncio.Event() + + type_ = "_http._tcp.local." 
+ registration_name = "xxxyyy.%s" % type_ + + def on_service_state_change(zeroconf, service_type, state_change, name): + if name == registration_name: + if state_change is ServiceStateChange.Added: + service_added.set() + + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_browser = aiozc.zeroconf + zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() + await zeroconf_browser.async_wait_for_start() + + sends: list[r.DNSIncoming] = [] + + def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): + """Sends an outgoing packet.""" + pout = r.DNSIncoming(out.packets()[0]) + sends.append(pout) + + assert len(zeroconf_browser.engine.protocols) == 2 + + aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + zeroconf_registrar = aio_zeroconf_registrar.zeroconf + await aio_zeroconf_registrar.zeroconf.async_wait_for_start() + + assert len(zeroconf_registrar.engine.protocols) == 2 + # patch the zeroconf send so we can capture what is being sent + with patch.object(zeroconf_browser, "async_send", send): + service_added = asyncio.Event() + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + expected_ttl = const._DNS_OTHER_TTL + info = ServiceInfo( + type_, + registration_name, + 80, + 0, + 0, + {'path': '/~paulsm/'}, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], + ) + task = await aio_zeroconf_registrar.async_register_service(info) + await task + loop = asyncio.get_running_loop() + try: + await asyncio.wait_for(service_added.wait(), 1) + assert service_added.is_set() + sends.clear() + # Make sure the startup queries are sent + original_now = loop.time() + now_millis = original_now * 1000 + for query_count in range(_services_browser.STARTUP_QUERIES): + now_millis += (2**query_count) * 1000 + time_changed_millis(now_millis) + + # We should not send any queries after close + assert sends + + await aiozc.async_close() + sends.clear() + + now_millis = original_now * 1000 + # 
Move time forward past when the TTL is no longer + # fresh (AKA 75% of the TTL) + now_millis += (expected_ttl * 1000) * 0.80 + time_changed_millis(now_millis) + + # We should not send the query after close + assert not sends + finally: + await aio_zeroconf_registrar.async_close() + await browser.async_cancel() diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 53bce4b4..d4594788 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -8,7 +8,6 @@ import os import socket import threading -import time from typing import cast from unittest.mock import ANY, call, patch @@ -44,7 +43,12 @@ ) from zeroconf.const import _LISTENER_TIME -from . import QuestionHistoryWithoutSuppression, _clear_cache, has_working_ipv6 +from . import ( + QuestionHistoryWithoutSuppression, + _clear_cache, + has_working_ipv6, + time_changed_millis, +) log = logging.getLogger('zeroconf') original_logging_level = logging.NOTSET @@ -991,20 +995,20 @@ def on_service_state_change(zeroconf, service_type, state_change, name): # we are going to patch the zeroconf send to check packet sizes old_send = zeroconf_browser.async_send - time_offset = 0.0 - - def _new_current_time_millis(): - """Current system time in milliseconds""" - return (time.monotonic() * 1000) + (time_offset * 1000) - - expected_ttl = const._DNS_HOST_TTL + expected_ttl = const._DNS_OTHER_TTL nbr_answers = 0 + answers = [] + packets = [] def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" pout = DNSIncoming(out.packets()[0]) + packets.append(pout) + last_answers = pout.answers() + answers.append(last_answers) + nonlocal nbr_answers - for answer in pout.answers(): + for answer in last_answers: nbr_answers += 1 if not answer.ttl > expected_ttl / 2: unexpected_ttl.set() @@ -1020,49 +1024,91 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aio_zeroconf_registrar.zeroconf.async_wait_for_start() assert 
len(zeroconf_registrar.engine.protocols) == 2 - # patch the zeroconf send - # patch the zeroconf current_time_millis - # patch the backoff limit to ensure we always get one query every 1/4 of the DNS TTL - # Disable duplicate question suppression and duplicate packet suppression for this test as it works - # by asking the same question over and over - with patch.object(zeroconf_browser, "async_send", send), patch( - "zeroconf._services.browser.current_time_millis", _new_current_time_millis - ), patch.object(_services_browser, "_BROWSER_BACKOFF_LIMIT", int(expected_ttl / 4)): + # patch the zeroconf send so we can capture what is being sent + with patch.object(zeroconf_browser, "async_send", send): service_added = asyncio.Event() service_removed = asyncio.Event() browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) - - desc = {'path': '/~paulsm/'} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + {'path': '/~paulsm/'}, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) task = await aio_zeroconf_registrar.async_register_service(info) await task - + loop = asyncio.get_running_loop() try: await asyncio.wait_for(service_added.wait(), 1) assert service_added.is_set() + # Make sure the startup queries are sent + original_now = loop.time() + start_millis = original_now * 1000 - # Test that we receive queries containing answers only if the remaining TTL - # is greater than half the original TTL - sleep_count = 0 - test_iterations = 50 - - while nbr_answers < test_iterations: - # Increase simulated time shift by 1/4 of the TTL in seconds - time_offset += expected_ttl / 4 - now = _new_current_time_millis() - # Force the next query to be sent since we are testing - # to see if the query contains answers and not the scheduler - browser.query_scheduler._force_reschedule_type(type_, now + (1000 * expected_ttl)) - 
browser.reschedule_type(type_, now, now) - sleep_count += 1 - await asyncio.wait_for(got_query.wait(), 1) - got_query.clear() - # Prevent the test running indefinitely in an error condition - assert sleep_count < test_iterations * 4 + now_millis = start_millis + for query_count in range(_services_browser.STARTUP_QUERIES): + now_millis += (2**query_count) * 1000 + time_changed_millis(now_millis) + + got_query.clear() + assert not unexpected_ttl.is_set() + + assert len(packets) == _services_browser.STARTUP_QUERIES + packets.clear() + + # Wait for the first refresh query + # Move time forward past when the TTL is no longer + # fresh (AKA ~75% of the TTL) + now_millis = start_millis + ((expected_ttl * 1000) * 0.76) + time_changed_millis(now_millis) + + await asyncio.wait_for(got_query.wait(), 1) + assert not unexpected_ttl.is_set() + assert len(packets) == 1 + packets.clear() + + assert len(answers) == _services_browser.STARTUP_QUERIES + 1 + # The first question should have no known answers + assert len(answers[0]) == 0 + # The rest of the startup questions should have + # known answers + for answer_list in answers[1:-2]: + assert len(answer_list) == 1 + # Once the TTL is reached, the last question should have no known answers + assert len(answers[-1]) == 0 + + got_query.clear() + packets.clear() + # Move time forward past when the TTL is no longer + # fresh (AKA 85% of the TTL) to ensure we try + # to rescue the record + now_millis = start_millis + ((expected_ttl * 1000) * 0.87) + time_changed_millis(now_millis) + + await asyncio.wait_for(got_query.wait(), 1) + assert len(packets) == 1 assert not unexpected_ttl.is_set() + + packets.clear() + got_query.clear() + # Move time forward past when the TTL is no longer + # fresh (AKA 95% of the TTL). 
At this point + # nothing should get scheduled rescued because the rescue + # would exceed the TTL + now_millis = start_millis + ((expected_ttl * 1000) * 0.98) + + # Verify we don't send a query for a record that is + # past the TTL as we should not try to rescue it + # once its past the TTL + time_changed_millis(now_millis) + await asyncio.wait_for(got_query.wait(), 1) + assert len(packets) == 1 + # Don't remove service, allow close() to cleanup finally: await aio_zeroconf_registrar.async_close() @@ -1305,67 +1351,3 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de ('add', '_http._tcp.local.', 'ShellyPro4PM-94B97EC07650._http._tcp.local.'), ('update', '_http._tcp.local.', 'ShellyPro4PM-94B97EC07650._http._tcp.local.'), ] - - -@pytest.mark.asyncio -async def test_service_browser_does_not_try_to_send_if_not_ready(): - """Test that the service browser does not try to send if not ready when rescheduling a type.""" - service_added = asyncio.Event() - type_ = "_http._tcp.local." 
- registration_name = "nosend.%s" % type_ - - def on_service_state_change(zeroconf, service_type, state_change, name): - if name == registration_name: - if state_change is ServiceStateChange.Added: - service_added.set() - - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) - zeroconf_browser = aiozc.zeroconf - await zeroconf_browser.async_wait_for_start() - - expected_ttl = const._DNS_HOST_TTL - time_offset = 0.0 - - def _new_current_time_millis(): - """Current system time in milliseconds""" - return (time.monotonic() * 1000) + (time_offset * 1000) - - assert len(zeroconf_browser.engine.protocols) == 2 - - aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) - zeroconf_registrar = aio_zeroconf_registrar.zeroconf - await aio_zeroconf_registrar.zeroconf.async_wait_for_start() - assert len(zeroconf_registrar.engine.protocols) == 2 - with patch("zeroconf._services.browser.current_time_millis", _new_current_time_millis): - service_added = asyncio.Event() - browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) - desc = {'path': '/~paulsm/'} - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] - ) - task = await aio_zeroconf_registrar.async_register_service(info) - await task - - try: - await asyncio.wait_for(service_added.wait(), 1) - time_offset = 1000 * expected_ttl # set the time to the end of the ttl - now = _new_current_time_millis() - browser.query_scheduler._force_reschedule_type(type_, now + (1000 * expected_ttl)) - # Make sure the query schedule is to a time in the future - # so we will reschedule - with patch.object( - browser, "_async_send_ready_queries" - ) as _async_send_ready_queries, patch.object( - browser, "_async_send_ready_queries_schedule_next" - ) as _async_send_ready_queries_schedule_next: - # Reschedule the type to be sent in 1ms in the future - # to make sure the query is not sent - browser.reschedule_type(type_, now, now + 1) - assert not 
_async_send_ready_queries.called - await asyncio.sleep(0.01) - # Make sure it does happen after the sleep - assert _async_send_ready_queries_schedule_next.called - finally: - await aio_zeroconf_registrar.async_close() - await browser.async_cancel() - await aiozc.async_close() From b329d99917bb731b4c70bf20c7c010eeb85ad9fd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 15 Dec 2023 17:04:26 -1000 Subject: [PATCH 210/434] feat: small speed up to ServiceInfo construction (#1346) --- src/zeroconf/_services/info.pxd | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index c17723eb..0178a111 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -77,9 +77,9 @@ cdef class ServiceInfo(RecordUpdateListener): cdef void _generate_decoded_properties(self) @cython.locals(properties_contain_str=bint) - cpdef _set_properties(self, cython.dict properties) + cpdef void _set_properties(self, cython.dict properties) - cdef _set_text(self, cython.bytes text) + cdef void _set_text(self, cython.bytes text) @cython.locals(record=DNSAddress) cdef _get_ip_addresses_from_cache_lifo(self, object zc, double now, object type) @@ -94,9 +94,9 @@ cdef class ServiceInfo(RecordUpdateListener): @cython.locals(cache=DNSCache) cdef cython.list _get_address_records_from_cache_by_type(self, object zc, object _type) - cdef _set_ipv4_addresses_from_cache(self, object zc, double now) + cdef void _set_ipv4_addresses_from_cache(self, object zc, double now) - cdef _set_ipv6_addresses_from_cache(self, object zc, double now) + cdef void _set_ipv6_addresses_from_cache(self, object zc, double now) cdef cython.list _ip_addresses_by_version_value(self, object version_value) @@ -121,7 +121,7 @@ cdef class ServiceInfo(RecordUpdateListener): @cython.locals(cacheable=cython.bint) cdef cython.set _get_address_and_nsec_records(self, object override_ttl) - cpdef async_clear_cache(self) + 
cpdef void async_clear_cache(self) @cython.locals(cache=DNSCache) - cdef _generate_request_query(self, object zc, double now, object question_type) + cdef DNSOutgoing _generate_request_query(self, object zc, double now, object question_type) From cf40470b89f918d3c24d7889d3536f3ffa44846c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 15 Dec 2023 17:39:51 -1000 Subject: [PATCH 211/434] fix: scheduling race with the QueryScheduler (#1347) --- src/zeroconf/_services/browser.pxd | 2 -- src/zeroconf/_services/browser.py | 13 +++---------- tests/services/test_browser.py | 6 +++--- 3 files changed, 6 insertions(+), 15 deletions(-) diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 88a5321d..4649291c 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -73,8 +73,6 @@ cdef class QueryScheduler: cdef double _clock_resolution_millis cdef object _question_type - cpdef void schedule_ptr_first_refresh(self, DNSPointer pointer) - cdef void _schedule_ptr_refresh(self, DNSPointer pointer, double expire_time_millis, double refresh_time_millis) cdef void _schedule_ptr_query(self, _ScheduledPTRQuery scheduled_query) diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 4d7646a2..2ff66074 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -374,12 +374,6 @@ def stop(self) -> None: self._next_scheduled_for_alias.clear() self._query_heap.clear() - def schedule_ptr_first_refresh(self, pointer: DNSPointer) -> None: - """Schedule a query for a pointer.""" - expire_time_millis = pointer.get_expiration_time(100) - refresh_time_millis = pointer.get_expiration_time(_EXPIRE_REFRESH_TIME_PERCENT) - self._schedule_ptr_refresh(pointer, expire_time_millis, refresh_time_millis) - def _schedule_ptr_refresh( self, pointer: DNSPointer, expire_time_millis: float_, refresh_time_millis: float_ ) -> None: @@ -415,6 +409,7 @@ def 
reschedule_ptr_first_refresh(self, pointer: DNSPointer) -> None: ): return current.cancelled = True + del self._next_scheduled_for_alias[pointer.alias] expire_time_millis = pointer.get_expiration_time(100) self._schedule_ptr_refresh(pointer, expire_time_millis, refresh_time_millis) @@ -490,10 +485,8 @@ def _process_ready_types(self) -> None: if query.when_millis > end_time_millis: next_scheduled = query break - + query = heappop(self._query_heap) ready_types.add(query.name) - - heappop(self._query_heap) del self._next_scheduled_for_alias[query.alias] # If there is still more than 10% of the TTL remaining # schedule a query again to try to rescue the record @@ -670,7 +663,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record for type_ in self.types.intersection(cached_possible_types(pointer.name)): if old_record is None: self._enqueue_callback(SERVICE_STATE_CHANGE_ADDED, type_, pointer.alias) - self.query_scheduler.schedule_ptr_first_refresh(pointer) + self.query_scheduler.reschedule_ptr_first_refresh(pointer) elif pointer.is_expired(now): self._enqueue_callback(SERVICE_STATE_CHANGE_REMOVED, type_, pointer.alias) self.query_scheduler.cancel_ptr_refresh(pointer) diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 6a3bd398..37896ba1 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -1185,7 +1185,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): "disappear._hap._tcp.local.", ) - query_scheduler.schedule_ptr_first_refresh(ptr_record) + query_scheduler.reschedule_ptr_first_refresh(ptr_record) expected_when_time = ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) expected_expire_time = ptr_record.get_expiration_time(100) ptr_query = _ScheduledPTRQuery( @@ -1193,7 +1193,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): ) assert query_scheduler._query_heap == [ptr_query] - 
query_scheduler.schedule_ptr_first_refresh(ptr2_record) + query_scheduler.reschedule_ptr_first_refresh(ptr2_record) expected_when_time = ptr2_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) expected_expire_time = ptr2_record.get_expiration_time(100) ptr2_query = _ScheduledPTRQuery( @@ -1268,7 +1268,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): "zoomer._hap._tcp.local.", ) - query_scheduler.schedule_ptr_first_refresh(ptr_record) + query_scheduler.reschedule_ptr_first_refresh(ptr_record) expected_when_time = ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) expected_expire_time = ptr_record.get_expiration_time(100) ptr_query = _ScheduledPTRQuery( From b9aae1de07bf1491e873bc314f8a1d7996127ad3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 16 Dec 2023 06:23:29 -1000 Subject: [PATCH 212/434] feat: make ServiceInfo aware of question history (#1348) --- src/zeroconf/_history.pxd | 4 +- src/zeroconf/_protocol/outgoing.pxd | 15 ++-- src/zeroconf/_protocol/outgoing.py | 24 ------ src/zeroconf/_services/info.pxd | 39 ++++++++- src/zeroconf/_services/info.py | 97 +++++++++++++++++----- tests/services/test_info.py | 121 ++++++++++++++++++++++++++-- 6 files changed, 234 insertions(+), 66 deletions(-) diff --git a/src/zeroconf/_history.pxd b/src/zeroconf/_history.pxd index 02a0fc9e..d1bb7baf 100644 --- a/src/zeroconf/_history.pxd +++ b/src/zeroconf/_history.pxd @@ -9,10 +9,10 @@ cdef class QuestionHistory: cdef cython.dict _history - cpdef add_question_at_time(self, DNSQuestion question, double now, cython.set known_answers) + cpdef void add_question_at_time(self, DNSQuestion question, double now, cython.set known_answers) @cython.locals(than=double, previous_question=cython.tuple, previous_known_answers=cython.set) cpdef bint suppresses(self, DNSQuestion question, double now, cython.set known_answers) @cython.locals(than=double, now_known_answers=cython.tuple) - cpdef async_expire(self, double now) 
+ cpdef void async_expire(self, double now) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 4353757a..2496a988 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -1,7 +1,6 @@ import cython -from .._cache cimport DNSCache from .._dns cimport DNSEntry, DNSPointer, DNSQuestion, DNSRecord from .incoming cimport DNSIncoming @@ -127,20 +126,16 @@ cdef class DNSOutgoing: ) cpdef packets(self) - cpdef add_question_or_all_cache(self, DNSCache cache, double now, str name, object type_, object class_) + cpdef void add_question(self, DNSQuestion question) - cpdef add_question_or_one_cache(self, DNSCache cache, double now, str name, object type_, object class_) - - cpdef add_question(self, DNSQuestion question) - - cpdef add_answer(self, DNSIncoming inp, DNSRecord record) + cpdef void add_answer(self, DNSIncoming inp, DNSRecord record) @cython.locals(now_double=double) - cpdef add_answer_at_time(self, DNSRecord record, double now) + cpdef void add_answer_at_time(self, DNSRecord record, double now) - cpdef add_authorative_answer(self, DNSPointer record) + cpdef void add_authorative_answer(self, DNSPointer record) - cpdef add_additional_answer(self, DNSRecord record) + cpdef void add_additional_answer(self, DNSRecord record) cpdef bint is_query(self) diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index 57f98169..f45c3935 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -25,7 +25,6 @@ from struct import Struct from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Tuple, Union -from .._cache import DNSCache from .._dns import DNSPointer, DNSQuestion, DNSRecord from .._exceptions import NamePartTooLongException from .._logger import log @@ -198,29 +197,6 @@ def add_additional_answer(self, record: DNSRecord) -> None: """ self.additionals.append(record) - def add_question_or_one_cache( - self, cache: 
DNSCache, now: float_, name: str_, type_: int_, class_: int_ - ) -> None: - """Add a question if it is not already cached.""" - cached_entry = cache.get_by_details(name, type_, class_) - if not cached_entry: - self.add_question(DNSQuestion(name, type_, class_)) - else: - self.add_answer_at_time(cached_entry, now) - - def add_question_or_all_cache( - self, cache: DNSCache, now: float_, name: str_, type_: int_, class_: int_ - ) -> None: - """Add a question if it is not already cached. - This is currently only used for IPv6 addresses. - """ - cached_entries = cache.get_all_by_details(name, type_, class_) - if not cached_entries: - self.add_question(DNSQuestion(name, type_, class_)) - return - for cached_entry in cached_entries: - self.add_answer_at_time(cached_entry, now) - def _write_byte(self, value: int_) -> None: """Writes a single byte to the packet""" self.data.append(BYTE_TABLE[value]) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 0178a111..6f1bef71 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -2,7 +2,16 @@ import cython from .._cache cimport DNSCache -from .._dns cimport DNSAddress, DNSNsec, DNSPointer, DNSRecord, DNSService, DNSText +from .._dns cimport ( + DNSAddress, + DNSNsec, + DNSPointer, + DNSQuestion, + DNSRecord, + DNSService, + DNSText, +) +from .._history cimport QuestionHistory from .._protocol.outgoing cimport DNSOutgoing from .._record_update cimport RecordUpdate from .._updates cimport RecordUpdateListener @@ -27,18 +36,22 @@ cdef object _FLAGS_QR_QUERY cdef object service_type_name -cdef object DNS_QUESTION_TYPE_QU -cdef object DNS_QUESTION_TYPE_QM +cdef object QU_QUESTION +cdef object QM_QUESTION cdef object _IPVersion_All_value cdef object _IPVersion_V4Only_value cdef cython.set _ADDRESS_RECORD_TYPES +cdef unsigned int _DUPLICATE_QUESTION_INTERVAL + cdef bint TYPE_CHECKING cdef bint IPADDRESS_SUPPORTS_SCOPE_ID cdef object cached_ip_addresses +cdef object randint + 
cdef class ServiceInfo(RecordUpdateListener): cdef public cython.bytes text @@ -123,5 +136,23 @@ cdef class ServiceInfo(RecordUpdateListener): cpdef void async_clear_cache(self) - @cython.locals(cache=DNSCache) + @cython.locals(cache=DNSCache, history=QuestionHistory, out=DNSOutgoing, qu_question=bint) cdef DNSOutgoing _generate_request_query(self, object zc, double now, object question_type) + + @cython.locals(question=DNSQuestion, answer=DNSRecord) + cdef void _add_question_with_known_answers( + self, + DNSOutgoing out, + bint qu_question, + QuestionHistory question_history, + DNSCache cache, + double now, + str name, + object type_, + object class_, + bint skip_if_known_answers + ) + + cdef double _get_initial_delay(self) + + cdef double _get_random_delay(self) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 3a27e10a..48ad1140 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -26,16 +26,19 @@ from ipaddress import IPv4Address, IPv6Address, _BaseAddress from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast +from .._cache import DNSCache from .._dns import ( DNSAddress, DNSNsec, DNSPointer, + DNSQuestion, DNSQuestionType, DNSRecord, DNSService, DNSText, ) from .._exceptions import BadTypeInNameException +from .._history import QuestionHistory from .._logger import log from .._protocol.outgoing import DNSOutgoing from .._record_update import RecordUpdate @@ -61,6 +64,7 @@ _CLASS_IN_UNIQUE, _DNS_HOST_TTL, _DNS_OTHER_TTL, + _DUPLICATE_QUESTION_INTERVAL, _FLAGS_QR_QUERY, _LISTENER_TIME, _MDNS_PORT, @@ -89,10 +93,12 @@ bytes_ = bytes float_ = float int_ = int +str_ = str -DNS_QUESTION_TYPE_QU = DNSQuestionType.QU -DNS_QUESTION_TYPE_QM = DNSQuestionType.QM +QU_QUESTION = DNSQuestionType.QU +QM_QUESTION = DNSQuestionType.QM +randint = random.randint if TYPE_CHECKING: from .._core import Zeroconf @@ -774,6 +780,12 @@ def request( ) ) + def _get_initial_delay(self) -> float_: + return 
_LISTENER_TIME + + def _get_random_delay(self) -> int_: + return randint(*_AVOID_SYNC_DELAY_RANDOM_INTERVAL) + async def async_request( self, zc: 'Zeroconf', @@ -804,7 +816,7 @@ async def async_request( assert zc.loop is not None first_request = True - delay = _LISTENER_TIME + delay = self._get_initial_delay() next_ = now last = now + timeout try: @@ -813,18 +825,25 @@ async def async_request( if last <= now: return False if next_ <= now: - out = self._generate_request_query( - zc, - now, - question_type or DNS_QUESTION_TYPE_QU if first_request else DNS_QUESTION_TYPE_QM, - ) + this_question_type = question_type or QU_QUESTION if first_request else QM_QUESTION + out = self._generate_request_query(zc, now, this_question_type) first_request = False - if not out.questions: - return self._load_from_cache(zc, now) - zc.async_send(out, addr, port) + if out.questions: + # All questions may have been suppressed + # by the question history, so nothing to send, + # but keep waiting for answers in case another + # client on the network is asking the same + # question or they have not arrived yet. + zc.async_send(out, addr, port) next_ = now + delay - delay *= 2 - next_ += random.randint(*_AVOID_SYNC_DELAY_RANDOM_INTERVAL) + next_ += self._get_random_delay() + if this_question_type is QM_QUESTION and delay < _DUPLICATE_QUESTION_INTERVAL: + # If we just asked a QM question, we need to + # wait at least the duplicate question interval + # before asking another QM question otherwise + # its likely to be suppressed by the question + # history of the remote responder. 
+ delay = _DUPLICATE_QUESTION_INTERVAL await self.async_wait(min(next_, last) - now, zc.loop) now = current_time_millis() @@ -833,21 +852,57 @@ async def async_request( return True + def _add_question_with_known_answers( + self, + out: DNSOutgoing, + qu_question: bool, + question_history: QuestionHistory, + cache: DNSCache, + now: float_, + name: str_, + type_: int_, + class_: int_, + skip_if_known_answers: bool, + ) -> None: + """Add a question with known answers if its not suppressed.""" + known_answers = { + answer for answer in cache.get_all_by_details(name, type_, class_) if not answer.is_stale(now) + } + if skip_if_known_answers and known_answers: + return + question = DNSQuestion(name, type_, class_) + if qu_question: + question.unicast = True + elif question_history.suppresses(question, now, known_answers): + return + else: + question_history.add_question_at_time(question, now, known_answers) + out.add_question(question) + for answer in known_answers: + out.add_answer_at_time(answer, now) + def _generate_request_query( self, zc: 'Zeroconf', now: float_, question_type: DNSQuestionType ) -> DNSOutgoing: """Generate the request query.""" out = DNSOutgoing(_FLAGS_QR_QUERY) name = self._name - server_or_name = self.server or name + server = self.server or name cache = zc.cache - out.add_question_or_one_cache(cache, now, name, _TYPE_SRV, _CLASS_IN) - out.add_question_or_one_cache(cache, now, name, _TYPE_TXT, _CLASS_IN) - out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_A, _CLASS_IN) - out.add_question_or_all_cache(cache, now, server_or_name, _TYPE_AAAA, _CLASS_IN) - if question_type is DNS_QUESTION_TYPE_QU: - for question in out.questions: - question.unicast = True + history = zc.question_history + qu_question = question_type is QU_QUESTION + self._add_question_with_known_answers( + out, qu_question, history, cache, now, name, _TYPE_SRV, _CLASS_IN, True + ) + self._add_question_with_known_answers( + out, qu_question, history, cache, now, name, 
_TYPE_TXT, _CLASS_IN, True + ) + self._add_question_with_known_answers( + out, qu_question, history, cache, now, server, _TYPE_A, _CLASS_IN, False + ) + self._add_question_with_known_answers( + out, qu_question, history, cache, now, server, _TYPE_AAAA, _CLASS_IN, False + ) return out def __repr__(self) -> str: diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 482b3b0c..c02d5e05 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -247,7 +247,7 @@ def test_get_info_partial(self): send_event = Event() service_info_event = Event() - last_sent = None # type: Optional[r.DNSOutgoing] + last_sent: Optional[r.DNSOutgoing] = None def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" @@ -280,7 +280,7 @@ def get_service_info_helper(zc, type, name): helper_thread.start() wait_time = 1 - # Expext query for SRV, TXT, A, AAAA + # Expect query for SRV, TXT, A, AAAA send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 4 @@ -290,7 +290,7 @@ def get_service_info_helper(zc, type, name): assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions assert service_info is None - # Expext query for SRV, A, AAAA + # Expect query for SRV, A, AAAA last_sent = None send_event.clear() _inject_response( @@ -315,7 +315,7 @@ def get_service_info_helper(zc, type, name): assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions assert service_info is None - # Expext query for A, AAAA + # Expect query for A, AAAA last_sent = None send_event.clear() _inject_response( @@ -343,7 +343,7 @@ def get_service_info_helper(zc, type, name): last_sent = None assert service_info is None - # Expext no further queries + # Expect no further queries last_sent = None send_event.clear() _inject_response( @@ -377,6 +377,117 @@ def get_service_info_helper(zc, type, name): zc.remove_all_service_listeners() 
zc.close() + @unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') + @unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') + def test_get_info_suppressed_by_question_history(self): + zc = r.Zeroconf(interfaces=['127.0.0.1']) + + service_name = 'name._type._tcp.local.' + service_type = '_type._tcp.local.' + + service_info = None + send_event = Event() + service_info_event = Event() + + last_sent: Optional[r.DNSOutgoing] = None + + def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): + """Sends an outgoing packet.""" + nonlocal last_sent + + last_sent = out + send_event.set() + + # patch the zeroconf send + with patch.object(zc, "async_send", send): + + def mock_incoming_msg(records: Iterable[r.DNSRecord]) -> r.DNSIncoming: + generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + + for record in records: + generated.add_answer_at_time(record, 0) + + return r.DNSIncoming(generated.packets()[0]) + + def get_service_info_helper(zc, type, name): + nonlocal service_info + service_info = zc.get_service_info(type, name) + service_info_event.set() + + try: + helper_thread = threading.Thread( + target=get_service_info_helper, args=(zc, service_type, service_name) + ) + helper_thread.start() + wait_time = (const._LISTENER_TIME + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5) / 1000 + + # Expect query for SRV, TXT, A, AAAA + send_event.wait(wait_time) + assert last_sent is not None + assert len(last_sent.questions) == 4 + assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions + assert service_info is None + + # Expect query for SRV only as A, AAAA, and TXT are suppressed + # by the question history + last_sent = None + 
send_event.clear() + for _ in range(3): + send_event.wait( + wait_time * 0.25 + ) # Wait long enough to be inside the question history window + now = r.current_time_millis() + zc.question_history.add_question_at_time( + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN), now, set() + ) + zc.question_history.add_question_at_time( + r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN), now, set() + ) + zc.question_history.add_question_at_time( + r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN), now, set() + ) + send_event.wait(wait_time * 0.25) + assert last_sent is not None + assert len(last_sent.questions) == 1 # type: ignore[unreachable] + assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions + assert service_info is None + + wait_time = ( + const._DUPLICATE_QUESTION_INTERVAL + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5 + ) / 1000 + # Expect no queries as all are suppressed by the question history + last_sent = None + send_event.clear() + for _ in range(3): + send_event.wait( + wait_time * 0.25 + ) # Wait long enough to be inside the question history window + now = r.current_time_millis() + zc.question_history.add_question_at_time( + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN), now, set() + ) + zc.question_history.add_question_at_time( + r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN), now, set() + ) + zc.question_history.add_question_at_time( + r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN), now, set() + ) + zc.question_history.add_question_at_time( + r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN), now, set() + ) + send_event.wait(wait_time * 0.25) + # All questions are suppressed so no query should be sent + assert last_sent is None + assert service_info is None + + finally: + helper_thread.join() + zc.remove_all_service_listeners() + zc.close() + def test_get_info_single(self): zc = r.Zeroconf(interfaces=['127.0.0.1']) From 
7ffbed800e48c3f0b57596d5551b71c0363ede56 Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 16 Dec 2023 16:33:06 +0000 Subject: [PATCH 213/434] 0.130.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 19 +++++++++++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 32e70bff..d437baa7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,25 @@ +## v0.130.0 (2023-12-16) + +### Feature + +* Make ServiceInfo aware of question history ([#1348](https://github.com/python-zeroconf/python-zeroconf/issues/1348)) ([`b9aae1d`](https://github.com/python-zeroconf/python-zeroconf/commit/b9aae1de07bf1491e873bc314f8a1d7996127ad3)) +* Small speed up to ServiceInfo construction ([#1346](https://github.com/python-zeroconf/python-zeroconf/issues/1346)) ([`b329d99`](https://github.com/python-zeroconf/python-zeroconf/commit/b329d99917bb731b4c70bf20c7c010eeb85ad9fd)) +* Significantly improve efficiency of the ServiceBrowser scheduler ([#1335](https://github.com/python-zeroconf/python-zeroconf/issues/1335)) ([`c65d869`](https://github.com/python-zeroconf/python-zeroconf/commit/c65d869aec731b803484871e9d242a984f9f5848)) +* Small speed up to processing incoming records ([#1345](https://github.com/python-zeroconf/python-zeroconf/issues/1345)) ([`7de655b`](https://github.com/python-zeroconf/python-zeroconf/commit/7de655b6f05012f20a3671e0bcdd44a1913d7b52)) +* Small performance improvement for converting time ([#1342](https://github.com/python-zeroconf/python-zeroconf/issues/1342)) ([`73d3ab9`](https://github.com/python-zeroconf/python-zeroconf/commit/73d3ab90dd3b59caab771235dd6dbedf05bfe0b3)) +* Small performance improvement for ServiceInfo asking questions ([#1341](https://github.com/python-zeroconf/python-zeroconf/issues/1341)) ([`810a309`](https://github.com/python-zeroconf/python-zeroconf/commit/810a3093c5a9411ee97740b468bd706bdf4a95de)) +* Small 
performance improvement constructing outgoing questions ([#1340](https://github.com/python-zeroconf/python-zeroconf/issues/1340)) ([`157185f`](https://github.com/python-zeroconf/python-zeroconf/commit/157185f28bf1e83e6811e2a5cd1fa9b38966f780)) + +### Fix + +* Scheduling race with the QueryScheduler ([#1347](https://github.com/python-zeroconf/python-zeroconf/issues/1347)) ([`cf40470`](https://github.com/python-zeroconf/python-zeroconf/commit/cf40470b89f918d3c24d7889d3536f3ffa44846c)) +* Ensure question history suppresses duplicates ([#1338](https://github.com/python-zeroconf/python-zeroconf/issues/1338)) ([`6f23656`](https://github.com/python-zeroconf/python-zeroconf/commit/6f23656576daa04e3de44e100f3ddd60ee4c560d)) +* Microsecond precision loss in the query handler ([#1339](https://github.com/python-zeroconf/python-zeroconf/issues/1339)) ([`6560fad`](https://github.com/python-zeroconf/python-zeroconf/commit/6560fad584e0d392962c9a9248759f17c416620e)) +* Ensure IPv6 scoped address construction uses the string cache ([#1336](https://github.com/python-zeroconf/python-zeroconf/issues/1336)) ([`f78a196`](https://github.com/python-zeroconf/python-zeroconf/commit/f78a196db632c4fe017a34f1af8a58903c15a575)) + ## v0.129.0 (2023-12-13) ### Feature diff --git a/pyproject.toml b/pyproject.toml index c30d5ba2..d1f58a14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.129.0" +version = "0.130.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b2f0da53..292c8a2f 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.129.0' +__version__ = '0.130.0' __license__ = 'LGPL' From 9eac0a122f28a7a4fa76cbfdda21d9a3571d7abb Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 18 Dec 2023 19:15:39 -1000 Subject: [PATCH 214/434] feat: speed up the query handler (#1350) --- src/zeroconf/_core.py | 49 +--------------- src/zeroconf/_handlers/query_handler.pxd | 25 +++++++- src/zeroconf/_handlers/query_handler.py | 71 +++++++++++++++++++---- src/zeroconf/_handlers/record_manager.pxd | 13 ++--- src/zeroconf/_handlers/record_manager.py | 5 +- src/zeroconf/_listener.pxd | 2 + src/zeroconf/_listener.py | 4 +- src/zeroconf/_protocol/incoming.pxd | 2 +- src/zeroconf/_transport.py | 4 +- tests/conftest.py | 5 +- 10 files changed, 107 insertions(+), 73 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 5827e2d5..3a3381a9 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -31,10 +31,6 @@ from ._dns import DNSQuestion, DNSQuestionType from ._engine import AsyncEngine from ._exceptions import NonUniqueNameException, NotRunningException -from ._handlers.answers import ( - construct_outgoing_multicast_answers, - construct_outgoing_unicast_answers, -) from ._handlers.multicast_outgoing_queue import MulticastOutgoingQueue from ._handlers.query_handler import QueryHandler from ._handlers.record_manager import RecordManager @@ -187,15 +183,15 @@ def __init__( self.registry = ServiceRegistry() self.cache = DNSCache() self.question_history = QuestionHistory() - self.query_handler = QueryHandler(self.registry, self.cache, self.question_history) + self.query_handler = QueryHandler(self) self.record_manager = RecordManager(self) self._notify_futures: 
Set[asyncio.Future] = set() self.loop: Optional[asyncio.AbstractEventLoop] = None self._loop_thread: Optional[threading.Thread] = None - self._out_queue = MulticastOutgoingQueue(self, 0, _AGGREGATION_DELAY) - self._out_delay_queue = MulticastOutgoingQueue(self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY) + self.out_queue = MulticastOutgoingQueue(self, 0, _AGGREGATION_DELAY) + self.out_delay_queue = MulticastOutgoingQueue(self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY) self.start() @@ -567,45 +563,6 @@ def handle_response(self, msg: DNSIncoming) -> None: self.log_warning_once("handle_response is deprecated, use record_manager.async_updates_from_response") self.record_manager.async_updates_from_response(msg) - def handle_assembled_query( - self, - packets: List[DNSIncoming], - addr: str, - port: int, - transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]], - ) -> None: - """Respond to a (re)assembled query. - - If the protocol received packets with the TC bit set, it will - wait a bit for the rest of the packets and only call - handle_assembled_query once it has a complete set of packets - or the timer expires. If the TC bit is not set, a single - packet will be in packets. 
- """ - ucast_source = port != _MDNS_PORT - question_answers = self.query_handler.async_response(packets, ucast_source) - if not question_answers: - return - now = packets[0].now - if question_answers.ucast: - questions = packets[0].questions - id_ = packets[0].id - out = construct_outgoing_unicast_answers(question_answers.ucast, ucast_source, questions, id_) - # When sending unicast, only send back the reply - # via the same socket that it was recieved from - # as we know its reachable from that socket - self.async_send(out, addr, port, v6_flow_scope, transport) - if question_answers.mcast_now: - self.async_send(construct_outgoing_multicast_answers(question_answers.mcast_now)) - if question_answers.mcast_aggregate: - self._out_queue.async_add(now, question_answers.mcast_aggregate) - if question_answers.mcast_aggregate_last_second: - # https://datatracker.ietf.org/doc/html/rfc6762#section-14 - # If we broadcast it in the last second, we have to delay - # at least a second before we send it again - self._out_delay_queue.async_add(now, question_answers.mcast_aggregate_last_second) - def send( self, out: DNSOutgoing, diff --git a/src/zeroconf/_handlers/query_handler.pxd b/src/zeroconf/_handlers/query_handler.pxd index 3e726a53..bb7198be 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -7,7 +7,12 @@ from .._history cimport QuestionHistory from .._protocol.incoming cimport DNSIncoming from .._services.info cimport ServiceInfo from .._services.registry cimport ServiceRegistry -from .answers cimport QuestionAnswers +from .answers cimport ( + QuestionAnswers, + construct_outgoing_multicast_answers, + construct_outgoing_unicast_answers, +) +from .multicast_outgoing_queue cimport MulticastOutgoingQueue cdef bint TYPE_CHECKING @@ -65,6 +70,7 @@ cdef class _QueryResponse: cdef class QueryHandler: + cdef object zc cdef ServiceRegistry registry cdef DNSCache cache cdef QuestionHistory question_history @@ -93,7 +99,22 @@ cdef 
class QueryHandler: is_probe=object, now=double ) - cpdef async_response(self, cython.list msgs, cython.bint unicast_source) + cpdef QuestionAnswers async_response(self, cython.list msgs, cython.bint unicast_source) @cython.locals(name=str, question_lower_name=str) cdef _get_answer_strategies(self, DNSQuestion question) + + @cython.locals( + first_packet=DNSIncoming, + ucast_source=bint, + out_queue=MulticastOutgoingQueue, + out_delay_queue=MulticastOutgoingQueue + ) + cpdef void handle_assembled_query( + self, + list packets, + object addr, + object port, + object transport, + tuple v6_flow_scope + ) diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index c66d9c30..8349b584 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -20,19 +20,19 @@ USA """ -from typing import TYPE_CHECKING, List, Optional, Set, cast +from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Union, cast from .._cache import DNSCache, _UniqueRecordsType from .._dns import DNSAddress, DNSPointer, DNSQuestion, DNSRecord, DNSRRSet -from .._history import QuestionHistory from .._protocol.incoming import DNSIncoming from .._services.info import ServiceInfo -from .._services.registry import ServiceRegistry +from .._transport import _WrappedTransport from .._utils.net import IPVersion from ..const import ( _ADDRESS_RECORD_TYPES, _CLASS_IN, _DNS_OTHER_TTL, + _MDNS_PORT, _ONE_SECOND, _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_A, @@ -43,7 +43,12 @@ _TYPE_SRV, _TYPE_TXT, ) -from .answers import QuestionAnswers, _AnswerWithAdditionalsType +from .answers import ( + QuestionAnswers, + _AnswerWithAdditionalsType, + construct_outgoing_multicast_answers, + construct_outgoing_unicast_answers, +) _RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} @@ -53,7 +58,7 @@ _IPVersion_ALL = IPVersion.All _int = int - +_str = str _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION = 0 _ANSWER_STRATEGY_POINTER = 1 
@@ -61,6 +66,9 @@ _ANSWER_STRATEGY_SERVICE = 3 _ANSWER_STRATEGY_TEXT = 4 +if TYPE_CHECKING: + from .._core import Zeroconf + class _AnswerStrategy: @@ -183,13 +191,14 @@ def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: class QueryHandler: """Query the ServiceRegistry.""" - __slots__ = ("registry", "cache", "question_history") + __slots__ = ("zc", "registry", "cache", "question_history") - def __init__(self, registry: ServiceRegistry, cache: DNSCache, question_history: QuestionHistory) -> None: + def __init__(self, zc: 'Zeroconf') -> None: """Init the query handler.""" - self.registry = registry - self.cache = cache - self.question_history = question_history + self.zc = zc + self.registry = zc.registry + self.cache = zc.cache + self.question_history = zc.question_history def _add_service_type_enumeration_query_answers( self, types: List[str], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet @@ -385,3 +394,45 @@ def _get_answer_strategies( ) return strategies + + def handle_assembled_query( + self, + packets: List[DNSIncoming], + addr: _str, + port: _int, + transport: _WrappedTransport, + v6_flow_scope: Union[Tuple[()], Tuple[int, int]], + ) -> None: + """Respond to a (re)assembled query. + + If the protocol recieved packets with the TC bit set, it will + wait a bit for the rest of the packets and only call + handle_assembled_query once it has a complete set of packets + or the timer expires. If the TC bit is not set, a single + packet will be in packets. 
+ """ + first_packet = packets[0] + now = first_packet.now + ucast_source = port != _MDNS_PORT + question_answers = self.async_response(packets, ucast_source) + if not question_answers: + return + if question_answers.ucast: + questions = first_packet.questions + id_ = first_packet.id + out = construct_outgoing_unicast_answers(question_answers.ucast, ucast_source, questions, id_) + # When sending unicast, only send back the reply + # via the same socket that it was recieved from + # as we know its reachable from that socket + self.zc.async_send(out, addr, port, v6_flow_scope, transport) + if question_answers.mcast_now: + self.zc.async_send(construct_outgoing_multicast_answers(question_answers.mcast_now)) + if question_answers.mcast_aggregate: + out_queue = self.zc.out_queue + out_queue.async_add(now, question_answers.mcast_aggregate) + if question_answers.mcast_aggregate_last_second: + # https://datatracker.ietf.org/doc/html/rfc6762#section-14 + # If we broadcast it in the last second, we have to delay + # at least a second before we send it again + out_delay_queue = self.zc.out_delay_queue + out_delay_queue.async_add(now, question_answers.mcast_aggregate_last_second) diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index 0f543aff..5be2c283 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -22,22 +22,21 @@ cdef class RecordManager: cdef public DNSCache cache cdef public cython.set listeners - cpdef async_updates(self, object now, object records) + cpdef void async_updates(self, object now, object records) - cpdef async_updates_complete(self, object notify) + cpdef void async_updates_complete(self, bint notify) @cython.locals( cache=DNSCache, record=DNSRecord, answers=cython.list, maybe_entry=DNSRecord, - now_double=double ) - cpdef async_updates_from_response(self, DNSIncoming msg) + cpdef void async_updates_from_response(self, DNSIncoming msg) - cpdef 
async_add_listener(self, RecordUpdateListener listener, object question) + cpdef void async_add_listener(self, RecordUpdateListener listener, object question) - cpdef async_remove_listener(self, RecordUpdateListener listener) + cpdef void async_remove_listener(self, RecordUpdateListener listener) @cython.locals(question=DNSQuestion, record=DNSRecord) - cdef _async_update_matching_records(self, RecordUpdateListener listener, cython.list questions) + cdef void _async_update_matching_records(self, RecordUpdateListener listener, cython.list questions) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 129acd0b..0a0f6c54 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -84,7 +84,6 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: other_adds: List[DNSRecord] = [] removes: Set[DNSRecord] = set() now = msg.now - now_double = now unique_types: Set[Tuple[str, int, int]] = set() cache = self.cache answers = msg.answers() @@ -113,7 +112,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: record = cast(_UniqueRecordsType, record) maybe_entry = cache.async_get_unique(record) - if not record.is_expired(now_double): + if not record.is_expired(now): if maybe_entry is not None: maybe_entry.reset_ttl(record) else: @@ -129,7 +128,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: removes.add(record) if unique_types: - cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now_double) + cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now) if updates: self.async_updates(now, updates) diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 8b144653..96f52be0 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -1,6 +1,7 @@ import cython +from ._handlers.query_handler cimport QueryHandler from ._handlers.record_manager cimport 
RecordManager from ._protocol.incoming cimport DNSIncoming from ._services.registry cimport ServiceRegistry @@ -21,6 +22,7 @@ cdef class AsyncListener: cdef public object zc cdef ServiceRegistry _registry cdef RecordManager _record_manager + cdef QueryHandler _query_handler cdef public cython.bytes data cdef public double last_time cdef public DNSIncoming last_message diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 23d24578..0f8a8cac 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -59,6 +59,7 @@ class AsyncListener: 'zc', '_registry', '_record_manager', + "_query_handler", 'data', 'last_time', 'last_message', @@ -72,6 +73,7 @@ def __init__(self, zc: 'Zeroconf') -> None: self.zc = zc self._registry = zc.registry self._record_manager = zc.record_manager + self._query_handler = zc.query_handler self.data: Optional[bytes] = None self.last_time: float = 0 self.last_message: Optional[DNSIncoming] = None @@ -228,7 +230,7 @@ def _respond_query( if msg: packets.append(msg) - self.zc.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) + self._query_handler.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) def error_received(self, exc: Exception) -> None: """Likely socket closed or IPv6.""" diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index 07ae6e78..a8c0dbdb 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -56,7 +56,7 @@ cdef class DNSIncoming: cdef cython.uint _num_authorities cdef cython.uint _num_additionals cdef public bint valid - cdef public object now + cdef public double now cdef public object scope_id cdef public object source cdef bint _has_qu_question diff --git a/src/zeroconf/_transport.py b/src/zeroconf/_transport.py index 7f6d7ac8..c37af2ef 100644 --- a/src/zeroconf/_transport.py +++ b/src/zeroconf/_transport.py @@ -22,7 +22,7 @@ import asyncio import socket -from typing import Any +from 
typing import Tuple class _WrappedTransport: @@ -42,7 +42,7 @@ def __init__( is_ipv6: bool, sock: socket.socket, fileno: int, - sock_name: Any, + sock_name: Tuple, ) -> None: """Initialize the wrapped transport. diff --git a/tests/conftest.py b/tests/conftest.py index c0e926a3..5525c4ee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,6 +9,7 @@ import pytest from zeroconf import _core, const +from zeroconf._handlers import query_handler @pytest.fixture(autouse=True) @@ -23,7 +24,9 @@ def verify_threads_ended(): @pytest.fixture def run_isolated(): """Change the mDNS port to run the test in isolation.""" - with patch.object(_core, "_MDNS_PORT", 5454), patch.object(const, "_MDNS_PORT", 5454): + with patch.object(query_handler, "_MDNS_PORT", 5454), patch.object( + _core, "_MDNS_PORT", 5454 + ), patch.object(const, "_MDNS_PORT", 5454): yield From a014c7caac50ad71085ddcaf010a702e972e83f8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 18 Dec 2023 21:50:28 -1000 Subject: [PATCH 215/434] chore: remove deprecated handle_response (#1353) --- src/zeroconf/_core.py | 7 ------- tests/test_asyncio.py | 1 - 2 files changed, 8 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 3a3381a9..156e0b1a 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -36,7 +36,6 @@ from ._handlers.record_manager import RecordManager from ._history import QuestionHistory from ._logger import QuietLogger, log -from ._protocol.incoming import DNSIncoming from ._protocol.outgoing import DNSOutgoing from ._services import ServiceListener from ._services.browser import ServiceBrowser @@ -557,12 +556,6 @@ def async_remove_listener(self, listener: RecordUpdateListener) -> None: """ self.record_manager.async_remove_listener(listener) - def handle_response(self, msg: DNSIncoming) -> None: - """Deal with incoming response packets. 
All answers - are held in the cache, and listeners are notified.""" - self.log_warning_once("handle_response is deprecated, use record_manager.async_updates_from_response") - self.record_manager.async_updates_from_response(msg) - def send( self, out: DNSOutgoing, diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index d4594788..63255158 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1246,7 +1246,6 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de ) zc.record_manager.async_updates_from_response(DNSIncoming(generated.packets()[0])) - zc.handle_response(DNSIncoming(generated.packets()[0])) await browser.async_cancel() await asyncio.sleep(0) From 6c153258a995cf9459a6f23267b7e379b5e2550f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 18 Dec 2023 21:50:45 -1000 Subject: [PATCH 216/434] feat: speed up processing incoming packets (#1352) --- src/zeroconf/_cache.pxd | 16 +++++++------- src/zeroconf/_core.py | 7 ++++--- src/zeroconf/_handlers/answers.pxd | 14 ++++++------- .../_handlers/multicast_outgoing_queue.pxd | 6 +++--- src/zeroconf/_handlers/query_handler.pxd | 18 ++++++++-------- src/zeroconf/_handlers/query_handler.py | 21 +++++++++---------- src/zeroconf/_protocol/incoming.pxd | 12 +++++------ 7 files changed, 47 insertions(+), 47 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index 84107957..af27a1d5 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -26,23 +26,23 @@ cdef class DNSCache: cpdef bint async_add_records(self, object entries) - cpdef async_remove_records(self, object entries) + cpdef void async_remove_records(self, object entries) @cython.locals( store=cython.dict, ) - cpdef async_get_unique(self, DNSRecord entry) + cpdef DNSRecord async_get_unique(self, DNSRecord entry) @cython.locals( record=DNSRecord, ) - cpdef async_expire(self, double now) + cpdef list async_expire(self, double now) @cython.locals( records=cython.dict, 
record=DNSRecord, ) - cpdef async_all_by_details(self, str name, object type_, object class_) + cpdef list async_all_by_details(self, str name, object type_, object class_) cpdef cython.dict async_entries_with_name(self, str name) @@ -51,7 +51,7 @@ cdef class DNSCache: @cython.locals( cached_entry=DNSRecord, ) - cpdef get_by_details(self, str name, object type_, object class_) + cpdef DNSRecord get_by_details(self, str name, object type_, object class_) @cython.locals( records=cython.dict, @@ -62,12 +62,12 @@ cdef class DNSCache: @cython.locals( store=cython.dict, ) - cdef _async_add(self, DNSRecord record) + cdef bint _async_add(self, DNSRecord record) - cdef _async_remove(self, DNSRecord record) + cdef void _async_remove(self, DNSRecord record) @cython.locals( record=DNSRecord, created_double=double, ) - cpdef async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, double now) + cpdef void async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, double now) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 156e0b1a..4b29717a 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -182,6 +182,10 @@ def __init__( self.registry = ServiceRegistry() self.cache = DNSCache() self.question_history = QuestionHistory() + + self.out_queue = MulticastOutgoingQueue(self, 0, _AGGREGATION_DELAY) + self.out_delay_queue = MulticastOutgoingQueue(self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY) + self.query_handler = QueryHandler(self) self.record_manager = RecordManager(self) @@ -189,9 +193,6 @@ def __init__( self.loop: Optional[asyncio.AbstractEventLoop] = None self._loop_thread: Optional[threading.Thread] = None - self.out_queue = MulticastOutgoingQueue(self, 0, _AGGREGATION_DELAY) - self.out_delay_queue = MulticastOutgoingQueue(self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY) - self.start() @property diff --git a/src/zeroconf/_handlers/answers.pxd 
b/src/zeroconf/_handlers/answers.pxd index 5a3010ad..25b3c1a1 100644 --- a/src/zeroconf/_handlers/answers.pxd +++ b/src/zeroconf/_handlers/answers.pxd @@ -7,10 +7,10 @@ from .._protocol.outgoing cimport DNSOutgoing cdef class QuestionAnswers: - cdef public object ucast - cdef public object mcast_now - cdef public object mcast_aggregate - cdef public object mcast_aggregate_last_second + cdef public dict ucast + cdef public dict mcast_now + cdef public dict mcast_aggregate + cdef public dict mcast_aggregate_last_second cdef class AnswerGroup: @@ -25,11 +25,11 @@ cdef class AnswerGroup: cdef object _FLAGS_QR_RESPONSE_AA cdef object NAME_GETTER -cpdef construct_outgoing_multicast_answers(cython.dict answers) +cpdef DNSOutgoing construct_outgoing_multicast_answers(cython.dict answers) -cpdef construct_outgoing_unicast_answers( +cpdef DNSOutgoing construct_outgoing_unicast_answers( cython.dict answers, bint ucast_source, cython.list questions, object id_ ) @cython.locals(answer=DNSRecord, additionals=cython.set, additional=DNSRecord) -cdef _add_answers_additionals(DNSOutgoing out, cython.dict answers) +cdef void _add_answers_additionals(DNSOutgoing out, cython.dict answers) diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd index 1a8d6741..88cfdaa0 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.pxd +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.pxd @@ -19,9 +19,9 @@ cdef class MulticastOutgoingQueue: cdef object _aggregation_delay @cython.locals(last_group=AnswerGroup, random_int=cython.uint) - cpdef async_add(self, double now, cython.dict answers) + cpdef void async_add(self, double now, cython.dict answers) @cython.locals(pending=AnswerGroup) - cdef _remove_answers_from_queue(self, cython.dict answers) + cdef void _remove_answers_from_queue(self, cython.dict answers) - cpdef async_ready(self) + cpdef void async_ready(self) diff --git a/src/zeroconf/_handlers/query_handler.pxd 
b/src/zeroconf/_handlers/query_handler.pxd index bb7198be..89a1f2b2 100644 --- a/src/zeroconf/_handlers/query_handler.pxd +++ b/src/zeroconf/_handlers/query_handler.pxd @@ -53,12 +53,12 @@ cdef class _QueryResponse: cdef cython.set _mcast_aggregate_last_second @cython.locals(record=DNSRecord) - cdef add_qu_question_response(self, cython.dict answers) + cdef void add_qu_question_response(self, cython.dict answers) - cdef add_ucast_question_response(self, cython.dict answers) + cdef void add_ucast_question_response(self, cython.dict answers) @cython.locals(answer=DNSRecord, question=DNSQuestion) - cdef add_mcast_question_response(self, cython.dict answers) + cdef void add_mcast_question_response(self, cython.dict answers) @cython.locals(maybe_entry=DNSRecord) cdef bint _has_mcast_within_one_quarter_ttl(self, DNSRecord record) @@ -74,15 +74,17 @@ cdef class QueryHandler: cdef ServiceRegistry registry cdef DNSCache cache cdef QuestionHistory question_history + cdef MulticastOutgoingQueue out_queue + cdef MulticastOutgoingQueue out_delay_queue @cython.locals(service=ServiceInfo) - cdef _add_service_type_enumeration_query_answers(self, list types, cython.dict answer_set, DNSRRSet known_answers) + cdef void _add_service_type_enumeration_query_answers(self, list types, cython.dict answer_set, DNSRRSet known_answers) @cython.locals(service=ServiceInfo) - cdef _add_pointer_answers(self, list services, cython.dict answer_set, DNSRRSet known_answers) + cdef void _add_pointer_answers(self, list services, cython.dict answer_set, DNSRRSet known_answers) @cython.locals(service=ServiceInfo, dns_address=DNSAddress) - cdef _add_address_answers(self, list services, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) + cdef void _add_address_answers(self, list services, cython.dict answer_set, DNSRRSet known_answers, cython.uint type_) @cython.locals(question_lower_name=str, type_=cython.uint, service=ServiceInfo) cdef cython.dict _answer_question(self, DNSQuestion 
question, unsigned int strategy_type, list types, list services, DNSRRSet known_answers) @@ -102,13 +104,11 @@ cdef class QueryHandler: cpdef QuestionAnswers async_response(self, cython.list msgs, cython.bint unicast_source) @cython.locals(name=str, question_lower_name=str) - cdef _get_answer_strategies(self, DNSQuestion question) + cdef list _get_answer_strategies(self, DNSQuestion question) @cython.locals( first_packet=DNSIncoming, ucast_source=bint, - out_queue=MulticastOutgoingQueue, - out_delay_queue=MulticastOutgoingQueue ) cpdef void handle_assembled_query( self, diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index 8349b584..ba9c9e31 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -191,7 +191,7 @@ def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: class QueryHandler: """Query the ServiceRegistry.""" - __slots__ = ("zc", "registry", "cache", "question_history") + __slots__ = ("zc", "registry", "cache", "question_history", "out_queue", "out_delay_queue") def __init__(self, zc: 'Zeroconf') -> None: """Init the query handler.""" @@ -199,6 +199,8 @@ def __init__(self, zc: 'Zeroconf') -> None: self.registry = zc.registry self.cache = zc.cache self.question_history = zc.question_history + self.out_queue = zc.out_queue + self.out_delay_queue = zc.out_delay_queue def _add_service_type_enumeration_query_answers( self, types: List[str], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet @@ -301,7 +303,7 @@ def async_response( # pylint: disable=unused-argument """ strategies: List[_AnswerStrategy] = [] for msg in msgs: - for question in msg.questions: + for question in msg._questions: strategies.extend(self._get_answer_strategies(question)) if not strategies: @@ -311,7 +313,8 @@ def async_response( # pylint: disable=unused-argument return None is_probe = False - questions = msg.questions + msg = msgs[0] + questions = msg._questions # 
Only decode known answers if we are not a probe and we have # at least one answer strategy answers: List[DNSRecord] = [] @@ -321,7 +324,6 @@ def async_response( # pylint: disable=unused-argument else: answers.extend(msg.answers()) - msg = msgs[0] query_res = _QueryResponse(self.cache, questions, is_probe, msg.now) known_answers = DNSRRSet(answers) known_answers_set: Optional[Set[DNSRecord]] = None @@ -412,13 +414,12 @@ def handle_assembled_query( packet will be in packets. """ first_packet = packets[0] - now = first_packet.now ucast_source = port != _MDNS_PORT question_answers = self.async_response(packets, ucast_source) - if not question_answers: + if question_answers is None: return if question_answers.ucast: - questions = first_packet.questions + questions = first_packet._questions id_ = first_packet.id out = construct_outgoing_unicast_answers(question_answers.ucast, ucast_source, questions, id_) # When sending unicast, only send back the reply @@ -428,11 +429,9 @@ def handle_assembled_query( if question_answers.mcast_now: self.zc.async_send(construct_outgoing_multicast_answers(question_answers.mcast_now)) if question_answers.mcast_aggregate: - out_queue = self.zc.out_queue - out_queue.async_add(now, question_answers.mcast_aggregate) + self.out_queue.async_add(first_packet.now, question_answers.mcast_aggregate) if question_answers.mcast_aggregate_last_second: # https://datatracker.ietf.org/doc/html/rfc6762#section-14 # If we broadcast it in the last second, we have to delay # at least a second before we send it again - out_delay_queue = self.zc.out_delay_queue - out_delay_queue.async_add(now, question_answers.mcast_aggregate_last_second) + self.out_delay_queue.async_add(first_packet.now, question_answers.mcast_aggregate_last_second) diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index a8c0dbdb..bb438303 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -70,7 +70,7 @@ cdef class 
DNSIncoming: cpdef bint is_probe(self) - cpdef answers(self) + cpdef list answers(self) cpdef bint is_response(self) @@ -86,16 +86,16 @@ cdef class DNSIncoming: cdef unsigned int _decode_labels_at_offset(self, unsigned int off, cython.list labels, cython.set seen_pointers) @cython.locals(offset="unsigned int") - cdef _read_header(self) + cdef void _read_header(self) - cdef _initial_parse(self) + cdef void _initial_parse(self) @cython.locals( end="unsigned int", length="unsigned int", offset="unsigned int" ) - cdef _read_others(self) + cdef void _read_others(self) @cython.locals(offset="unsigned int") cdef _read_questions(self) @@ -123,6 +123,6 @@ cdef class DNSIncoming: i="unsigned int", bitmap_length="unsigned int", ) - cdef _read_bitmap(self, unsigned int end) + cdef list _read_bitmap(self, unsigned int end) - cdef _read_name(self) + cdef str _read_name(self) From 517d7d00ca7738c770077738125aec0e4824c000 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 18 Dec 2023 22:29:22 -1000 Subject: [PATCH 217/434] feat: small speed up to constructing outgoing packets (#1354) --- src/zeroconf/_protocol/outgoing.pxd | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index 2496a988..fa1aeebc 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -50,17 +50,17 @@ cdef class DNSOutgoing: cdef public cython.list authorities cdef public cython.list additionals - cpdef _reset_for_next_packet(self) + cpdef void _reset_for_next_packet(self) - cdef _write_byte(self, cython.uint value) + cdef void _write_byte(self, cython.uint value) cdef void _insert_short_at_start(self, unsigned int value) - cdef _replace_short(self, cython.uint index, cython.uint value) + cdef void _replace_short(self, cython.uint index, cython.uint value) cdef _get_short(self, cython.uint value) - cdef _write_int(self, object value) + cdef void 
_write_int(self, object value) cdef cython.bint _write_question(self, DNSQuestion question) @@ -73,7 +73,7 @@ cdef class DNSOutgoing: cdef cython.bint _write_record(self, DNSRecord record, double now) @cython.locals(class_=cython.uint) - cdef _write_record_class(self, DNSEntry record) + cdef void _write_record_class(self, DNSEntry record) @cython.locals( start_size_int=object @@ -91,7 +91,7 @@ cdef class DNSOutgoing: cdef bint _has_more_to_add(self, unsigned int questions_offset, unsigned int answer_offset, unsigned int authority_offset, unsigned int additional_offset) - cdef _write_ttl(self, DNSRecord record, double now) + cdef void _write_ttl(self, DNSRecord record, double now) @cython.locals( labels=cython.list, @@ -100,16 +100,16 @@ cdef class DNSOutgoing: start_size=cython.uint, name_length=cython.uint, ) - cpdef write_name(self, cython.str name) + cpdef void write_name(self, cython.str name) - cdef _write_link_to_name(self, unsigned int index) + cdef void _write_link_to_name(self, unsigned int index) - cpdef write_short(self, cython.uint value) + cpdef void write_short(self, cython.uint value) - cpdef write_string(self, cython.bytes value) + cpdef void write_string(self, cython.bytes value) @cython.locals(utfstr=bytes) - cpdef _write_utf(self, cython.str value) + cdef void _write_utf(self, cython.str value) @cython.locals( debug_enable=bint, From dfc9b8d7dec519ca713a811613122718cb2d733e Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 19 Dec 2023 08:38:02 +0000 Subject: [PATCH 218/434] 0.131.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 8 ++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d437baa7..4e2fbc0d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,14 @@ +## v0.131.0 (2023-12-19) + +### Feature + +* Small speed up to constructing outgoing packets 
([#1354](https://github.com/python-zeroconf/python-zeroconf/issues/1354)) ([`517d7d0`](https://github.com/python-zeroconf/python-zeroconf/commit/517d7d00ca7738c770077738125aec0e4824c000)) +* Speed up processing incoming packets ([#1352](https://github.com/python-zeroconf/python-zeroconf/issues/1352)) ([`6c15325`](https://github.com/python-zeroconf/python-zeroconf/commit/6c153258a995cf9459a6f23267b7e379b5e2550f)) +* Speed up the query handler ([#1350](https://github.com/python-zeroconf/python-zeroconf/issues/1350)) ([`9eac0a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9eac0a122f28a7a4fa76cbfdda21d9a3571d7abb)) + ## v0.130.0 (2023-12-16) ### Feature diff --git a/pyproject.toml b/pyproject.toml index d1f58a14..c711d9a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.130.0" +version = "0.131.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 292c8a2f..e6b8e481 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.130.0' +__version__ = '0.131.0' __license__ = 'LGPL' From 4877829e6442de5426db152d11827b1ba85dbf59 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 7 Feb 2024 16:59:31 -0600 Subject: [PATCH 219/434] feat: drop python 3.7 support (#1359) --- .github/workflows/ci.yml | 12 +- poetry.lock | 308 ++++++++++++++++----------------------- pyproject.toml | 4 +- src/zeroconf/const.py | 6 +- 4 files changed, 135 insertions(+), 195 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index da9db349..00d3fe9b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,13 +36,13 @@ jobs: fail-fast: false matrix: python-version: - - "3.7" - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" - - "pypy-3.7" + - "pypy-3.8" + - "pypy-3.9" os: - ubuntu-latest - macos-latest @@ -56,7 +56,13 @@ jobs: - os: windows-latest extension: use_cython - os: windows-latest - python-version: "pypy-3.7" + python-version: "pypy-3.8" + - os: windows-latest + python-version: "pypy-3.9" + - os: macos-latest + python-version: "pypy-3.8" + - os: macos-latest + python-version: "pypy-3.9" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 diff --git a/poetry.lock b/poetry.lock index 71c5d27c..a9a7c6c2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "async-timeout" @@ -11,9 +11,6 @@ files = [ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] -[package.dependencies] -typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} - [[package]] name = "colorama" version = "0.4.6" @@ -27,71 +24,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.7" +version = "7.4.1" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = 
"sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = 
"coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = 
"coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = 
"coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, + {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, + {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, + {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, + {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, + {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, + {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, + {file = 
"coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, + {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, + {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, + {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, + {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, + {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, + {file = 
"coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, + {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, + {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, + {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, + {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, + {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, + {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, + {file = 
"coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, + {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, + {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, + {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, + {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, + {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, + {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, + 
{file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, + {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, + {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, + {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, ] [package.dependencies] @@ -102,69 +91,69 @@ toml = ["tomli"] [[package]] name = "cython" -version = "3.0.6" +version = "3.0.8" description = "The Cython compiler for writing C extensions in the Python language." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "Cython-3.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fcdfbf6fc7d0bd683d55e617c3d5a5f25b28ce8b405bc1e89054fc7c52a97e5"}, - {file = "Cython-3.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccbee314f8d15ee8ddbe270859dda427e1187123f2c7c41526d1f260eee6c8f7"}, - {file = "Cython-3.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14b992f36ffa1294921fca5f6488ea192fadd75770dc64fa25975379382551e9"}, - {file = "Cython-3.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2e90a75d405070f3c41e701bb8005892f14d42322f1d8fd00a61d660bbae7"}, - {file = "Cython-3.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4121c1160bc1bd8828546e8ce45906bd9ff27799d14747ce3fbbc9d67efbb1b8"}, - {file = "Cython-3.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:519814b8f80869ee5f9ee2cb2363e5c310067c0298cbea291c556b22da1ef6ae"}, - {file = "Cython-3.0.6-cp310-cp310-win32.whl", hash = "sha256:b029d8c754ef867ab4d67fc2477dde9782bf0409cb8e4024a7d29cf5aff37530"}, - {file = "Cython-3.0.6-cp310-cp310-win_amd64.whl", hash = 
"sha256:2262390f453eedf600e084b074144286576ed2a56bb7fbfe15ad8d9499eceb52"}, - {file = "Cython-3.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfe8c7ac60363769ed8d91fca26398aaa9640368ab999a79b0ccb5e788d3bcf8"}, - {file = "Cython-3.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e31a9b18ec6ce57eb3479df920e6093596fe4ba8010dcc372720040386b4bdb"}, - {file = "Cython-3.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca2542f1f34f0141475b13777df040c31f2073a055097734a0a793ac3a4fb72"}, - {file = "Cython-3.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b24c1c38dad4bd85e142ccbe2f88122807f8d5a75352321e1e4baf2b293df7c6"}, - {file = "Cython-3.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dc4b4e76c1414584bb55465dfb6f41dd6bd27fd53fb41ddfcaca9edf00c1f80e"}, - {file = "Cython-3.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:805a2c532feee09aeed064eaeb7b6ee35cbab650569d0a3756975f3cc4f246cf"}, - {file = "Cython-3.0.6-cp311-cp311-win32.whl", hash = "sha256:dcdb9a177c7c385fe0c0709a9a6790b6508847d67dcac76bb65a2c7ea447efe5"}, - {file = "Cython-3.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:b8640b7f6503292c358cef925df5a69adf230045719893ffe20ad98024fdf7ae"}, - {file = "Cython-3.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:16b3b02cc7b3bc42ee1a0118b1465ca46b0f3fb32d003e6f1a3a352a819bb9a3"}, - {file = "Cython-3.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11e1d9b153573c425846b627bef52b3b99cb73d4fbfbb136e500a878d4b5e803"}, - {file = "Cython-3.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85a7a406f78c2f297bf82136ff5deac3150288446005ed1e56552a9e3ac1469f"}, - {file = "Cython-3.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88be4fbc760de8f313df89ca8256098c0963c9ec72f3aa88538384b80ef1a6ef"}, - 
{file = "Cython-3.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ea2e5a7c503b41618bfb10e4bc610f780ab1c729280531b5cabb24e05aa21cf2"}, - {file = "Cython-3.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d296b48e1410cab50220a28a834167f2d7ac6c0e7de12834d66e42248a1b0f6"}, - {file = "Cython-3.0.6-cp312-cp312-win32.whl", hash = "sha256:7f19e99c6e334e9e30dfa844c3ca4ac09931b94dbba406c646bde54687aed758"}, - {file = "Cython-3.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:9cae02e26967ffb6503c6e91b77010acbadfb7189a5a11d6158d634fb0f73679"}, - {file = "Cython-3.0.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cb6a54543869a5b0ad009d86eb0ebc0879fab838392bfd253ad6d4f5e0f17d84"}, - {file = "Cython-3.0.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d2d9e53bf021cc7a5c7b6b537b5b5a7ba466ba7348d498aa17499d0ad12637e"}, - {file = "Cython-3.0.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05d15854b2b363b35c755d22015c1c2fc590b8128202f8c9eb85578461101d9c"}, - {file = "Cython-3.0.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5548316497a3b8b2d9da575ea143476472db90dee73c67def061621940f78ae"}, - {file = "Cython-3.0.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9b853e0855e4b3d164c05b24718e5e2df369e5af54f47cb8d923c4f497dfc92c"}, - {file = "Cython-3.0.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2c77f97f462a40a319dda7e28c1669370cb26f9175f3e8f9bab99d2f8f3f2f09"}, - {file = "Cython-3.0.6-cp36-cp36m-win32.whl", hash = "sha256:3ac8b6734f2cad5640f2da21cd33cf88323547d07e445fb7453ab38ec5033b1f"}, - {file = "Cython-3.0.6-cp36-cp36m-win_amd64.whl", hash = "sha256:8dd5f5f3587909ff71f0562f50e00d4b836c948e56e8f74897b12f38a29e41b9"}, - {file = "Cython-3.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9c0472c6394750469062deb2c166125b10411636f63a0418b5c36a60d0c9a96a"}, - {file = 
"Cython-3.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97081932c8810bb99cb26b4b0402202a1764b58ee287c8b306071d2848148c24"}, - {file = "Cython-3.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e781b3880dfd0d4d37983c9d414bfd5f26c2141f6d763d20ef1964a0a4cb2405"}, - {file = "Cython-3.0.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef88c46e91e21772a5d3b6b1e70a6da5fe098154ad4768888129b1c05e93bba7"}, - {file = "Cython-3.0.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a38b9e7a252ec27dbc21ee8f00f09a896e88285eebb6ed99207b2ff1ea6af28e"}, - {file = "Cython-3.0.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4975cdaf720d29288ec225b76b4f4471ff03f4f8b51841ba85d6587699ab2ad5"}, - {file = "Cython-3.0.6-cp37-cp37m-win32.whl", hash = "sha256:9b89463ea330318461ca47d3e49b5f606e7e82446b6f37e5c19b60392439674c"}, - {file = "Cython-3.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:0ca8f379b47417bfad98faeb14bf8a3966fc92cf69f8aaf7635cf6885e50d001"}, - {file = "Cython-3.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b3dda1e80eb577b9563cee6cf31923a7b88836b9f9be0043ec545b138b95d8e8"}, - {file = "Cython-3.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:267e34e9a96f98c379100ef4192994a311678fb5c9af34c83ba5230223577581"}, - {file = "Cython-3.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:345d9112fde4ae0347d656f58591fd52017c61a19779c95423bb38735fe4a401"}, - {file = "Cython-3.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25da0e51331ac12ff16cd858d1d836e092c984e1dc45d338166081d3802297c0"}, - {file = "Cython-3.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:eebbf09089b4988b9f398ed46f168892e32fcfeec346b15954fdd818aa103456"}, - {file = "Cython-3.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:e3ed0c125556324fa49b9e92bea13be7b158fcae6f72599d63c8733688257788"}, - {file = "Cython-3.0.6-cp38-cp38-win32.whl", hash = "sha256:86e1e5a5c9157a547d0a769de59c98a1fc5e46cfad976f32f60423cc6de11052"}, - {file = "Cython-3.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:0d45a84a315bd84d1515cd3571415a0ee0709eb4e2cd4b13668ede928af344a7"}, - {file = "Cython-3.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a8e788e64b659bb8fe980bc37da3118e1f7285dec40c5fb293adabc74d4205f2"}, - {file = "Cython-3.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a77a174c7fb13d80754c8bf9912efd3f3696d13285b2f568eca17324263b3f7"}, - {file = "Cython-3.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1074e84752cd0daf3226823ddbc37cca8bc45f61c94a1db2a34e641f2b9b0797"}, - {file = "Cython-3.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49d5cae02d56e151e1481e614a1af9a0fe659358f2aa5eca7a18f05aa641db61"}, - {file = "Cython-3.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b94610fa49e36db068446cfd149a42e3246f38a4256bbe818512ac181446b4b"}, - {file = "Cython-3.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fabb2d14dd71add618a7892c40ffec584d1dae1e477caa193778e52e06821d83"}, - {file = "Cython-3.0.6-cp39-cp39-win32.whl", hash = "sha256:ce442c0be72ab014c305399d955b78c3d1e69d5a5ce24398122b605691b69078"}, - {file = "Cython-3.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:8a05f79a0761fc76c42e945e5a9cb5d7986aa9e8e526fdf52bd9ca61a12d4567"}, - {file = "Cython-3.0.6-py2.py3-none-any.whl", hash = "sha256:5921a175ea20779d4443ef99276cfa9a1a47de0e32d593be7679be741c9ed93b"}, - {file = "Cython-3.0.6.tar.gz", hash = "sha256:399d185672c667b26eabbdca420c98564583798af3bc47670a8a09e9f19dd660"}, + {file = "Cython-3.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a846e0a38e2b24e9a5c5dc74b0e54c6e29420d88d1dafabc99e0fc0f3e338636"}, + {file = 
"Cython-3.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45523fdc2b78d79b32834cc1cc12dc2ca8967af87e22a3ee1bff20e77c7f5520"}, + {file = "Cython-3.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa0b7f3f841fe087410cab66778e2d3fb20ae2d2078a2be3dffe66c6574be39"}, + {file = "Cython-3.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e87294e33e40c289c77a135f491cd721bd089f193f956f7b8ed5aa2d0b8c558f"}, + {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a1df7a129344b1215c20096d33c00193437df1a8fcca25b71f17c23b1a44f782"}, + {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:13c2a5e57a0358da467d97667297bf820b62a1a87ae47c5f87938b9bb593acbd"}, + {file = "Cython-3.0.8-cp310-cp310-win32.whl", hash = "sha256:96b028f044f5880e3cb18ecdcfc6c8d3ce9d0af28418d5ab464509f26d8adf12"}, + {file = "Cython-3.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:8140597a8b5cc4f119a1190f5a2228a84f5ca6d8d9ec386cfce24663f48b2539"}, + {file = "Cython-3.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aae26f9663e50caf9657148403d9874eea41770ecdd6caf381d177c2b1bb82ba"}, + {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:547eb3cdb2f8c6f48e6865d5a741d9dd051c25b3ce076fbca571727977b28ac3"}, + {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a567d4b9ba70b26db89d75b243529de9e649a2f56384287533cf91512705bee"}, + {file = "Cython-3.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51d1426263b0e82fb22bda8ea60dc77a428581cc19e97741011b938445d383f1"}, + {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c26daaeccda072459b48d211415fd1e5507c06bcd976fa0d5b8b9f1063467d7b"}, + {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:289ce7838208211cd166e975865fd73b0649bf118170b6cebaedfbdaf4a37795"}, + {file = "Cython-3.0.8-cp311-cp311-win32.whl", hash = "sha256:c8aa05f5e17f8042a3be052c24f2edc013fb8af874b0bf76907d16c51b4e7871"}, + {file = "Cython-3.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:000dc9e135d0eec6ecb2b40a5b02d0868a2f8d2e027a41b0fe16a908a9e6de02"}, + {file = "Cython-3.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d3fe31db55685d8cb97d43b0ec39ef614fcf660f83c77ed06aa670cb0e164f"}, + {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e24791ddae2324e88e3c902a765595c738f19ae34ee66bfb1a6dac54b1833419"}, + {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f020fa1c0552052e0660790b8153b79e3fc9a15dbd8f1d0b841fe5d204a6ae6"}, + {file = "Cython-3.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18bfa387d7a7f77d7b2526af69a65dbd0b731b8d941aaff5becff8e21f6d7717"}, + {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fe81b339cffd87c0069c6049b4d33e28bdd1874625ee515785bf42c9fdff3658"}, + {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80fd94c076e1e1b1ee40a309be03080b75f413e8997cddcf401a118879863388"}, + {file = "Cython-3.0.8-cp312-cp312-win32.whl", hash = "sha256:85077915a93e359a9b920280d214dc0cf8a62773e1f3d7d30fab8ea4daed670c"}, + {file = "Cython-3.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:0cb2dcc565c7851f75d496f724a384a790fab12d1b82461b663e66605bec429a"}, + {file = "Cython-3.0.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:870d2a0a7e3cbd5efa65aecdb38d715ea337a904ea7bb22324036e78fb7068e7"}, + {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e8f2454128974905258d86534f4fd4f91d2f1343605657ecab779d80c9d6d5e"}, + {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c1949d6aa7bc792554bee2b67a9fe41008acbfe22f4f8df7b6ec7b799613a4b3"}, + {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9f2c6e1b8f3bcd6cb230bac1843f85114780bb8be8614855b1628b36bb510e0"}, + {file = "Cython-3.0.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:05d7eddc668ae7993643f32c7661f25544e791edb745758672ea5b1a82ecffa6"}, + {file = "Cython-3.0.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bfabe115deef4ada5d23c87bddb11289123336dcc14347011832c07db616dd93"}, + {file = "Cython-3.0.8-cp36-cp36m-win32.whl", hash = "sha256:0c38c9f0bcce2df0c3347285863621be904ac6b64c5792d871130569d893efd7"}, + {file = "Cython-3.0.8-cp36-cp36m-win_amd64.whl", hash = "sha256:6c46939c3983217d140999de7c238c3141f56b1ea349e47ca49cae899969aa2c"}, + {file = "Cython-3.0.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:115f0a50f752da6c99941b103b5cb090da63eb206abbc7c2ad33856ffc73f064"}, + {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c0f29246734561c90f36e70ed0506b61aa3d044e4cc4cba559065a2a741fae"}, + {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ab75242869ff71e5665fe5c96f3378e79e792fa3c11762641b6c5afbbbbe026"}, + {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6717c06e9cfc6c1df18543cd31a21f5d8e378a40f70c851fa2d34f0597037abc"}, + {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9d3f74388db378a3c6fd06e79a809ed98df3f56484d317b81ee762dbf3c263e0"}, + {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae7ac561fd8253a9ae96311e91d12af5f701383564edc11d6338a7b60b285a6f"}, + {file = "Cython-3.0.8-cp37-cp37m-win32.whl", hash = "sha256:97b2a45845b993304f1799664fa88da676ee19442b15fdcaa31f9da7e1acc434"}, + {file = "Cython-3.0.8-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9e2be2b340fea46fb849d378f9b80d3c08ff2e81e2bfbcdb656e2e3cd8c6b2dc"}, + {file = "Cython-3.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2cde23c555470db3f149ede78b518e8274853745289c956a0e06ad8d982e4db9"}, + {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7990ca127e1f1beedaf8fc8bf66541d066ef4723ad7d8d47a7cbf842e0f47580"}, + {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b983c8e6803f016146c26854d9150ddad5662960c804ea7f0c752c9266752f0"}, + {file = "Cython-3.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a973268d7ca1a2bdf78575e459a94a78e1a0a9bb62a7db0c50041949a73b02ff"}, + {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:61a237bc9dd23c7faef0fcfce88c11c65d0c9bb73c74ccfa408b3a012073c20e"}, + {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a3d67f079598af49e90ff9655bf85bd358f093d727eb21ca2708f467c489cae"}, + {file = "Cython-3.0.8-cp38-cp38-win32.whl", hash = "sha256:17a642bb01a693e34c914106566f59844b4461665066613913463a719e0dd15d"}, + {file = "Cython-3.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:2cdfc32252f3b6dc7c94032ab744dcedb45286733443c294d8f909a4854e7f83"}, + {file = "Cython-3.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa97893d99385386925d00074654aeae3a98867f298d1e12ceaf38a9054a9bae"}, + {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05c0bf9d085c031df8f583f0d506aa3be1692023de18c45d0aaf78685bbb944"}, + {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de892422582f5758bd8de187e98ac829330ec1007bc42c661f687792999988a7"}, + {file = "Cython-3.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:314f2355a1f1d06e3c431eaad4708cf10037b5e91e4b231d89c913989d0bdafd"}, + {file = 
"Cython-3.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:78825a3774211e7d5089730f00cdf7f473042acc9ceb8b9eeebe13ed3a5541de"}, + {file = "Cython-3.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:df8093deabc55f37028190cf5e575c26aad23fc673f34b85d5f45076bc37ce39"}, + {file = "Cython-3.0.8-cp39-cp39-win32.whl", hash = "sha256:1aca1b97e0095b3a9a6c33eada3f661a4ed0d499067d121239b193e5ba3bb4f0"}, + {file = "Cython-3.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:16873d78be63bd38ffb759da7ab82814b36f56c769ee02b1d5859560e4c3ac3c"}, + {file = "Cython-3.0.8-py2.py3-none-any.whl", hash = "sha256:171b27051253d3f9108e9759e504ba59ff06e7f7ba944457f94deaf9c21bf0b6"}, + {file = "Cython-3.0.8.tar.gz", hash = "sha256:8333423d8fd5765e7cceea3a9985dd1e0a5dfeb2734629e1a2ed2d6233d39de6"}, ] [[package]] @@ -192,26 +181,6 @@ files = [ {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, ] -[[package]] -name = "importlib-metadata" -version = "6.7.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, - {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, -] - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -236,37 
+205,33 @@ files = [ [[package]] name = "pluggy" -version = "1.2.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -288,7 +253,6 @@ files = [ [package.dependencies] pytest = ">=6.1.0" -typing-extensions = {version = ">=3.7.2", 
markers = "python_version < \"3.8\""} [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] @@ -353,33 +317,7 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - -[[package]] -name = "zipp" -version = "3.15.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - [metadata] lock-version = "2.0" -python-versions = "^3.7" -content-hash = "5d7b707a062b320ee2930929c2b948e1e542f16eba9363175eaa09f09b111a02" +python-versions = "^3.8" +content-hash = "26c7f2ec91a34a0661a5511d2ade43511d80dd4f89e1aefbb59c9fafc2c92df2" diff --git a/pyproject.toml b/pyproject.toml index c711d9a6..2d866327 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ build_command = "pip install poetry && poetry build" tag_format = "{version}" 
[tool.poetry.dependencies] -python = "^3.7" +python = "^3.8" async-timeout = {version = ">=3.0.0", python = "<3.11"} ifaddr = ">=0.1.7" @@ -151,7 +151,7 @@ ignore_errors = true [build-system] # 1.5.2 required for https://github.com/python-poetry/poetry/issues/7505 -requires = ['setuptools>=65.4.1', 'wheel', 'Cython>=3.0.5', "poetry-core>=1.5.2"] +requires = ['setuptools>=65.4.1', 'wheel', 'Cython>=3.0.8', "poetry-core>=1.5.2"] build-backend = "poetry.core.masonry.api" [tool.codespell] diff --git a/src/zeroconf/const.py b/src/zeroconf/const.py index aa64306e..73c60d3b 100644 --- a/src/zeroconf/const.py +++ b/src/zeroconf/const.py @@ -156,8 +156,4 @@ # https://datatracker.ietf.org/doc/html/rfc6763#section-9 _SERVICE_TYPE_ENUMERATION_NAME = "_services._dns-sd._udp.local." -try: - _IPPROTO_IPV6 = socket.IPPROTO_IPV6 -except AttributeError: - # Sigh: https://bugs.python.org/issue29515 - _IPPROTO_IPV6 = 41 +_IPPROTO_IPV6 = socket.IPPROTO_IPV6 From 0108b5047bcbac0c49a5bdd801d2d4a59d488624 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 29 Feb 2024 13:14:46 -1000 Subject: [PATCH 220/434] chore: add test for parsing matter packet (#1364) --- tests/test_protocol.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index c830b6c3..6990917a 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1055,3 +1055,29 @@ def test_txt_after_invalid_nsec_name_still_usable(): b't=2\x0emdnssequence=0' ) assert len(parsed.answers()) == 5 + + +def test_parse_matter_packet(): + """Test our wire parser can handle a packet from matter.""" + packet_hex = ( + "000084000000000a00000000075f6d6174746572045f746370056c6f63" + "616c00000c000100001194002421413336303441463533314638364442" + "372d30303030303030303030303030303636c00cc00c000c0001000011" + "94002421333346353633363743453244333646302d3030303030303030" + "3444423341334541c00cc00c000c000100001194002421414531313941" + "304130374145304632302d34383742343631363639333638413332c00c" + "c00c000c00010000119400242141333630344146353331463836444237" + "2d30303030303030303030303030303237c00cc00c000c000100001194" + "002421413336303441463533314638364442372d303030303030303030" + "30303030303637c00cc00c000c00010000119400242133334635363336" + "3743453244333646302d30303030303030304243363637324136c00cc0" + "0c000c000100001194002421414531313941304130374145304632302d" + "39464534383646413645373730464433c00cc00c000c00010000119400" + "2421413336303441463533314638364442372d30303030303030303030" + "303030303434c00cc00c000c0001000011940024213935374431413839" + "44463239343033312d41423337393041444346434231423239c00cc00c" + "000c000100001194002421413336303441463533314638364442372d30" + "303030303030303030303030303638c00c" + ) + parsed = r.DNSIncoming(bytes.fromhex(packet_hex)) + assert len(parsed.answers()) == 10 From c4c2deeb05279ddbb0eba1330c7ae58795fea001 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 1 Apr 2024 09:43:31 -1000 Subject: [PATCH 221/434] feat: make async_get_service_info available on the Zeroconf object (#1366) --- src/zeroconf/_core.py | 31 +++++++++++++++++++++++++++++-- src/zeroconf/_services/info.py | 16 ++++++++++++++++ src/zeroconf/asyncio.py | 19 +++++++++---------- tests/test_asyncio.py | 4 ++++ 4 files changed, 58 insertions(+), 12 deletions(-) diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 4b29717a..cb488b4e 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -39,7 +39,11 @@ from ._protocol.outgoing import DNSOutgoing from ._services import ServiceListener from ._services.browser import ServiceBrowser -from ._services.info import ServiceInfo, instance_name_from_service_info +from ._services.info import ( + AsyncServiceInfo, + ServiceInfo, + instance_name_from_service_info, +) from ._services.registry import ServiceRegistry from ._transport import _WrappedTransport from ._updates import RecordUpdateListener @@ -261,7 +265,13 @@ def get_service_info( ) -> Optional[ServiceInfo]: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, - which defaults to 3 seconds.""" + which defaults to 3 seconds. 
+ + :param type_: fully qualified service type name + :param name: the name of the service + :param timeout: milliseconds to wait for a response + :param question_type: The type of questions to ask (DNSQuestionType.QM or DNSQuestionType.QU) + """ info = ServiceInfo(type_, name) if info.request(self, timeout, question_type): return info @@ -360,6 +370,23 @@ async def async_update_service(self, info: ServiceInfo) -> Awaitable: self.registry.async_update(info) return asyncio.ensure_future(self._async_broadcast_service(info, _REGISTER_TIME, None)) + async def async_get_service_info( + self, type_: str, name: str, timeout: int = 3000, question_type: Optional[DNSQuestionType] = None + ) -> Optional[AsyncServiceInfo]: + """Returns network's service information for a particular + name and type, or None if no service matches by the timeout, + which defaults to 3 seconds. + + :param type_: fully qualified service type name + :param name: the name of the service + :param timeout: milliseconds to wait for a response + :param question_type: The type of questions to ask (DNSQuestionType.QM or DNSQuestionType.QU) + """ + info = AsyncServiceInfo(type_, name) + if await info.async_request(self, timeout, question_type): + return info + return None + async def _async_broadcast_service( self, info: ServiceInfo, diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 48ad1140..6d68de83 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -770,6 +770,12 @@ def request( While it is not expected during normal operation, this function may raise EventLoopBlocked if the underlying call to `async_request` cannot be completed. 
+ + :param zc: Zeroconf instance + :param timeout: time in milliseconds to wait for a response + :param question_type: question type to ask + :param addr: address to send the request to + :param port: port to send the request to """ assert zc.loop is not None and zc.loop.is_running() if zc.loop == get_running_loop(): @@ -803,6 +809,12 @@ async def async_request( mDNS multicast address and port. This is useful for directing requests to a specific host that may be able to respond across subnets. + + :param zc: Zeroconf instance + :param timeout: time in milliseconds to wait for a response + :param question_type: question type to ask + :param addr: address to send the request to + :param port: port to send the request to """ if not zc.started: await zc.async_wait_for_start() @@ -924,3 +936,7 @@ def __repr__(self) -> str: ) ), ) + + +class AsyncServiceInfo(ServiceInfo): + """An async version of ServiceInfo.""" diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index cfe3693e..b2daeb10 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -28,7 +28,7 @@ from ._dns import DNSQuestionType from ._services import ServiceListener from ._services.browser import _ServiceBrowserBase -from ._services.info import ServiceInfo +from ._services.info import AsyncServiceInfo, ServiceInfo from ._services.types import ZeroconfServiceTypes from ._utils.net import InterfaceChoice, InterfacesType, IPVersion from .const import _BROWSER_TIME, _MDNS_PORT, _SERVICE_TYPE_ENUMERATION_NAME @@ -41,10 +41,6 @@ ] -class AsyncServiceInfo(ServiceInfo): - """An async version of ServiceInfo.""" - - class AsyncServiceBrowser(_ServiceBrowserBase): """Used to browse for a service for specific type(s). 
@@ -239,11 +235,14 @@ async def async_get_service_info( ) -> Optional[AsyncServiceInfo]: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, - which defaults to 3 seconds.""" - info = AsyncServiceInfo(type_, name) - if await info.async_request(self.zeroconf, timeout, question_type): - return info - return None + which defaults to 3 seconds. + + :param type_: fully qualified service type name + :param name: the name of the service + :param timeout: milliseconds to wait for a response + :param question_type: The type of questions to ask (DNSQuestionType.QM or DNSQuestionType.QU) + """ + return await self.zeroconf.async_get_service_info(type_, name, timeout, question_type) async def async_add_service_listener(self, type_: str, listener: ServiceListener) -> None: """Adds a listener for a particular service type. This object diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 63255158..382b1a3d 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -680,6 +680,10 @@ async def test_service_info_async_request() -> None: assert aiosinfo is not None assert aiosinfo.addresses == [socket.inet_aton("10.0.1.3")] + aiosinfo = await aiozc.zeroconf.async_get_service_info(type_, registration_name) + assert aiosinfo is not None + assert aiosinfo.addresses == [socket.inet_aton("10.0.1.3")] + aiosinfos = await asyncio.gather( aiozc.async_get_service_info(type_, registration_name), aiozc.async_get_service_info(type_, registration_name2), From edc4a556819956c238a11332052000dcbcb07e3d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 1 Apr 2024 09:43:36 -1000 Subject: [PATCH 222/434] fix: avoid including scope_id in IPv6Address object if its zero (#1367) --- src/zeroconf/_utils/ipaddress.py | 2 +- tests/utils/test_ipaddress.py | 37 ++++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index b0b551ff..ba137955 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -104,7 +104,7 @@ def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4 def get_ip_address_object_from_record(record: DNSAddress) -> Optional[Union[IPv4Address, IPv6Address]]: """Get the IP address object from the record.""" - if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id is not None: + if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id: return ip_bytes_and_scope_to_address(record.address, record.scope_id) return cached_ip_addresses_wrapper(record.address) diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index 3ec1a9a7..73c5ab7e 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ -2,6 +2,12 @@ """Unit tests for zeroconf._utils.ipaddress.""" +import sys + +import pytest + +from zeroconf import const +from zeroconf._dns import DNSAddress from zeroconf._utils import ipaddress @@ -34,3 +40,34 @@ def test_cached_ip_addresses_wrapper(): assert ipv6 is not None assert ipv6.is_link_local is False assert ipv6.is_unspecified is True + + +@pytest.mark.skipif(sys.version_info < (3, 9, 0), reason='scope_id is not supported') +def test_get_ip_address_object_from_record(): + """Test the get_ip_address_object_from_record.""" + # not link local + packed = b'&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F' + record = DNSAddress( + 'domain.local', const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed, scope_id=3 + ) + assert 
record.scope_id == 3 + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( + '2606:2800:220:1:248:1893:25c8:1946' + ) + + # link local + packed = b'\xfe\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01' + record = DNSAddress( + 'domain.local', const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed, scope_id=3 + ) + assert record.scope_id == 3 + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address('fe80::1%3') + record = DNSAddress('domain.local', const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed) + assert record.scope_id is None + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address('fe80::1') + record = DNSAddress( + 'domain.local', const._TYPE_A, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed, scope_id=0 + ) + assert record.scope_id == 0 + # Ensure scope_id of 0 is not appended to the address + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address('fe80::1') From 0758c1e22e8686be85f214a46f482aa4b46da9e9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 1 Apr 2024 19:55:41 +0000 Subject: [PATCH 223/434] 0.132.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 11 +++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4e2fbc0d..905ab5e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,17 @@ +## v0.132.0 (2024-04-01) + +### Feature + +* Make async_get_service_info available on the Zeroconf object ([#1366](https://github.com/python-zeroconf/python-zeroconf/issues/1366)) ([`c4c2dee`](https://github.com/python-zeroconf/python-zeroconf/commit/c4c2deeb05279ddbb0eba1330c7ae58795fea001)) +* Drop python 3.7 support ([#1359](https://github.com/python-zeroconf/python-zeroconf/issues/1359)) 
([`4877829`](https://github.com/python-zeroconf/python-zeroconf/commit/4877829e6442de5426db152d11827b1ba85dbf59)) + +### Fix + +* Avoid including scope_id in IPv6Address object if its zero ([#1367](https://github.com/python-zeroconf/python-zeroconf/issues/1367)) ([`edc4a55`](https://github.com/python-zeroconf/python-zeroconf/commit/edc4a556819956c238a11332052000dcbcb07e3d)) + ## v0.131.0 (2023-12-19) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 2d866327..67ed1d47 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.131.0" +version = "0.132.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index e6b8e481..ab80996f 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.131.0' +__version__ = '0.132.0' __license__ = 'LGPL' From e9f8aa5741ae2d490c33a562b459f0af1014dbb0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 12 Apr 2024 11:47:46 -1000 Subject: [PATCH 224/434] fix: set change during iteration when dispatching listeners (#1370) --- src/zeroconf/_handlers/record_manager.py | 4 +- tests/test_handlers.py | 74 ++++++++++++++++++++++++ 2 files changed, 76 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 0a0f6c54..70f2e5e1 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -56,7 +56,7 @@ def async_updates(self, now: _float, records: List[RecordUpdate]) -> None: This method will be run in the event loop. 
""" - for listener in self.listeners: + for listener in self.listeners.copy(): listener.async_update_records(self.zc, now, records) def async_updates_complete(self, notify: bool) -> None: @@ -67,7 +67,7 @@ def async_updates_complete(self, notify: bool) -> None: This method will be run in the event loop. """ - for listener in self.listeners: + for listener in self.listeners.copy(): listener.async_update_records_complete() if notify: self.zc.async_notify_all() diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 1a1066fa..a13824e0 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1762,3 +1762,77 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.Recor ) await aiozc.async_close() + + +@pytest.mark.asyncio +async def test_async_updates_iteration_safe(): + """Ensure we can safely iterate over the async_updates.""" + + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zc: Zeroconf = aiozc.zeroconf + updated = [] + good_bye_answer = r.DNSPointer( + "myservicelow_tcp._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN | const._CLASS_UNIQUE, + 0, + 'goodbye.local.', + ) + + class OtherListener(r.RecordUpdateListener): + """A RecordUpdateListener that does not implement update_records.""" + + def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.RecordUpdate]) -> None: + """Update multiple records in one shot.""" + updated.extend(records) + + other = OtherListener() + + class ListenerThatAddsListener(r.RecordUpdateListener): + """A RecordUpdateListener that does not implement update_records.""" + + def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.RecordUpdate]) -> None: + """Update multiple records in one shot.""" + updated.extend(records) + zc.async_add_listener(other, None) + + zc.async_add_listener(ListenerThatAddsListener(), None) + await asyncio.sleep(0) # flush out any call soons + + # This should not raise RuntimeError: set changed size during iteration + 
zc.record_manager.async_updates( + now=current_time_millis(), records=[r.RecordUpdate(good_bye_answer, None)] + ) + + assert len(updated) == 1 + await aiozc.async_close() + + +@pytest.mark.asyncio +async def test_async_updates_complete_iteration_safe(): + """Ensure we can safely iterate over the async_updates_complete.""" + + aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zc: Zeroconf = aiozc.zeroconf + + class OtherListener(r.RecordUpdateListener): + """A RecordUpdateListener that does not implement update_records.""" + + def async_update_records_complete(self) -> None: + """Update multiple records in one shot.""" + + other = OtherListener() + + class ListenerThatAddsListener(r.RecordUpdateListener): + """A RecordUpdateListener that does not implement update_records.""" + + def async_update_records_complete(self) -> None: + """Update multiple records in one shot.""" + zc.async_add_listener(other, None) + + zc.async_add_listener(ListenerThatAddsListener(), None) + await asyncio.sleep(0) # flush out any call soons + + # This should not raise RuntimeError: set changed size during iteration + zc.record_manager.async_updates_complete(False) + await aiozc.async_close() From 07742e68ef1c48e21f957f5f43cbcc11851c5216 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 12 Apr 2024 21:57:49 +0000 Subject: [PATCH 225/434] 0.132.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 905ab5e2..ca5f012b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.132.1 (2024-04-12) + +### Fix + +* Set change during iteration when dispatching listeners ([#1370](https://github.com/python-zeroconf/python-zeroconf/issues/1370)) ([`e9f8aa5`](https://github.com/python-zeroconf/python-zeroconf/commit/e9f8aa5741ae2d490c33a562b459f0af1014dbb0)) + ## v0.132.0 (2024-04-01) ### Feature diff --git 
a/pyproject.toml b/pyproject.toml index 67ed1d47..04ad76bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.132.0" +version = "0.132.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ab80996f..0fcbdccd 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.132.0' +__version__ = '0.132.1' __license__ = 'LGPL' From 83e4ce3e31ddd4ae9aec2f8c9d84d7a93f8be210 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 12 Apr 2024 14:29:39 -1000 Subject: [PATCH 226/434] fix: bump cibuildwheel to fix wheel builds (#1371) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 00d3fe9b..3ad892f2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -159,7 +159,7 @@ jobs: platforms: arm64 - name: Build wheels - uses: pypa/cibuildwheel@v2.16.2 + uses: pypa/cibuildwheel@v2.17.0 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 From 599524a5ce1e4c1731519dd89377c2a852e59935 Mon Sep 17 00:00:00 2001 From: Paarth Shah Date: Fri, 12 Apr 2024 17:32:44 -0700 Subject: [PATCH 227/434] fix: update references to minimum-supported python version of 3.8 (#1369) --- README.rst | 4 ++-- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 5cc5a91b..eba4d7fe 100644 --- a/README.rst +++ b/README.rst @@ -45,8 +45,8 @@ Compared to some other Zeroconf/Bonjour/Avahi Python packages, python-zeroconf: Python 
compatibility -------------------- -* CPython 3.7+ -* PyPy3.7 7.3+ +* CPython 3.8+ +* PyPy3.8 7.3+ Versioning ---------- diff --git a/pyproject.toml b/pyproject.toml index 04ad76bb..3acc77b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,11 +16,11 @@ classifiers=[ 'Operating System :: POSIX :: Linux', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Libraries', - 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ] From 9d8dd27c75768663319c0ee610ba9d274799e32c Mon Sep 17 00:00:00 2001 From: github-actions Date: Sat, 13 Apr 2024 00:41:48 +0000 Subject: [PATCH 228/434] 0.132.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ca5f012b..a2026cba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ +## v0.132.2 (2024-04-13) + +### Fix + +* Update references to minimum-supported python version of 3.8 ([#1369](https://github.com/python-zeroconf/python-zeroconf/issues/1369)) ([`599524a`](https://github.com/python-zeroconf/python-zeroconf/commit/599524a5ce1e4c1731519dd89377c2a852e59935)) +* Bump cibuildwheel to fix wheel builds ([#1371](https://github.com/python-zeroconf/python-zeroconf/issues/1371)) ([`83e4ce3`](https://github.com/python-zeroconf/python-zeroconf/commit/83e4ce3e31ddd4ae9aec2f8c9d84d7a93f8be210)) + ## v0.132.1 (2024-04-12) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 3acc77b9..1be7d81a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.132.1" 
+version = "0.132.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 0fcbdccd..4e6fb157 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -85,7 +85,7 @@ __author__ = 'Paul Scott-Murphy, William McBrine' __maintainer__ = 'Jakub Stasiak ' -__version__ = '0.132.1' +__version__ = '0.132.2' __license__ = 'LGPL' From 0c68d711212a036e481332202bf46ae7cae69c3a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 5 Jul 2024 17:29:38 -0500 Subject: [PATCH 229/434] chore: fix to ruff for lint and format (#1382) --- .pre-commit-config.yaml | 14 +- bench/create_destory.py | 5 +- bench/incoming.py | 5 +- bench/outgoing.py | 5 +- build_ext.py | 4 +- docs/conf.py | 36 +- examples/async_apple_scanner.py | 39 +- examples/async_browser.py | 31 +- examples/async_registration.py | 12 +- examples/async_service_info_request.py | 22 +- examples/browser.py | 30 +- examples/registration.py | 14 +- examples/resolver.py | 14 +- examples/self_test.py | 23 +- pyproject.toml | 2 +- src/zeroconf/__init__.py | 44 +- src/zeroconf/_cache.py | 74 +- src/zeroconf/_core.py | 155 +++-- src/zeroconf/_dns.py | 162 +++-- src/zeroconf/_engine.py | 87 ++- src/zeroconf/_exceptions.py | 40 +- src/zeroconf/_handlers/__init__.py | 32 +- src/zeroconf/_handlers/answers.py | 72 +- .../_handlers/multicast_outgoing_queue.py | 62 +- src/zeroconf/_handlers/query_handler.py | 150 +++-- src/zeroconf/_handlers/record_manager.py | 58 +- src/zeroconf/_history.py | 48 +- src/zeroconf/_listener.py | 93 +-- src/zeroconf/_logger.py | 48 +- src/zeroconf/_protocol/__init__.py | 32 +- src/zeroconf/_protocol/incoming.py | 147 ++-- src/zeroconf/_protocol/outgoing.py | 141 ++-- src/zeroconf/_record_update.py | 40 +- src/zeroconf/_services/__init__.py | 60 +- src/zeroconf/_services/browser.py | 272 
+++++--- src/zeroconf/_services/info.py | 199 ++++-- src/zeroconf/_services/registry.py | 44 +- src/zeroconf/_services/types.py | 44 +- src/zeroconf/_transport.py | 44 +- src/zeroconf/_updates.py | 50 +- src/zeroconf/_utils/__init__.py | 32 +- src/zeroconf/_utils/asyncio.py | 60 +- src/zeroconf/_utils/ipaddress.py | 61 +- src/zeroconf/_utils/name.py | 89 +-- src/zeroconf/_utils/net.py | 199 ++++-- src/zeroconf/_utils/time.py | 41 +- src/zeroconf/asyncio.py | 76 ++- src/zeroconf/const.py | 62 +- tests/__init__.py | 48 +- tests/conftest.py | 2 +- tests/services/__init__.py | 32 +- tests/services/test_browser.py | 444 ++++++++---- tests/services/test_info.py | 626 +++++++++++------ tests/services/test_registry.py | 64 +- tests/services/test_types.py | 34 +- tests/test_asyncio.py | 251 ++++--- tests/test_cache.py | 202 ++++-- tests/test_core.py | 254 ++++--- tests/test_dns.py | 284 ++++++-- tests/test_engine.py | 46 +- tests/test_exceptions.py | 119 ++-- tests/test_handlers.py | 633 +++++++++++++----- tests/test_history.py | 14 +- tests/test_init.py | 39 +- tests/test_listener.py | 89 ++- tests/test_logger.py | 12 +- tests/test_protocol.py | 292 ++++---- tests/test_services.py | 78 ++- tests/test_updates.py | 38 +- tests/utils/__init__.py | 32 +- tests/utils/test_asyncio.py | 10 +- tests/utils/test_ipaddress.py | 71 +- tests/utils/test_name.py | 56 +- tests/utils/test_net.py | 100 ++- 74 files changed, 4442 insertions(+), 2502 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b7ae9294..e4a88203 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -38,20 +38,18 @@ repos: hooks: - id: pyupgrade args: [--py37-plus] - - repo: https://github.com/PyCQA/isort - rev: 5.12.0 + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.5.0 hooks: - - id: isort - - repo: https://github.com/psf/black - rev: 22.8.0 - hooks: - - id: black + - id: ruff + args: [--fix, --exit-non-zero-on-fix] + - id: ruff-format # - repo: 
https://github.com/codespell-project/codespell # rev: v2.2.1 # hooks: # - id: codespell - repo: https://github.com/PyCQA/flake8 - rev: 5.0.4 + rev: 7.1.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy diff --git a/bench/create_destory.py b/bench/create_destory.py index f1941423..77d8af6f 100644 --- a/bench/create_destory.py +++ b/bench/create_destory.py @@ -1,4 +1,5 @@ """Benchmark for AsyncZeroconf.""" + import asyncio import time @@ -17,7 +18,9 @@ async def _run() -> None: start = time.perf_counter() await _create_destroy(iterations) duration = time.perf_counter() - start - print(f"Creating and destroying {iterations} Zeroconf instances took {duration} seconds") + print( + f"Creating and destroying {iterations} Zeroconf instances took {duration} seconds" + ) asyncio.run(_run()) diff --git a/bench/incoming.py b/bench/incoming.py index 233f19e9..d0cc3588 100644 --- a/bench/incoming.py +++ b/bench/incoming.py @@ -1,4 +1,5 @@ """Benchmark for DNSIncoming.""" + import socket import timeit from typing import List @@ -121,8 +122,8 @@ def generate_packets() -> List[bytes]: const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - b'\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1' - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) diff --git a/bench/outgoing.py b/bench/outgoing.py index d832a05b..8c8097cb 100644 --- a/bench/outgoing.py +++ b/bench/outgoing.py @@ -1,4 +1,5 @@ """Benchmark for DNSOutgoing.""" + import socket import timeit @@ -113,8 +114,8 @@ def generate_packets() -> DNSOutgoing: const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - b'\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1' - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" 
+ b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) diff --git a/build_ext.py b/build_ext.py index 0f02f53a..4fecbdf1 100644 --- a/build_ext.py +++ b/build_ext.py @@ -47,7 +47,9 @@ def build(setup_kwargs: Any) -> None: cmdclass=dict(build_ext=BuildExt), ) ) - setup_kwargs["exclude_package_data"] = {pkg: ["*.c"] for pkg in setup_kwargs["packages"]} + setup_kwargs["exclude_package_data"] = { + pkg: ["*.c"] for pkg in setup_kwargs["packages"] + } except Exception: if os.environ.get("REQUIRE_CYTHON"): raise diff --git a/docs/conf.py b/docs/conf.py index afaa510e..b3ad57ea 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -23,23 +23,23 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'python-zeroconf' -copyright = 'python-zeroconf authors' +project = "python-zeroconf" +copyright = "python-zeroconf authors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -62,7 +62,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all documents. 
# default_role = None @@ -79,7 +79,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -92,7 +92,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = "default" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -121,7 +121,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. @@ -133,8 +133,8 @@ # Custom sidebar templates, maps document names to template names. html_sidebars = { - 'index': ('sidebar.html', 'sourcelink.html', 'searchbox.html'), - '**': ('localtoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'), + "index": ("sidebar.html", "sourcelink.html", "searchbox.html"), + "**": ("localtoc.html", "relations.html", "sourcelink.html", "searchbox.html"), } # Additional templates that should be rendered to pages, maps page names to @@ -168,7 +168,7 @@ # html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'zeroconfdoc' +htmlhelp_basename = "zeroconfdoc" # -- Options for LaTeX output -------------------------------------------------- @@ -231,17 +231,17 @@ # Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = {'http://docs.python.org/': None} +intersphinx_mapping = {"http://docs.python.org/": None} def setup(app): # type: ignore[no-untyped-def] - app.connect('autodoc-skip-member', skip_member) + app.connect("autodoc-skip-member", skip_member) def skip_member(app, what, name, obj, skip, options): # type: ignore[no-untyped-def] return ( skip - or getattr(obj, '__doc__', None) is None - or getattr(obj, '__private__', False) is True - or getattr(getattr(obj, '__func__', None), '__private__', False) is True + or getattr(obj, "__doc__", None) is None + or getattr(obj, "__private__", False) is True + or getattr(getattr(obj, "__func__", None), "__private__", False) is True ) diff --git a/examples/async_apple_scanner.py b/examples/async_apple_scanner.py index ff558f82..ed549e01 100644 --- a/examples/async_apple_scanner.py +++ b/examples/async_apple_scanner.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -""" Scan for apple devices. """ +"""Scan for apple devices.""" import argparse import asyncio @@ -43,15 +43,21 @@ def async_on_service_state_change( device_name = f"{base_name}.{DEVICE_INFO_SERVICE}" asyncio.ensure_future(_async_show_service_info(zeroconf, service_type, name)) # Also probe for device info - asyncio.ensure_future(_async_show_service_info(zeroconf, DEVICE_INFO_SERVICE, device_name)) + asyncio.ensure_future( + _async_show_service_info(zeroconf, DEVICE_INFO_SERVICE, device_name) + ) -async def _async_show_service_info(zeroconf: Zeroconf, service_type: str, name: str) -> None: +async def _async_show_service_info( + zeroconf: Zeroconf, service_type: str, name: str +) -> None: info = AsyncServiceInfo(service_type, name) await info.async_request(zeroconf, 3000, question_type=DNSQuestionType.QU) print("Info from zeroconf.get_service_info: %r" % (info)) if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses()] + addresses = [ + "%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses() + ] print(" 
Name: %s" % name) print(" Addresses: %s" % ", ".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) @@ -64,7 +70,7 @@ async def _async_show_service_info(zeroconf: Zeroconf, service_type: str, name: print(" No properties") else: print(" No info") - print('\n') + print("\n") class AsyncAppleScanner: @@ -77,10 +83,17 @@ async def async_run(self) -> None: self.aiozc = AsyncZeroconf(ip_version=ip_version) await self.aiozc.zeroconf.async_wait_for_start() print("\nBrowsing %s service(s), press Ctrl-C to exit...\n" % ALL_SERVICES) - kwargs = {'handlers': [async_on_service_state_change], 'question_type': DNSQuestionType.QU} + kwargs = { + "handlers": [async_on_service_state_change], + "question_type": DNSQuestionType.QU, + } if self.args.target: kwargs["addr"] = self.args.target - self.aiobrowser = AsyncServiceBrowser(self.aiozc.zeroconf, ALL_SERVICES, **kwargs) # type: ignore + self.aiobrowser = AsyncServiceBrowser( + self.aiozc.zeroconf, + ALL_SERVICES, + **kwargs, # type: ignore[arg-type] + ) while True: await asyncio.sleep(1) @@ -91,19 +104,19 @@ async def async_close(self) -> None: await self.aiozc.async_close() -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) parser = argparse.ArgumentParser() - parser.add_argument('--debug', action='store_true') + parser.add_argument("--debug", action="store_true") version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--target', help='Unicast target') - version_group.add_argument('--v6', action='store_true') - version_group.add_argument('--v6-only', action='store_true') + version_group.add_argument("--target", help="Unicast target") + version_group.add_argument("--v6", action="store_true") + version_group.add_argument("--v6-only", action="store_true") args = parser.parse_args() if args.debug: - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) if args.v6: ip_version = 
IPVersion.All elif args.v6_only: diff --git a/examples/async_browser.py b/examples/async_browser.py index f7fb7151..cd4c7786 100644 --- a/examples/async_browser.py +++ b/examples/async_browser.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -""" Example of browsing for a service. +"""Example of browsing for a service. The default is HTTP and HAP; use --find to search for all available services in the network """ @@ -28,12 +28,17 @@ def async_on_service_state_change( asyncio.ensure_future(async_display_service_info(zeroconf, service_type, name)) -async def async_display_service_info(zeroconf: Zeroconf, service_type: str, name: str) -> None: +async def async_display_service_info( + zeroconf: Zeroconf, service_type: str, name: str +) -> None: info = AsyncServiceInfo(service_type, name) await info.async_request(zeroconf, 3000) print("Info from zeroconf.get_service_info: %r" % (info)) if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_scoped_addresses()] + addresses = [ + "%s:%d" % (addr, cast(int, info.port)) + for addr in info.parsed_scoped_addresses() + ] print(" Name: %s" % name) print(" Addresses: %s" % ", ".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) @@ -46,7 +51,7 @@ async def async_display_service_info(zeroconf: Zeroconf, service_type: str, name print(" No properties") else: print(" No info") - print('\n') + print("\n") class AsyncRunner: @@ -61,7 +66,9 @@ async def async_run(self) -> None: services = ["_http._tcp.local.", "_hap._tcp.local."] if self.args.find: services = list( - await AsyncZeroconfServiceTypes.async_find(aiozc=self.aiozc, ip_version=ip_version) + await AsyncZeroconfServiceTypes.async_find( + aiozc=self.aiozc, ip_version=ip_version + ) ) print("\nBrowsing %s service(s), press Ctrl-C to exit...\n" % services) @@ -78,19 +85,21 @@ async def async_close(self) -> None: await self.aiozc.async_close() -if __name__ == '__main__': +if __name__ == "__main__": 
logging.basicConfig(level=logging.DEBUG) parser = argparse.ArgumentParser() - parser.add_argument('--debug', action='store_true') - parser.add_argument('--find', action='store_true', help='Browse all available services') + parser.add_argument("--debug", action="store_true") + parser.add_argument( + "--find", action="store_true", help="Browse all available services" + ) version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--v6', action='store_true') - version_group.add_argument('--v6-only', action='store_true') + version_group.add_argument("--v6", action="store_true") + version_group.add_argument("--v6-only", action="store_true") args = parser.parse_args() if args.debug: - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) if args.v6: ip_version = IPVersion.All elif args.v6_only: diff --git a/examples/async_registration.py b/examples/async_registration.py index c3aab326..a75b5566 100644 --- a/examples/async_registration.py +++ b/examples/async_registration.py @@ -33,18 +33,18 @@ async def unregister_services(self, infos: List[AsyncServiceInfo]) -> None: await self.aiozc.async_close() -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) parser = argparse.ArgumentParser() - parser.add_argument('--debug', action='store_true') + parser.add_argument("--debug", action="store_true") version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--v6', action='store_true') - version_group.add_argument('--v6-only', action='store_true') + version_group.add_argument("--v6", action="store_true") + version_group.add_argument("--v6-only", action="store_true") args = parser.parse_args() if args.debug: - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) if args.v6: ip_version = IPVersion.All elif args.v6_only: @@ -60,7 +60,7 @@ async def unregister_services(self, infos: 
List[AsyncServiceInfo]) -> None: f"Paul's Test Web Site {i}._http._tcp.local.", addresses=[socket.inet_aton("127.0.0.1")], port=80, - properties={'path': '/~paulsm/'}, + properties={"path": "/~paulsm/"}, server=f"zcdemohost-{i}.local.", ) ) diff --git a/examples/async_service_info_request.py b/examples/async_service_info_request.py index 5bb24761..fca58745 100644 --- a/examples/async_service_info_request.py +++ b/examples/async_service_info_request.py @@ -31,7 +31,10 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: for info in infos: print("Info for %s" % (info.name)) if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses()] + addresses = [ + "%s:%d" % (addr, cast(int, info.port)) + for addr in info.parsed_addresses() + ] print(" Addresses: %s" % ", ".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) print(f" Server: {info.server}") @@ -43,7 +46,7 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: print(" No properties") else: print(" No info") - print('\n') + print("\n") class AsyncRunner: @@ -57,7 +60,10 @@ async def async_run(self) -> None: assert self.aiozc is not None def on_service_state_change( - zeroconf: Zeroconf, service_type: str, state_change: ServiceStateChange, name: str + zeroconf: Zeroconf, + service_type: str, + state_change: ServiceStateChange, + name: str, ) -> None: """Dummy handler.""" @@ -73,18 +79,18 @@ async def async_close(self) -> None: await self.aiozc.async_close() -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) parser = argparse.ArgumentParser() - parser.add_argument('--debug', action='store_true') + parser.add_argument("--debug", action="store_true") version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--v6', action='store_true') - version_group.add_argument('--v6-only', action='store_true') + version_group.add_argument("--v6", action="store_true") + 
version_group.add_argument("--v6-only", action="store_true") args = parser.parse_args() if args.debug: - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) if args.v6: ip_version = IPVersion.All elif args.v6_only: diff --git a/examples/browser.py b/examples/browser.py index 237de013..1a801a44 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -""" Example of browsing for a service. +"""Example of browsing for a service. The default is HTTP and HAP; use --find to search for all available services in the network """ @@ -29,7 +29,10 @@ def on_service_state_change( print("Info from zeroconf.get_service_info: %r" % (info)) if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_scoped_addresses()] + addresses = [ + "%s:%d" % (addr, cast(int, info.port)) + for addr in info.parsed_scoped_addresses() + ] print(" Addresses: %s" % ", ".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) print(f" Server: {info.server}") @@ -41,22 +44,24 @@ def on_service_state_change( print(" No properties") else: print(" No info") - print('\n') + print("\n") -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) parser = argparse.ArgumentParser() - parser.add_argument('--debug', action='store_true') - parser.add_argument('--find', action='store_true', help='Browse all available services') + parser.add_argument("--debug", action="store_true") + parser.add_argument( + "--find", action="store_true", help="Browse all available services" + ) version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--v6-only', action='store_true') - version_group.add_argument('--v4-only', action='store_true') + version_group.add_argument("--v6-only", action="store_true") + version_group.add_argument("--v4-only", action="store_true") args = parser.parse_args() if args.debug: - 
logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) if args.v6_only: ip_version = IPVersion.V6Only elif args.v4_only: @@ -66,7 +71,12 @@ def on_service_state_change( zeroconf = Zeroconf(ip_version=ip_version) - services = ["_http._tcp.local.", "_hap._tcp.local.", "_esphomelib._tcp.local.", "_airplay._tcp.local."] + services = [ + "_http._tcp.local.", + "_hap._tcp.local.", + "_esphomelib._tcp.local.", + "_airplay._tcp.local.", + ] if args.find: services = list(ZeroconfServiceTypes.find(zc=zeroconf)) diff --git a/examples/registration.py b/examples/registration.py index 65c22199..5be9f45d 100755 --- a/examples/registration.py +++ b/examples/registration.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -""" Example of announcing a service (in this case, a fake HTTP server) """ +"""Example of announcing a service (in this case, a fake HTTP server)""" import argparse import logging @@ -9,18 +9,18 @@ from zeroconf import IPVersion, ServiceInfo, Zeroconf -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) parser = argparse.ArgumentParser() - parser.add_argument('--debug', action='store_true') + parser.add_argument("--debug", action="store_true") version_group = parser.add_mutually_exclusive_group() - version_group.add_argument('--v6', action='store_true') - version_group.add_argument('--v6-only', action='store_true') + version_group.add_argument("--v6", action="store_true") + version_group.add_argument("--v6-only", action="store_true") args = parser.parse_args() if args.debug: - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) if args.v6: ip_version = IPVersion.All elif args.v6_only: @@ -28,7 +28,7 @@ else: ip_version = IPVersion.V4Only - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( "_http._tcp.local.", diff --git a/examples/resolver.py b/examples/resolver.py index 6a550fcb..e7a11f82 
100755 --- a/examples/resolver.py +++ b/examples/resolver.py @@ -1,24 +1,24 @@ #!/usr/bin/env python3 -""" Example of resolving a service with a known name """ +"""Example of resolving a service with a known name""" import logging import sys from zeroconf import Zeroconf -TYPE = '_test._tcp.local.' -NAME = 'My Service Name' +TYPE = "_test._tcp.local." +NAME = "My Service Name" -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) if len(sys.argv) > 1: - assert sys.argv[1:] == ['--debug'] - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + assert sys.argv[1:] == ["--debug"] + logging.getLogger("zeroconf").setLevel(logging.DEBUG) zeroconf = Zeroconf() try: - print(zeroconf.get_service_info(TYPE, NAME + '.' + TYPE)) + print(zeroconf.get_service_info(TYPE, NAME + "." + TYPE)) finally: zeroconf.close() diff --git a/examples/self_test.py b/examples/self_test.py index 2178629b..63aca4f3 100755 --- a/examples/self_test.py +++ b/examples/self_test.py @@ -6,23 +6,23 @@ from zeroconf import ServiceInfo, Zeroconf, __version__ -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) if len(sys.argv) > 1: - assert sys.argv[1:] == ['--debug'] - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + assert sys.argv[1:] == ["--debug"] + logging.getLogger("zeroconf").setLevel(logging.DEBUG) # Test a few module features, including service registration, service # query (for Zoe), and service unregistration. print(f"Multicast DNS Service Discovery for Python, version {__version__}") r = Zeroconf() print("1. 
Testing registration of a service...") - desc = {'version': '0.10', 'a': 'test value', 'b': 'another value'} + desc = {"version": "0.10", "a": "test value", "b": "another value"} addresses = [socket.inet_aton("127.0.0.1")] - expected = {'127.0.0.1'} + expected = {"127.0.0.1"} if socket.has_ipv6: - addresses.append(socket.inet_pton(socket.AF_INET6, '::1')) - expected.add('::1') + addresses.append(socket.inet_pton(socket.AF_INET6, "::1")) + expected.add("::1") info = ServiceInfo( "_http._tcp.local.", "My Service Name._http._tcp.local.", @@ -34,10 +34,15 @@ r.register_service(info) print(" Registration done.") print("2. Testing query of service information...") - print(" Getting ZOE service: %s" % (r.get_service_info("_http._tcp.local.", "ZOE._http._tcp.local."))) + print( + " Getting ZOE service: %s" + % (r.get_service_info("_http._tcp.local.", "ZOE._http._tcp.local.")) + ) print(" Query done.") print("3. Testing query of own service...") - queried_info = r.get_service_info("_http._tcp.local.", "My Service Name._http._tcp.local.") + queried_info = r.get_service_info( + "_http._tcp.local.", "My Service Name._http._tcp.local." 
+ ) assert queried_info assert set(queried_info.parsed_addresses()) == expected print(f" Getting self: {queried_info}") diff --git a/pyproject.toml b/pyproject.toml index 1be7d81a..1d88efbd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -130,7 +130,7 @@ mypy_path = "src/" no_implicit_optional = true show_error_codes = true warn_unreachable = true -warn_unused_ignores = true +warn_unused_ignores = false exclude = [ 'docs/*', 'bench/*', diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 4e6fb157..0c89a881 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - This module provides a framework for the use of DNS Service Discovery - using IP multicast. +This module provides a framework for the use of DNS Service Discovery +using IP multicast. - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +Lesser General Public License for more details. - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import sys @@ -83,10 +83,10 @@ millis_to_seconds, ) -__author__ = 'Paul Scott-Murphy, William McBrine' -__maintainer__ = 'Jakub Stasiak ' -__version__ = '0.132.2' -__license__ = 'LGPL' +__author__ = "Paul Scott-Murphy, William McBrine" +__maintainer__ = "Jakub Stasiak " +__version__ = "0.132.2" +__license__ = "LGPL" __all__ = [ @@ -117,9 +117,9 @@ if sys.version_info <= (3, 6): # pragma: no cover raise ImportError( # pragma: no cover - ''' + """ Python version > 3.6 required for python-zeroconf. If you need support for Python 2 or Python 3.3-3.4 please use version 19.1 If you need support for Python 3.5 please use version 0.28.0 - ''' + """ ) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index 35a13cf6..809be9c1 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from typing import Dict, Iterable, List, Optional, Set, Tuple, Union, cast @@ -119,7 +119,12 @@ def async_expire(self, now: _float) -> List[DNSRecord]: This function must be run in from event loop. 
""" - expired = [record for records in self.cache.values() for record in records if record.is_expired(now)] + expired = [ + record + for records in self.cache.values() + for record in records + if record.is_expired(now) + ] self.async_remove_records(expired) return expired @@ -135,7 +140,9 @@ def async_get_unique(self, entry: _UniqueRecordsType) -> Optional[DNSRecord]: return None return store.get(entry) - def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DNSRecord]: + def async_all_by_details( + self, name: _str, type_: _int, class_: _int + ) -> List[DNSRecord]: """Gets all matching entries by details. This function is not thread-safe and must be called from @@ -181,7 +188,9 @@ def get(self, entry: DNSEntry) -> Optional[DNSRecord]: return cached_entry return None - def get_by_details(self, name: str, type_: _int, class_: _int) -> Optional[DNSRecord]: + def get_by_details( + self, name: str, type_: _int, class_: _int + ) -> Optional[DNSRecord]: """Gets the first matching entry by details. Returns None if no entries match. 
Calling this function is not recommended as it will only @@ -202,13 +211,19 @@ def get_by_details(self, name: str, type_: _int, class_: _int) -> Optional[DNSRe return cached_entry return None - def get_all_by_details(self, name: str, type_: _int, class_: _int) -> List[DNSRecord]: + def get_all_by_details( + self, name: str, type_: _int, class_: _int + ) -> List[DNSRecord]: """Gets all matching entries by details.""" key = name.lower() records = self.cache.get(key) if records is None: return [] - return [entry for entry in list(records) if type_ == entry.type and class_ == entry.class_] + return [ + entry + for entry in list(records) + if type_ == entry.type and class_ == entry.class_ + ] def entries_with_server(self, server: str) -> List[DNSRecord]: """Returns a list of entries whose server matches the name.""" @@ -218,7 +233,9 @@ def entries_with_name(self, name: str) -> List[DNSRecord]: """Returns a list of entries whose key matches the name.""" return list(self.cache.get(name.lower(), [])) - def current_entry_with_name_and_alias(self, name: str, alias: str) -> Optional[DNSRecord]: + def current_entry_with_name_and_alias( + self, name: str, alias: str + ) -> Optional[DNSRecord]: now = current_time_millis() for record in reversed(self.entries_with_name(name)): if ( @@ -234,7 +251,10 @@ def names(self) -> List[str]: return list(self.cache) def async_mark_unique_records_older_than_1s_to_expire( - self, unique_types: Set[Tuple[_str, _int, _int]], answers: Iterable[DNSRecord], now: _float + self, + unique_types: Set[Tuple[_str, _int, _int]], + answers: Iterable[DNSRecord], + now: _float, ) -> None: # rfc6762#section-10.2 para 2 # Since unique is set, all old records with that name, rrtype, diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index cb488b4e..5386df63 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William 
McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import asyncio @@ -121,7 +121,7 @@ def async_send_with_transport( return if log_debug: log.debug( - 'Sending to (%s, %d) via [socket %s (%s)] (%d bytes #%d) %r as %r...', + "Sending to (%s, %d) via [socket %s (%s)] (%d bytes #%d) %r as %r...", real_addr, port or _MDNS_PORT, transport.fileno, @@ -140,7 +140,6 @@ def async_send_with_transport( class Zeroconf(QuietLogger): - """Implementation of Zeroconf Multicast DNS Service Discovery Supports registration, unregistration, queries and browsing. @@ -173,12 +172,18 @@ def __init__( self.done = False - if apple_p2p and sys.platform != 'darwin': - raise RuntimeError('Option `apple_p2p` is not supported on non-Apple platforms.') + if apple_p2p and sys.platform != "darwin": + raise RuntimeError( + "Option `apple_p2p` is not supported on non-Apple platforms." 
+ ) self.unicast = unicast - listen_socket, respond_sockets = create_sockets(interfaces, unicast, ip_version, apple_p2p=apple_p2p) - log.debug('Listen socket %s, respond sockets %s', listen_socket, respond_sockets) + listen_socket, respond_sockets = create_sockets( + interfaces, unicast, ip_version, apple_p2p=apple_p2p + ) + log.debug( + "Listen socket %s, respond sockets %s", listen_socket, respond_sockets + ) self.engine = AsyncEngine(self, listen_socket, respond_sockets) @@ -188,7 +193,9 @@ def __init__( self.question_history = QuestionHistory() self.out_queue = MulticastOutgoingQueue(self, 0, _AGGREGATION_DELAY) - self.out_delay_queue = MulticastOutgoingQueue(self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY) + self.out_delay_queue = MulticastOutgoingQueue( + self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY + ) self.query_handler = QueryHandler(self) self.record_manager = RecordManager(self) @@ -202,7 +209,11 @@ def __init__( @property def started(self) -> bool: """Check if the instance has started.""" - return bool(not self.done and self.engine.running_event and self.engine.running_event.is_set()) + return bool( + not self.done + and self.engine.running_event + and self.engine.running_event.is_set() + ) def start(self) -> None: """Start Zeroconf.""" @@ -261,7 +272,11 @@ def async_notify_all(self) -> None: _resolve_all_futures_to_none(notify_futures) def get_service_info( - self, type_: str, name: str, timeout: int = 3000, question_type: Optional[DNSQuestionType] = None + self, + type_: str, + name: str, + timeout: int = 3000, + question_type: Optional[DNSQuestionType] = None, ) -> Optional[ServiceInfo]: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, @@ -317,7 +332,9 @@ def register_service( assert self.loop is not None run_coro_with_timeout( await_awaitable( - self.async_register_service(info, ttl, allow_name_change, cooperating_responders, strict) + self.async_register_service( + info, ttl, 
allow_name_change, cooperating_responders, strict + ) ), self.loop, _REGISTER_TIME * _REGISTER_BROADCASTS, @@ -345,9 +362,13 @@ async def async_register_service( info.set_server_if_missing() await self.async_wait_for_start() - await self.async_check_service(info, allow_name_change, cooperating_responders, strict) + await self.async_check_service( + info, allow_name_change, cooperating_responders, strict + ) self.registry.async_add(info) - return asyncio.ensure_future(self._async_broadcast_service(info, _REGISTER_TIME, None)) + return asyncio.ensure_future( + self._async_broadcast_service(info, _REGISTER_TIME, None) + ) def update_service(self, info: ServiceInfo) -> None: """Registers service information to the network with a default TTL. @@ -360,7 +381,9 @@ def update_service(self, info: ServiceInfo) -> None: """ assert self.loop is not None run_coro_with_timeout( - await_awaitable(self.async_update_service(info)), self.loop, _REGISTER_TIME * _REGISTER_BROADCASTS + await_awaitable(self.async_update_service(info)), + self.loop, + _REGISTER_TIME * _REGISTER_BROADCASTS, ) async def async_update_service(self, info: ServiceInfo) -> Awaitable: @@ -368,10 +391,16 @@ async def async_update_service(self, info: ServiceInfo) -> Awaitable: Zeroconf will then respond to requests for information for that service.""" self.registry.async_update(info) - return asyncio.ensure_future(self._async_broadcast_service(info, _REGISTER_TIME, None)) + return asyncio.ensure_future( + self._async_broadcast_service(info, _REGISTER_TIME, None) + ) async def async_get_service_info( - self, type_: str, name: str, timeout: int = 3000, question_type: Optional[DNSQuestionType] = None + self, + type_: str, + name: str, + timeout: int = 3000, + question_type: Optional[DNSQuestionType] = None, ) -> Optional[AsyncServiceInfo]: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, @@ -398,7 +427,9 @@ async def _async_broadcast_service( for i 
in range(_REGISTER_BROADCASTS): if i != 0: await asyncio.sleep(millis_to_seconds(interval)) - self.async_send(self.generate_service_broadcast(info, ttl, broadcast_addresses)) + self.async_send( + self.generate_service_broadcast(info, ttl, broadcast_addresses) + ) def generate_service_broadcast( self, @@ -453,7 +484,9 @@ def unregister_service(self, info: ServiceInfo) -> None: """ assert self.loop is not None run_coro_with_timeout( - self.async_unregister_service(info), self.loop, _UNREGISTER_TIME * _REGISTER_BROADCASTS + self.async_unregister_service(info), + self.loop, + _UNREGISTER_TIME * _REGISTER_BROADCASTS, ) async def async_unregister_service(self, info: ServiceInfo) -> Awaitable: @@ -467,7 +500,9 @@ async def async_unregister_service(self, info: ServiceInfo) -> Awaitable: entries = self.registry.async_get_infos_server(info.server_key) broadcast_addresses = not bool(entries) return asyncio.ensure_future( - self._async_broadcast_service(info, _UNREGISTER_TIME, 0, broadcast_addresses) + self._async_broadcast_service( + info, _UNREGISTER_TIME, 0, broadcast_addresses + ) ) def generate_unregister_all_services(self) -> Optional[DNSOutgoing]: @@ -506,7 +541,9 @@ def unregister_all_services(self) -> None: """ assert self.loop is not None run_coro_with_timeout( - self.async_unregister_all_services(), self.loop, _UNREGISTER_TIME * _REGISTER_BROADCASTS + self.async_unregister_all_services(), + self.loop, + _UNREGISTER_TIME * _REGISTER_BROADCASTS, ) async def async_check_service( @@ -531,7 +568,7 @@ async def async_check_service( raise NonUniqueNameException # change the name and look for a conflict - info.name = f'{instance_name}-{next_instance_number}.{info.type}' + info.name = f"{instance_name}-{next_instance_number}.{info.type}" next_instance_number += 1 service_type_name(info.name, strict=strict) next_time = now @@ -547,7 +584,9 @@ async def async_check_service( next_time += _CHECK_TIME def add_listener( - self, listener: RecordUpdateListener, question: 
Optional[Union[DNSQuestion, List[DNSQuestion]]] + self, + listener: RecordUpdateListener, + question: Optional[Union[DNSQuestion, List[DNSQuestion]]], ) -> None: """Adds a listener for a given question. The listener will have its update_record method called when information is available to @@ -556,7 +595,9 @@ def add_listener( This function is threadsafe """ assert self.loop is not None - self.loop.call_soon_threadsafe(self.record_manager.async_add_listener, listener, question) + self.loop.call_soon_threadsafe( + self.record_manager.async_add_listener, listener, question + ) def remove_listener(self, listener: RecordUpdateListener) -> None: """Removes a listener. @@ -564,10 +605,14 @@ def remove_listener(self, listener: RecordUpdateListener) -> None: This function is threadsafe """ assert self.loop is not None - self.loop.call_soon_threadsafe(self.record_manager.async_remove_listener, listener) + self.loop.call_soon_threadsafe( + self.record_manager.async_remove_listener, listener + ) def async_add_listener( - self, listener: RecordUpdateListener, question: Optional[Union[DNSQuestion, List[DNSQuestion]]] + self, + listener: RecordUpdateListener, + question: Optional[Union[DNSQuestion, List[DNSQuestion]]], ) -> None: """Adds a listener for a given question. 
The listener will have its update_record method called when information is available to @@ -594,7 +639,9 @@ def send( ) -> None: """Sends an outgoing packet threadsafe.""" assert self.loop is not None - self.loop.call_soon_threadsafe(self.async_send, out, addr, port, v6_flow_scope, transport) + self.loop.call_soon_threadsafe( + self.async_send, out, addr, port, v6_flow_scope, transport + ) def async_send( self, @@ -615,11 +662,23 @@ def async_send( for packet_num, packet in enumerate(out.packets()): if len(packet) > _MAX_MSG_ABSOLUTE: - self.log_warning_once("Dropping %r over-sized packet (%d bytes) %r", out, len(packet), packet) + self.log_warning_once( + "Dropping %r over-sized packet (%d bytes) %r", + out, + len(packet), + packet, + ) return for send_transport in transports: async_send_with_transport( - log_debug, send_transport, packet, packet_num, out, addr, port, v6_flow_scope + log_debug, + send_transport, + packet, + packet_num, + out, + addr, + port, + v6_flow_scope, ) def _close(self) -> None: @@ -672,7 +731,7 @@ async def _async_close(self) -> None: await self.engine._async_close() # pylint: disable=protected-access self._shutdown_threads() - def __enter__(self) -> 'Zeroconf': + def __enter__(self) -> "Zeroconf": return self def __exit__( # pylint: disable=useless-return diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 66fb5b86..f85969a9 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. 
- - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import enum @@ -33,7 +33,9 @@ _LEN_SHORT = 2 _LEN_INT = 4 -_BASE_MAX_SIZE = _LEN_SHORT + _LEN_SHORT + _LEN_INT + _LEN_SHORT # type # class # ttl # length +_BASE_MAX_SIZE = ( + _LEN_SHORT + _LEN_SHORT + _LEN_INT + _LEN_SHORT +) # type # class # ttl # length _NAME_COMPRESSION_MIN_SIZE = _LEN_BYTE * 2 _EXPIRE_FULL_TIME_MS = 1000 @@ -62,10 +64,9 @@ class DNSQuestionType(enum.Enum): class DNSEntry: - """A DNS entry""" - __slots__ = ('key', 'name', 'type', 'class_', 'unique') + __slots__ = ("key", "name", "type", "class_", "unique") def __init__(self, name: str, type_: int, class_: int) -> None: self.name = name @@ -78,7 +79,11 @@ def _set_class(self, class_: _int) -> None: self.unique = (class_ & _CLASS_UNIQUE) != 0 def _dns_entry_matches(self, other) -> bool: # type: ignore[no-untyped-def] - return self.key == other.key and self.type == other.type and self.class_ == other.class_ + return ( + self.key == other.key + and self.type == other.type + and self.class_ == other.class_ + ) def __eq__(self, other: Any) -> bool: """Equality test on key (lowercase name), type, and class""" @@ -107,18 +112,21 @@ def entry_to_string(self, hdr: str, other: Optional[Union[bytes, str]]) -> str: class DNSQuestion(DNSEntry): - """A DNS question entry""" - __slots__ = ('_hash',) + __slots__ = ("_hash",) def __init__(self, name: str, type_: int, class_: int) -> None: super().__init__(name, type_, class_) self._hash = hash((self.key, type_, self.class_)) - def answered_by(self, rec: 'DNSRecord') -> bool: + def answered_by(self, rec: "DNSRecord") -> bool: """Returns true if the question is answered by the record""" - return self.class_ == rec.class_ and self.type in (rec.type, _TYPE_ANY) and self.name == rec.name + return ( + self.class_ == rec.class_ + and self.type in (rec.type, 
_TYPE_ANY) + and self.name == rec.name + ) def __hash__(self) -> int: return self._hash @@ -130,7 +138,9 @@ def __eq__(self, other: Any) -> bool: @property def max_size(self) -> int: """Maximum size of the question in the packet.""" - return len(self.name.encode('utf-8')) + _LEN_BYTE + _LEN_SHORT + _LEN_SHORT # type # class + return ( + len(self.name.encode("utf-8")) + _LEN_BYTE + _LEN_SHORT + _LEN_SHORT + ) # type # class @property def unicast(self) -> bool: @@ -157,14 +167,18 @@ def __repr__(self) -> str: class DNSRecord(DNSEntry): - """A DNS record - like a DNS entry, but has a TTL""" - __slots__ = ('ttl', 'created') + __slots__ = ("ttl", "created") # TODO: Switch to just int ttl def __init__( - self, name: str, type_: int, class_: int, ttl: Union[float, int], created: Optional[float] = None + self, + name: str, + type_: int, + class_: int, + ttl: Union[float, int], + created: Optional[float] = None, ) -> None: super().__init__(name, type_, class_) self.ttl = ttl @@ -174,7 +188,7 @@ def __eq__(self, other: Any) -> bool: # pylint: disable=no-self-use """Abstract method""" raise AbstractMethodException - def suppressed_by(self, msg: 'DNSIncoming') -> bool: + def suppressed_by(self, msg: "DNSIncoming") -> bool: """Returns true if any answer in a message can suffice for the information held in this record.""" answers = msg.answers() @@ -221,7 +235,7 @@ def set_created_ttl(self, created: _float, ttl: Union[float, int]) -> None: self.created = created self.ttl = ttl - def write(self, out: 'DNSOutgoing') -> None: # pylint: disable=no-self-use + def write(self, out: "DNSOutgoing") -> None: # pylint: disable=no-self-use """Abstract method""" raise AbstractMethodException @@ -232,10 +246,9 @@ def to_string(self, other: Union[bytes, str]) -> str: class DNSAddress(DNSRecord): - """A DNS address record""" - __slots__ = ('_hash', 'address', 'scope_id') + __slots__ = ("_hash", "address", "scope_id") def __init__( self, @@ -252,7 +265,7 @@ def __init__( self.scope_id = scope_id 
self._hash = hash((self.key, type_, self.class_, address, scope_id)) - def write(self, out: 'DNSOutgoing') -> None: + def write(self, out: "DNSOutgoing") -> None: """Used in constructing an outgoing packet""" out.write_string(self.address) @@ -276,7 +289,8 @@ def __repr__(self) -> str: try: return self.to_string( socket.inet_ntop( - socket.AF_INET6 if _is_v6_address(self.address) else socket.AF_INET, self.address + socket.AF_INET6 if _is_v6_address(self.address) else socket.AF_INET, + self.address, ) ) except (ValueError, OSError): @@ -284,23 +298,29 @@ def __repr__(self) -> str: class DNSHinfo(DNSRecord): - """A DNS host information record""" - __slots__ = ('_hash', 'cpu', 'os') + __slots__ = ("_hash", "cpu", "os") def __init__( - self, name: str, type_: int, class_: int, ttl: int, cpu: str, os: str, created: Optional[float] = None + self, + name: str, + type_: int, + class_: int, + ttl: int, + cpu: str, + os: str, + created: Optional[float] = None, ) -> None: super().__init__(name, type_, class_, ttl, created) self.cpu = cpu self.os = os self._hash = hash((self.key, type_, self.class_, cpu, os)) - def write(self, out: 'DNSOutgoing') -> None: + def write(self, out: "DNSOutgoing") -> None: """Used in constructing an outgoing packet""" - out.write_character_string(self.cpu.encode('utf-8')) - out.write_character_string(self.os.encode('utf-8')) + out.write_character_string(self.cpu.encode("utf-8")) + out.write_character_string(self.os.encode("utf-8")) def __eq__(self, other: Any) -> bool: """Tests equality on cpu and os.""" @@ -308,7 +328,11 @@ def __eq__(self, other: Any) -> bool: def _eq(self, other) -> bool: # type: ignore[no-untyped-def] """Tests equality on cpu and os.""" - return self.cpu == other.cpu and self.os == other.os and self._dns_entry_matches(other) + return ( + self.cpu == other.cpu + and self.os == other.os + and self._dns_entry_matches(other) + ) def __hash__(self) -> int: """Hash to compare like DNSHinfo.""" @@ -320,13 +344,18 @@ def __repr__(self) 
-> str: class DNSPointer(DNSRecord): - """A DNS pointer record""" - __slots__ = ('_hash', 'alias', 'alias_key') + __slots__ = ("_hash", "alias", "alias_key") def __init__( - self, name: str, type_: int, class_: int, ttl: int, alias: str, created: Optional[float] = None + self, + name: str, + type_: int, + class_: int, + ttl: int, + alias: str, + created: Optional[float] = None, ) -> None: super().__init__(name, type_, class_, ttl, created) self.alias = alias @@ -343,7 +372,7 @@ def max_size_compressed(self) -> int: + _NAME_COMPRESSION_MIN_SIZE ) - def write(self, out: 'DNSOutgoing') -> None: + def write(self, out: "DNSOutgoing") -> None: """Used in constructing an outgoing packet""" out.write_name(self.alias) @@ -365,19 +394,24 @@ def __repr__(self) -> str: class DNSText(DNSRecord): - """A DNS text record""" - __slots__ = ('_hash', 'text') + __slots__ = ("_hash", "text") def __init__( - self, name: str, type_: int, class_: int, ttl: int, text: bytes, created: Optional[float] = None + self, + name: str, + type_: int, + class_: int, + ttl: int, + text: bytes, + created: Optional[float] = None, ) -> None: super().__init__(name, type_, class_, ttl, created) self.text = text self._hash = hash((self.key, type_, self.class_, text)) - def write(self, out: 'DNSOutgoing') -> None: + def write(self, out: "DNSOutgoing") -> None: """Used in constructing an outgoing packet""" out.write_string(self.text) @@ -401,10 +435,9 @@ def __repr__(self) -> str: class DNSService(DNSRecord): - """A DNS service record""" - __slots__ = ('_hash', 'priority', 'weight', 'port', 'server', 'server_key') + __slots__ = ("_hash", "priority", "weight", "port", "server", "server_key") def __init__( self, @@ -424,9 +457,11 @@ def __init__( self.port = port self.server = server self.server_key = server.lower() - self._hash = hash((self.key, type_, self.class_, priority, weight, port, self.server_key)) + self._hash = hash( + (self.key, type_, self.class_, priority, weight, port, self.server_key) + ) - def 
write(self, out: 'DNSOutgoing') -> None: + def write(self, out: "DNSOutgoing") -> None: """Used in constructing an outgoing packet""" out.write_short(self.priority) out.write_short(self.weight) @@ -457,10 +492,9 @@ def __repr__(self) -> str: class DNSNsec(DNSRecord): - """A DNS NSEC record""" - __slots__ = ('_hash', 'next_name', 'rdtypes') + __slots__ = ("_hash", "next_name", "rdtypes") def __init__( self, @@ -477,9 +511,9 @@ def __init__( self.rdtypes = sorted(rdtypes) self._hash = hash((self.key, type_, self.class_, next_name, *self.rdtypes)) - def write(self, out: 'DNSOutgoing') -> None: + def write(self, out: "DNSOutgoing") -> None: """Used in constructing an outgoing packet.""" - bitmap = bytearray(b'\0' * 32) + bitmap = bytearray(b"\0" * 32) total_octets = 0 for rdtype in self.rdtypes: if rdtype > 255: # mDNS only supports window 0 @@ -516,7 +550,9 @@ def __hash__(self) -> int: def __repr__(self) -> str: """String representation""" return self.to_string( - self.next_name + "," + "|".join([self.get_type(type_) for type_ in self.rdtypes]) + self.next_name + + "," + + "|".join([self.get_type(type_) for type_ in self.rdtypes]) ) @@ -526,7 +562,7 @@ def __repr__(self) -> str: class DNSRRSet: """A set of dns records with a lookup to get the ttl.""" - __slots__ = ('_records', '_lookup') + __slots__ = ("_records", "_lookup") def __init__(self, records: List[DNSRecord]) -> None: """Create an RRset from records sets.""" diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 9e455003..6083c19a 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. 
- - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import asyncio @@ -45,20 +45,20 @@ class AsyncEngine: """An engine wraps sockets in the event loop.""" __slots__ = ( - 'loop', - 'zc', - 'protocols', - 'readers', - 'senders', - 'running_event', - '_listen_socket', - '_respond_sockets', - '_cleanup_timer', + "loop", + "zc", + "protocols", + "readers", + "senders", + "running_event", + "_listen_socket", + "_respond_sockets", + "_cleanup_timer", ) def __init__( self, - zeroconf: 'Zeroconf', + zeroconf: "Zeroconf", listen_socket: Optional[socket.socket], respond_sockets: List[socket.socket], ) -> None: @@ -72,7 +72,11 @@ def __init__( self._respond_sockets = respond_sockets self._cleanup_timer: Optional[asyncio.TimerHandle] = None - def setup(self, loop: asyncio.AbstractEventLoop, loop_thread_ready: Optional[threading.Event]) -> None: + def setup( + self, + loop: asyncio.AbstractEventLoop, + loop_thread_ready: Optional[threading.Event], + ) -> None: """Set up the instance.""" self.loop = loop self.running_event = asyncio.Event() @@ -102,19 +106,28 @@ async def _async_create_endpoints(self) -> None: for s in reader_sockets: transport, protocol = await loop.create_datagram_endpoint( - lambda: AsyncListener(self.zc), sock=s # type: ignore[arg-type, return-value] + lambda: AsyncListener(self.zc), # type: ignore[arg-type, return-value] + sock=s, ) self.protocols.append(cast(AsyncListener, protocol)) - self.readers.append(make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) + self.readers.append( + make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) + ) if s in sender_sockets: - self.senders.append(make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) + self.senders.append( + make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) + ) def 
_async_cache_cleanup(self) -> None: """Periodic cache cleanup.""" now = current_time_millis() self.zc.question_history.async_expire(now) self.zc.record_manager.async_updates( - now, [RecordUpdate(record, record) for record in self.zc.cache.async_expire(now)] + now, + [ + RecordUpdate(record, record) + for record in self.zc.cache.async_expire(now) + ], ) self.zc.record_manager.async_updates_complete(False) self._async_schedule_next_cache_cleanup() @@ -123,7 +136,9 @@ def _async_schedule_next_cache_cleanup(self) -> None: """Schedule the next cache cleanup.""" loop = self.loop assert loop is not None - self._cleanup_timer = loop.call_at(loop.time() + _CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) + self._cleanup_timer = loop.call_at( + loop.time() + _CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup + ) async def _async_close(self) -> None: """Cancel and wait for the cleanup task to finish.""" diff --git a/src/zeroconf/_exceptions.py b/src/zeroconf/_exceptions.py index f4fcbd55..5eb58f79 100644 --- a/src/zeroconf/_exceptions.py +++ b/src/zeroconf/_exceptions.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ diff --git a/src/zeroconf/_handlers/__init__.py b/src/zeroconf/_handlers/__init__.py index 2ef4b15b..30920c6a 100644 --- a/src/zeroconf/_handlers/__init__.py +++ b/src/zeroconf/_handlers/__init__.py @@ -1,21 +1,21 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - This module provides a framework for the use of DNS Service Discovery - using IP multicast. +This module provides a framework for the use of DNS Service Discovery +using IP multicast. 
- This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index a2dbd66a..74efee2c 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. 
- - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from operator import attrgetter @@ -34,7 +34,7 @@ MULTICAST_DELAY_RANDOM_INTERVAL = (20, 120) -NAME_GETTER = attrgetter('name') +NAME_GETTER = attrgetter("name") _FLAGS_QR_RESPONSE_AA = _FLAGS_QR_RESPONSE | _FLAGS_AA @@ -44,7 +44,7 @@ class QuestionAnswers: """A group of answers to a question.""" - __slots__ = ('ucast', 'mcast_now', 'mcast_aggregate', 'mcast_aggregate_last_second') + __slots__ = ("ucast", "mcast_now", "mcast_aggregate", "mcast_aggregate_last_second") def __init__( self, @@ -62,24 +62,31 @@ def __init__( def __repr__(self) -> str: """Return a string representation of this QuestionAnswers.""" return ( - f'QuestionAnswers(ucast={self.ucast}, mcast_now={self.mcast_now}, ' - f'mcast_aggregate={self.mcast_aggregate}, ' - f'mcast_aggregate_last_second={self.mcast_aggregate_last_second})' + f"QuestionAnswers(ucast={self.ucast}, mcast_now={self.mcast_now}, " + f"mcast_aggregate={self.mcast_aggregate}, " + f"mcast_aggregate_last_second={self.mcast_aggregate_last_second})" ) class AnswerGroup: """A group of answers scheduled to be sent at the same time.""" - __slots__ = ('send_after', 'send_before', 'answers') + __slots__ = ("send_after", "send_before", "answers") - def __init__(self, send_after: float_, send_before: float_, answers: _AnswerWithAdditionalsType) -> None: + def __init__( + self, + send_after: float_, + send_before: float_, + answers: _AnswerWithAdditionalsType, + ) -> None: self.send_after = send_after # Must be sent after this time self.send_before = send_before # Must be sent before this time self.answers = answers -def construct_outgoing_multicast_answers(answers: _AnswerWithAdditionalsType) -> DNSOutgoing: +def construct_outgoing_multicast_answers( + answers: _AnswerWithAdditionalsType, +) -> DNSOutgoing: """Add answers and 
additionals to a DNSOutgoing.""" out = DNSOutgoing(_FLAGS_QR_RESPONSE_AA, True) _add_answers_additionals(out, answers) @@ -87,7 +94,10 @@ def construct_outgoing_multicast_answers(answers: _AnswerWithAdditionalsType) -> def construct_outgoing_unicast_answers( - answers: _AnswerWithAdditionalsType, ucast_source: bool, questions: List[DNSQuestion], id_: int_ + answers: _AnswerWithAdditionalsType, + ucast_source: bool, + questions: List[DNSQuestion], + id_: int_, ) -> DNSOutgoing: """Add answers and additionals to a DNSOutgoing.""" out = DNSOutgoing(_FLAGS_QR_RESPONSE_AA, False, id_) @@ -99,7 +109,9 @@ def construct_outgoing_unicast_answers( return out -def _add_answers_additionals(out: DNSOutgoing, answers: _AnswerWithAdditionalsType) -> None: +def _add_answers_additionals( + out: DNSOutgoing, answers: _AnswerWithAdditionalsType +) -> None: # Find additionals and suppress any additionals that are already in answers sending: Set[DNSRecord] = set(answers) # Answers are sorted to group names together to increase the chance diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py b/src/zeroconf/_handlers/multicast_outgoing_queue.py index 23288d18..49242540 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.py +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import random @@ -53,7 +53,9 @@ class MulticastOutgoingQueue: "_aggregation_delay", ) - def __init__(self, zeroconf: 'Zeroconf', additional_delay: _int, max_aggregation_delay: _int) -> None: + def __init__( + self, zeroconf: "Zeroconf", additional_delay: _int, max_aggregation_delay: _int + ) -> None: self.zc = zeroconf self.queue: deque[AnswerGroup] = deque() # Additional delay is used to implement @@ -69,7 +71,9 @@ def async_add(self, now: _float, answers: _AnswerWithAdditionalsType) -> None: loop = self.zc.loop if TYPE_CHECKING: assert loop is not None - random_int = RAND_INT(self._multicast_delay_random_min, self._multicast_delay_random_max) + random_int = RAND_INT( + self._multicast_delay_random_min, self._multicast_delay_random_max + ) random_delay = random_int + 
self._additional_delay send_after = now + random_delay send_before = now + self._aggregation_delay + self._additional_delay @@ -83,7 +87,9 @@ def async_add(self, now: _float, answers: _AnswerWithAdditionalsType) -> None: last_group.answers.update(answers) return else: - loop.call_at(loop.time() + millis_to_seconds(random_delay), self.async_ready) + loop.call_at( + loop.time() + millis_to_seconds(random_delay), self.async_ready + ) self.queue.append(AnswerGroup(send_after, send_before, answers)) def _remove_answers_from_queue(self, answers: _AnswerWithAdditionalsType) -> None: @@ -103,7 +109,10 @@ def async_ready(self) -> None: if len(self.queue) > 1 and self.queue[0].send_before > now: # There is more than one answer in the queue, # delay until we have to send it (first answer group reaches send_before) - loop.call_at(loop.time() + millis_to_seconds(self.queue[0].send_before - now), self.async_ready) + loop.call_at( + loop.time() + millis_to_seconds(self.queue[0].send_before - now), + self.async_ready, + ) return answers: _AnswerWithAdditionalsType = {} @@ -114,7 +123,10 @@ def async_ready(self) -> None: if len(self.queue): # If there are still groups in the queue that are not ready to send # be sure we schedule them to go out later - loop.call_at(loop.time() + millis_to_seconds(self.queue[0].send_after - now), self.async_ready) + loop.call_at( + loop.time() + millis_to_seconds(self.queue[0].send_after - now), + self.async_ready, + ) if answers: # pragma: no branch # If we have the same answer scheduled to go out, remove them diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index ba9c9e31..a2f5e9f5 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP 
multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Union, cast @@ -71,7 +71,6 @@ class _AnswerStrategy: - __slots__ = ("question", "strategy_type", "types", "services") def __init__( @@ -103,7 +102,9 @@ class _QueryResponse: "_mcast_aggregate_last_second", ) - def __init__(self, cache: DNSCache, questions: List[DNSQuestion], is_probe: bool, now: float) -> None: + def __init__( + self, cache: DNSCache, questions: List[DNSQuestion], is_probe: bool, now: float + ) -> None: """Build a query response.""" self._is_probe = is_probe self._questions = questions @@ -158,8 +159,12 @@ def answers( ucast = {r: self._additionals[r] for r in self._ucast} mcast_now = {r: self._additionals[r] for r in self._mcast_now} mcast_aggregate = {r: self._additionals[r] for r in self._mcast_aggregate} - mcast_aggregate_last_second = {r: self._additionals[r] for r in self._mcast_aggregate_last_second} - return QuestionAnswers(ucast, mcast_now, mcast_aggregate, mcast_aggregate_last_second) + mcast_aggregate_last_second = { + r: self._additionals[r] for r in self._mcast_aggregate_last_second + } + return QuestionAnswers( + ucast, mcast_now, mcast_aggregate, mcast_aggregate_last_second + ) def _has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: """Check to see if a record has been mcasted recently. 
@@ -185,15 +190,24 @@ def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) maybe_entry = self._cache.async_get_unique(record) - return bool(maybe_entry is not None and self._now - maybe_entry.created < _ONE_SECOND) + return bool( + maybe_entry is not None and self._now - maybe_entry.created < _ONE_SECOND + ) class QueryHandler: """Query the ServiceRegistry.""" - __slots__ = ("zc", "registry", "cache", "question_history", "out_queue", "out_delay_queue") + __slots__ = ( + "zc", + "registry", + "cache", + "question_history", + "out_queue", + "out_delay_queue", + ) - def __init__(self, zc: 'Zeroconf') -> None: + def __init__(self, zc: "Zeroconf") -> None: """Init the query handler.""" self.zc = zc self.registry = zc.registry @@ -203,7 +217,10 @@ def __init__(self, zc: 'Zeroconf') -> None: self.out_delay_queue = zc.out_delay_queue def _add_service_type_enumeration_query_answers( - self, types: List[str], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet + self, + types: List[str], + answer_set: _AnswerWithAdditionalsType, + known_answers: DNSRRSet, ) -> None: """Provide an answer to a service type enumeration query. 
@@ -211,13 +228,21 @@ def _add_service_type_enumeration_query_answers( """ for stype in types: dns_pointer = DNSPointer( - _SERVICE_TYPE_ENUMERATION_NAME, _TYPE_PTR, _CLASS_IN, _DNS_OTHER_TTL, stype, 0.0 + _SERVICE_TYPE_ENUMERATION_NAME, + _TYPE_PTR, + _CLASS_IN, + _DNS_OTHER_TTL, + stype, + 0.0, ) if not known_answers.suppresses(dns_pointer): answer_set[dns_pointer] = set() def _add_pointer_answers( - self, services: List[ServiceInfo], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet + self, + services: List[ServiceInfo], + answer_set: _AnswerWithAdditionalsType, + known_answers: DNSRRSet, ) -> None: """Answer PTR/ANY question.""" for service in services: @@ -253,12 +278,16 @@ def _add_address_answers( missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types if answers: if missing_types: - assert service.server is not None, "Service server must be set for NSEC record." + assert ( + service.server is not None + ), "Service server must be set for NSEC record." additionals.add(service._dns_nsec(list(missing_types), None)) for answer in answers: answer_set[answer] = additionals elif type_ in missing_types: - assert service.server is not None, "Service server must be set for NSEC record." + assert ( + service.server is not None + ), "Service server must be set for NSEC record." 
answer_set[service._dns_nsec(list(missing_types), None)] = set() def _answer_question( @@ -273,11 +302,15 @@ def _answer_question( answer_set: _AnswerWithAdditionalsType = {} if strategy_type == _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION: - self._add_service_type_enumeration_query_answers(types, answer_set, known_answers) + self._add_service_type_enumeration_query_answers( + types, answer_set, known_answers + ) elif strategy_type == _ANSWER_STRATEGY_POINTER: self._add_pointer_answers(services, answer_set, known_answers) elif strategy_type == _ANSWER_STRATEGY_ADDRESS: - self._add_address_answers(services, answer_set, known_answers, question.type) + self._add_address_answers( + services, answer_set, known_answers, question.type + ) elif strategy_type == _ANSWER_STRATEGY_SERVICE: # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.2. @@ -334,9 +367,15 @@ def async_response( # pylint: disable=unused-argument if not is_unicast: if known_answers_set is None: # pragma: no branch known_answers_set = known_answers.lookup_set() - self.question_history.add_question_at_time(question, now, known_answers_set) + self.question_history.add_question_at_time( + question, now, known_answers_set + ) answer_set = self._answer_question( - question, strategy.strategy_type, strategy.types, strategy.services, known_answers + question, + strategy.strategy_type, + strategy.types, + strategy.services, + known_answers, ) if not ucast_source and is_unicast: query_res.add_qu_question_response(answer_set) @@ -364,7 +403,10 @@ def _get_answer_strategies( if types: strategies.append( _AnswerStrategy( - question, _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION, types, _EMPTY_SERVICES_LIST + question, + _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION, + types, + _EMPTY_SERVICES_LIST, ) ) return strategies @@ -373,14 +415,18 @@ def _get_answer_strategies( services = self.registry.async_get_infos_type(question_lower_name) if services: strategies.append( - 
_AnswerStrategy(question, _ANSWER_STRATEGY_POINTER, _EMPTY_TYPES_LIST, services) + _AnswerStrategy( + question, _ANSWER_STRATEGY_POINTER, _EMPTY_TYPES_LIST, services + ) ) if type_ in (_TYPE_A, _TYPE_AAAA, _TYPE_ANY): services = self.registry.async_get_infos_server(question_lower_name) if services: strategies.append( - _AnswerStrategy(question, _ANSWER_STRATEGY_ADDRESS, _EMPTY_TYPES_LIST, services) + _AnswerStrategy( + question, _ANSWER_STRATEGY_ADDRESS, _EMPTY_TYPES_LIST, services + ) ) if type_ in (_TYPE_SRV, _TYPE_TXT, _TYPE_ANY): @@ -388,11 +434,21 @@ def _get_answer_strategies( if service is not None: if type_ in (_TYPE_SRV, _TYPE_ANY): strategies.append( - _AnswerStrategy(question, _ANSWER_STRATEGY_SERVICE, _EMPTY_TYPES_LIST, [service]) + _AnswerStrategy( + question, + _ANSWER_STRATEGY_SERVICE, + _EMPTY_TYPES_LIST, + [service], + ) ) if type_ in (_TYPE_TXT, _TYPE_ANY): strategies.append( - _AnswerStrategy(question, _ANSWER_STRATEGY_TEXT, _EMPTY_TYPES_LIST, [service]) + _AnswerStrategy( + question, + _ANSWER_STRATEGY_TEXT, + _EMPTY_TYPES_LIST, + [service], + ) ) return strategies @@ -421,17 +477,23 @@ def handle_assembled_query( if question_answers.ucast: questions = first_packet._questions id_ = first_packet.id - out = construct_outgoing_unicast_answers(question_answers.ucast, ucast_source, questions, id_) + out = construct_outgoing_unicast_answers( + question_answers.ucast, ucast_source, questions, id_ + ) # When sending unicast, only send back the reply # via the same socket that it was recieved from # as we know its reachable from that socket self.zc.async_send(out, addr, port, v6_flow_scope, transport) if question_answers.mcast_now: - self.zc.async_send(construct_outgoing_multicast_answers(question_answers.mcast_now)) + self.zc.async_send( + construct_outgoing_multicast_answers(question_answers.mcast_now) + ) if question_answers.mcast_aggregate: self.out_queue.async_add(first_packet.now, question_answers.mcast_aggregate) if 
question_answers.mcast_aggregate_last_second: # https://datatracker.ietf.org/doc/html/rfc6762#section-14 # If we broadcast it in the last second, we have to delay # at least a second before we send it again - self.out_delay_queue.async_add(first_packet.now, question_answers.mcast_aggregate_last_second) + self.out_delay_queue.async_add( + first_packet.now, question_answers.mcast_aggregate_last_second + ) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 70f2e5e1..86286dec 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. 
+ +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Union, cast @@ -42,7 +42,7 @@ class RecordManager: __slots__ = ("zc", "cache", "listeners") - def __init__(self, zeroconf: 'Zeroconf') -> None: + def __init__(self, zeroconf: "Zeroconf") -> None: """Init the record manager.""" self.zc = zeroconf self.cache = zeroconf.cache @@ -97,7 +97,11 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: # level of rate limit and safe guards so we use 1/4 of the recommended value. 
record_type = record.type record_ttl = record.ttl - if record_ttl and record_type == _TYPE_PTR and record_ttl < _DNS_PTR_MIN_TTL: + if ( + record_ttl + and record_type == _TYPE_PTR + and record_ttl < _DNS_PTR_MIN_TTL + ): log.debug( "Increasing effective ttl of %s to minimum of %s to protect against excessive refreshes.", record, @@ -128,7 +132,9 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: removes.add(record) if unique_types: - cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now) + cache.async_mark_unique_records_older_than_1s_to_expire( + unique_types, answers, now + ) if updates: self.async_updates(now, updates) @@ -161,7 +167,9 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: self.async_updates_complete(new) def async_add_listener( - self, listener: RecordUpdateListener, question: Optional[Union[DNSQuestion, List[DNSQuestion]]] + self, + listener: RecordUpdateListener, + question: Optional[Union[DNSQuestion, List[DNSQuestion]]], ) -> None: """Adds a listener for a given question. The listener will have its update_record method called when information is available to @@ -212,4 +220,4 @@ def async_remove_listener(self, listener: RecordUpdateListener) -> None: self.listeners.remove(listener) self.zc.async_notify_all() except ValueError as e: - log.exception('Failed to remove listener: %r', e) + log.exception("Failed to remove listener: %r", e) diff --git a/src/zeroconf/_history.py b/src/zeroconf/_history.py index db6a394d..2e58b14e 100644 --- a/src/zeroconf/_history.py +++ b/src/zeroconf/_history.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. 
- - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from typing import Dict, List, Set, Tuple @@ -38,11 +38,15 @@ def __init__(self) -> None: """Init a new QuestionHistory.""" self._history: Dict[DNSQuestion, Tuple[float, Set[DNSRecord]]] = {} - def add_question_at_time(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> None: + def add_question_at_time( + self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord] + ) -> None: """Remember a question with known answers.""" self._history[question] = (now, known_answers) - def suppresses(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> bool: + def suppresses( + self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord] + ) -> bool: """Check to see if a question should be suppressed. https://datatracker.ietf.org/doc/html/rfc6762#section-7.3 diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 0f8a8cac..2956ad52 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import asyncio @@ -47,7 +47,6 @@ class AsyncListener: - """A Listener is used by this module to listen on the multicast group to which DNS messages are sent, allowing the implementation to cache information as it arrives. 
@@ -56,20 +55,20 @@ class AsyncListener: the read() method called when a socket is available for reading.""" __slots__ = ( - 'zc', - '_registry', - '_record_manager', + "zc", + "_registry", + "_record_manager", "_query_handler", - 'data', - 'last_time', - 'last_message', - 'transport', - 'sock_description', - '_deferred', - '_timers', + "data", + "last_time", + "last_message", + "transport", + "sock_description", + "_deferred", + "_timers", ) - def __init__(self, zc: 'Zeroconf') -> None: + def __init__(self, zc: "Zeroconf") -> None: self.zc = zc self._registry = zc.registry self._record_manager = zc.record_manager @@ -120,7 +119,7 @@ def _process_datagram_at_time( # Guard against duplicate packets if debug: log.debug( - 'Ignoring duplicate message with no unicast questions received from %s [socket %s] (%d bytes) as [%r]', + "Ignoring duplicate message with no unicast questions received from %s [socket %s] (%d bytes) as [%r]", addrs, self.sock_description, data_len, @@ -140,7 +139,9 @@ def _process_datagram_at_time( # https://github.com/python/mypy/issues/1178 addr, port, flow, scope = addrs # type: ignore if debug: # pragma: no branch - log.debug('IPv6 scope_id %d associated to the receiving interface', scope) + log.debug( + "IPv6 scope_id %d associated to the receiving interface", scope + ) v6_flow_scope = (flow, scope) addr_port = (addr, port) @@ -151,7 +152,7 @@ def _process_datagram_at_time( if msg.valid is True: if debug: log.debug( - 'Received from %r:%r [socket %s]: %r (%d bytes) as [%r]', + "Received from %r:%r [socket %s]: %r (%d bytes) as [%r]", addr, port, self.sock_description, @@ -162,7 +163,7 @@ def _process_datagram_at_time( else: if debug: log.debug( - 'Received from %r:%r [socket %s]: (%d bytes) [%r]', + "Received from %r:%r [socket %s]: (%d bytes) [%r]", addr, port, self.sock_description, @@ -208,7 +209,13 @@ def handle_query_or_defer( assert loop is not None self._cancel_any_timers_for_addr(addr) self._timers[addr] = loop.call_at( - loop.time() + 
delay, self._respond_query, None, addr, port, transport, v6_flow_scope + loop.time() + delay, + self._respond_query, + None, + addr, + port, + transport, + v6_flow_scope, ) def _cancel_any_timers_for_addr(self, addr: _str) -> None: @@ -230,7 +237,9 @@ def _respond_query( if msg: packets.append(msg) - self._query_handler.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) + self._query_handler.handle_assembled_query( + packets, addr, port, transport, v6_flow_scope + ) def error_received(self, exc: Exception) -> None: """Likely socket closed or IPv6.""" @@ -242,9 +251,13 @@ def error_received(self, exc: Exception) -> None: QuietLogger.log_exception_once(exc, msg_str, exc) def connection_made(self, transport: asyncio.BaseTransport) -> None: - wrapped_transport = make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) + wrapped_transport = make_wrapped_transport( + cast(asyncio.DatagramTransport, transport) + ) self.transport = wrapped_transport - self.sock_description = f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" + self.sock_description = ( + f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" + ) def connection_lost(self, exc: Optional[Exception]) -> None: """Handle connection lost.""" diff --git a/src/zeroconf/_logger.py b/src/zeroconf/_logger.py index b0e66bc9..9e726107 100644 --- a/src/zeroconf/_logger.py +++ b/src/zeroconf/_logger.py @@ -1,31 +1,31 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - ) - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. 
- - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine + ) +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import logging import sys from typing import Any, Dict, Union, cast -log = logging.getLogger(__name__.split('.', maxsplit=1)[0]) +log = logging.getLogger(__name__.split(".", maxsplit=1)[0]) log.addHandler(logging.NullHandler()) @@ -50,7 +50,7 @@ def log_exception_warning(cls, *logger_data: Any) -> None: logger = log.warning else: logger = log.debug - logger(*(logger_data or ['Exception occurred']), exc_info=True) + logger(*(logger_data or ["Exception occurred"]), exc_info=True) @classmethod def log_exception_debug(cls, *logger_data: Any) -> None: @@ -61,7 +61,7 @@ def log_exception_debug(cls, *logger_data: Any) -> None: # log the trace only on the first time cls._seen_logs[exc_str] = exc_info log_exc_info = True - log.debug(*(logger_data or ['Exception occurred']), exc_info=log_exc_info) + log.debug(*(logger_data or ["Exception occurred"]), exc_info=log_exc_info) @classmethod def log_warning_once(cls, *args: Any) -> None: diff --git a/src/zeroconf/_protocol/__init__.py b/src/zeroconf/_protocol/__init__.py index 2ef4b15b..30920c6a 100644 --- a/src/zeroconf/_protocol/__init__.py +++ b/src/zeroconf/_protocol/__init__.py @@ -1,21 +1,21 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - This module provides a framework for the use of DNS Service Discovery - using IP multicast. +This module provides a framework for the use of DNS Service Discovery +using IP multicast. 
- This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 9e208b63..0ad6efce 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. 
- - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import struct @@ -71,24 +71,24 @@ class DNSIncoming: __slots__ = ( "_did_read_others", - 'flags', - 'offset', - 'data', - 'view', - '_data_len', - '_name_cache', - '_questions', - '_answers', - 'id', - '_num_questions', - '_num_answers', - '_num_authorities', - '_num_additionals', - 'valid', - 'now', - 'scope_id', - 'source', - '_has_qu_question', + "flags", + "offset", + "data", + "view", + "_data_len", + "_name_cache", + "_questions", + "_answers", + "id", + "_num_questions", + "_num_answers", + "_num_authorities", + "_num_additionals", + "valid", + "now", + "scope_id", + "source", + "_has_qu_question", ) def __init__( @@ -122,7 +122,7 @@ def __init__( self._initial_parse() except DECODE_EXCEPTIONS: self._log_exception_debug( - 'Received invalid packet from %s at offset %d while unpacking %r', + "Received invalid packet from %s at offset %d while unpacking %r", self.source, self.offset, self.data, @@ -187,7 +187,7 @@ def _log_exception_debug(cls, *logger_data: Any) -> None: # log the trace only on the first time _seen_logs[exc_str] = exc_info log_exc_info = True - log.debug(*(logger_data or ['Exception occurred']), exc_info=log_exc_info) + log.debug(*(logger_data or ["Exception occurred"]), exc_info=log_exc_info) def answers(self) -> List[DNSRecord]: """Answers in the packet.""" @@ -196,7 +196,7 @@ def answers(self) -> List[DNSRecord]: self._read_others() except DECODE_EXCEPTIONS: self._log_exception_debug( - 'Received invalid packet from %s at offset %d while unpacking %r', + "Received invalid packet from %s at offset %d while unpacking %r", self.source, self.offset, self.data, @@ -208,17 +208,17 @@ def is_probe(self) -> bool: return self._num_authorities > 0 def __repr__(self) -> str: - return '' % ', '.join( + return "" % ", ".join( [ - 'id=%s' % 
self.id, - 'flags=%s' % self.flags, - 'truncated=%s' % self.truncated, - 'n_q=%s' % self._num_questions, - 'n_ans=%s' % self._num_answers, - 'n_auth=%s' % self._num_authorities, - 'n_add=%s' % self._num_additionals, - 'questions=%s' % self._questions, - 'answers=%s' % self.answers(), + "id=%s" % self.id, + "flags=%s" % self.flags, + "truncated=%s" % self.truncated, + "n_q=%s" % self._num_questions, + "n_ans=%s" % self._num_answers, + "n_auth=%s" % self._num_authorities, + "n_add=%s" % self._num_additionals, + "questions=%s" % self._questions, + "answers=%s" % self.answers(), ] ) @@ -255,7 +255,7 @@ def _read_character_string(self) -> str: """Reads a character string from the packet""" length = self.view[self.offset] self.offset += 1 - info = self.data[self.offset : self.offset + length].decode('utf-8', 'replace') + info = self.data[self.offset : self.offset + length].decode("utf-8", "replace") self.offset += length return info @@ -279,7 +279,12 @@ def _read_others(self) -> None: # ttl is an unsigned long in network order https://www.rfc-editor.org/errata/eid2130 type_ = view[offset] << 8 | view[offset + 1] class_ = view[offset + 2] << 8 | view[offset + 3] - ttl = view[offset + 4] << 24 | view[offset + 5] << 16 | view[offset + 6] << 8 | view[offset + 7] + ttl = ( + view[offset + 4] << 24 + | view[offset + 5] << 16 + | view[offset + 6] << 8 + | view[offset + 7] + ) length = view[offset + 8] << 8 | view[offset + 9] end = self.offset + length rec = None @@ -291,7 +296,7 @@ def _read_others(self) -> None: # above would fail and hit the exception catch in read_others self.offset = end log.debug( - 'Unable to parse; skipping record for %s with type %s at offset %d while unpacking %r', + "Unable to parse; skipping record for %s with type %s at offset %d while unpacking %r", domain, _TYPES.get(type_, type_), self.offset, @@ -306,11 +311,15 @@ def _read_record( ) -> Optional[DNSRecord]: """Read known records types and skip unknown ones.""" if type_ == _TYPE_A: - return 
DNSAddress(domain, type_, class_, ttl, self._read_string(4), None, self.now) + return DNSAddress( + domain, type_, class_, ttl, self._read_string(4), None, self.now + ) if type_ in (_TYPE_CNAME, _TYPE_PTR): return DNSPointer(domain, type_, class_, ttl, self._read_name(), self.now) if type_ == _TYPE_TXT: - return DNSText(domain, type_, class_, ttl, self._read_string(length), self.now) + return DNSText( + domain, type_, class_, ttl, self._read_string(length), self.now + ) if type_ == _TYPE_SRV: view = self.view offset = self.offset @@ -341,7 +350,15 @@ def _read_record( self.now, ) if type_ == _TYPE_AAAA: - return DNSAddress(domain, type_, class_, ttl, self._read_string(16), self.scope_id, self.now) + return DNSAddress( + domain, + type_, + class_, + ttl, + self._read_string(16), + self.scope_id, + self.now, + ) if type_ == _TYPE_NSEC: name_start = self.offset return DNSNsec( @@ -382,7 +399,9 @@ def _read_name(self) -> str: labels: List[str] = [] seen_pointers: Set[int] = set() original_offset = self.offset - self.offset = self._decode_labels_at_offset(original_offset, labels, seen_pointers) + self.offset = self._decode_labels_at_offset( + original_offset, labels, seen_pointers + ) self._name_cache[original_offset] = labels name = ".".join(labels) + "." if len(name) > MAX_NAME_LENGTH: @@ -391,7 +410,9 @@ def _read_name(self) -> str: ) return name - def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: Set[int]) -> int: + def _decode_labels_at_offset( + self, off: _int, labels: List[str], seen_pointers: Set[int] + ) -> int: # This is a tight loop that is called frequently, small optimizations can make a difference. 
view = self.view while off < self._data_len: @@ -401,7 +422,9 @@ def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: if length < 0x40: label_idx = off + DNS_COMPRESSION_HEADER_LEN - labels.append(self.data[label_idx : label_idx + length].decode('utf-8', 'replace')) + labels.append( + self.data[label_idx : label_idx + length].decode("utf-8", "replace") + ) off += DNS_COMPRESSION_HEADER_LEN + length continue @@ -439,4 +462,6 @@ def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: ) return off + DNS_COMPRESSION_POINTER_LEN - raise IncomingDecodeError(f"Corrupt packet received while decoding name from {self.source}") + raise IncomingDecodeError( + f"Corrupt packet received while decoding name from {self.source}" + ) diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index f45c3935..66b526cc 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import enum @@ -50,9 +50,9 @@ DNSRecord_ = DNSRecord -PACK_BYTE = Struct('>B').pack -PACK_SHORT = Struct('>H').pack -PACK_LONG = Struct('>L').pack +PACK_BYTE = Struct(">B").pack +PACK_SHORT = Struct(">H").pack +PACK_LONG = Struct(">L").pack SHORT_CACHE_MAX = 128 @@ -74,24 +74,23 @@ class State(enum.Enum): class DNSOutgoing: - """Object representation of an outgoing packet""" __slots__ = ( - 'flags', - 'finished', - 'id', - 'multicast', - 'packets_data', - 'names', - 'data', - 'size', - 'allow_long', - 'state', - 'questions', - 'answers', - 'authorities', - 'additionals', + "flags", + "finished", + "id", + "multicast", + "packets_data", + "names", + "data", + "size", + "allow_long", + "state", + "questions", + "answers", + "authorities", + "additionals", ) def __init__(self, flags: int, multicast: bool = True, id_: int = 0) -> None: @@ 
-129,14 +128,14 @@ def _reset_for_next_packet(self) -> None: self.allow_long = True def __repr__(self) -> str: - return '' % ', '.join( + return "" % ", ".join( [ - 'multicast=%s' % self.multicast, - 'flags=%s' % self.flags, - 'questions=%s' % self.questions, - 'answers=%s' % self.answers, - 'authorities=%s' % self.authorities, - 'additionals=%s' % self.additionals, + "multicast=%s" % self.multicast, + "flags=%s" % self.flags, + "questions=%s" % self.questions, + "answers=%s" % self.answers, + "authorities=%s" % self.authorities, + "additionals=%s" % self.additionals, ] ) @@ -152,7 +151,9 @@ def add_answer(self, inp: DNSIncoming, record: DNSRecord) -> None: def add_answer_at_time(self, record: Optional[DNSRecord], now: float_) -> None: """Adds an answer if it does not expire by a certain time""" now_double = now - if record is not None and (now_double == 0 or not record.is_expired(now_double)): + if record is not None and ( + now_double == 0 or not record.is_expired(now_double) + ): self.answers.append((record, now)) def add_authorative_answer(self, record: DNSPointer) -> None: @@ -238,7 +239,7 @@ def write_string(self, value: bytes_) -> None: def _write_utf(self, s: str_) -> None: """Writes a UTF-8 string of a given length to the packet""" - utfstr = s.encode('utf-8') + utfstr = s.encode("utf-8") length = len(utfstr) if length > 64: raise NamePartTooLongException @@ -268,7 +269,7 @@ def write_name(self, name: str_) -> None: """ # split name into each label - if name.endswith('.'): + if name.endswith("."): name = name[:-1] index = self.names.get(name, 0) @@ -277,21 +278,23 @@ def write_name(self, name: str_) -> None: return start_size = self.size - labels = name.split('.') + labels = name.split(".") # Write each new label or a pointer to the existing one in the packet self.names[name] = start_size self._write_utf(labels[0]) name_length = 0 for count in range(1, len(labels)): - partial_name = '.'.join(labels[count:]) + partial_name = ".".join(labels[count:]) index = 
self.names.get(partial_name, 0) if index: self._write_link_to_name(index) return if name_length == 0: - name_length = len(name.encode('utf-8')) - self.names[partial_name] = start_size + name_length - len(partial_name.encode('utf-8')) + name_length = len(name.encode("utf-8")) + self.names[partial_name] = ( + start_size + name_length - len(partial_name.encode("utf-8")) + ) self._write_utf(labels[count]) # this is the end of a name @@ -346,7 +349,9 @@ def _write_record(self, record: DNSRecord_, now: float_) -> bool: self._replace_short(index, length) return self._check_data_limit_or_rollback(start_data_length, start_size) - def _check_data_limit_or_rollback(self, start_data_length: int_, start_size: int_) -> bool: + def _check_data_limit_or_rollback( + self, start_data_length: int_, start_size: int_ + ) -> bool: """Check data limit, if we go over, then rollback and return False.""" len_limit = _MAX_MSG_ABSOLUTE if self.allow_long else _MAX_MSG_TYPICAL self.allow_long = False @@ -355,12 +360,18 @@ def _check_data_limit_or_rollback(self, start_data_length: int_, start_size: int return True if LOGGING_IS_ENABLED_FOR(LOGGING_DEBUG): # pragma: no branch - log.debug("Reached data limit (size=%d) > (limit=%d) - rolling back", self.size, len_limit) + log.debug( + "Reached data limit (size=%d) > (limit=%d) - rolling back", + self.size, + len_limit, + ) del self.data[start_data_length:] self.size = start_size start_size_int = start_size - rollback_names = [name for name, idx in self.names.items() if idx >= start_size_int] + rollback_names = [ + name for name, idx in self.names.items() if idx >= start_size_int + ] for name in rollback_names: del self.names[name] return False @@ -381,7 +392,9 @@ def _write_answers_from_offset(self, answer_offset: int_) -> int: answers_written += 1 return answers_written - def _write_records_from_offset(self, records: Sequence[DNSRecord], offset: int_) -> int: + def _write_records_from_offset( + self, records: Sequence[DNSRecord], offset: int_ + ) 
-> int: records_written = 0 for record in records[offset:]: if not self._write_record(record, 0): @@ -390,7 +403,11 @@ def _write_records_from_offset(self, records: Sequence[DNSRecord], offset: int_) return records_written def _has_more_to_add( - self, questions_offset: int_, answer_offset: int_, authority_offset: int_, additional_offset: int_ + self, + questions_offset: int_, + answer_offset: int_, + authority_offset: int_, + additional_offset: int_, ) -> bool: """Check if all questions, answers, authority, and additionals have been written to the packet.""" return ( @@ -441,8 +458,12 @@ def packets(self) -> List[bytes]: questions_written = self._write_questions_from_offset(questions_offset) answers_written = self._write_answers_from_offset(answer_offset) - authorities_written = self._write_records_from_offset(self.authorities, authority_offset) - additionals_written = self._write_records_from_offset(self.additionals, additional_offset) + authorities_written = self._write_records_from_offset( + self.authorities, authority_offset + ) + additionals_written = self._write_records_from_offset( + self.additionals, additional_offset + ) made_progress = bool(self.data) @@ -481,7 +502,7 @@ def packets(self) -> List[bytes]: else: self._insert_short_at_start(self.id) - packets_data.append(b''.join(self.data)) + packets_data.append(b"".join(self.data)) if not made_progress: # Generating an empty packet is not a desirable outcome, but currently diff --git a/src/zeroconf/_record_update.py b/src/zeroconf/_record_update.py index 8e0e4bdb..880b7a1b 100644 --- a/src/zeroconf/_record_update.py +++ b/src/zeroconf/_record_update.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. 
- - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from typing import Optional diff --git a/src/zeroconf/_services/__init__.py b/src/zeroconf/_services/__init__.py index cf54d7f0..9812c6f3 100644 --- a/src/zeroconf/_services/__init__.py +++ b/src/zeroconf/_services/__init__.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. 
+ +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import enum @@ -35,18 +35,18 @@ class ServiceStateChange(enum.Enum): class ServiceListener: - def add_service(self, zc: 'Zeroconf', type_: str, name: str) -> None: + def add_service(self, zc: "Zeroconf", type_: str, name: str) -> None: raise NotImplementedError() - def remove_service(self, zc: 'Zeroconf', type_: str, name: str) -> None: + def remove_service(self, zc: "Zeroconf", type_: str, name: str) -> None: raise NotImplementedError() - def update_service(self, zc: 'Zeroconf', type_: str, name: str) -> None: + def update_service(self, zc: "Zeroconf", type_: str, name: str) -> None: raise NotImplementedError() class Signal: - __slots__ = ('_handlers',) + __slots__ = ("_handlers",) def __init__(self) -> None: self._handlers: List[Callable[..., None]] = [] @@ -56,20 +56,24 @@ def fire(self, **kwargs: Any) -> None: h(**kwargs) @property - def registration_interface(self) -> 'SignalRegistrationInterface': + def registration_interface(self) -> "SignalRegistrationInterface": return SignalRegistrationInterface(self._handlers) class SignalRegistrationInterface: - __slots__ = ('_handlers',) + __slots__ = ("_handlers",) def __init__(self, handlers: List[Callable[..., None]]) -> None: self._handlers = handlers - def register_handler(self, handler: Callable[..., None]) -> 'SignalRegistrationInterface': + def register_handler( + self, handler: Callable[..., None] + ) -> "SignalRegistrationInterface": self._handlers.append(handler) return self - def unregister_handler(self, handler: Callable[..., None]) -> 
'SignalRegistrationInterface': + def unregister_handler( + self, handler: Callable[..., None] + ) -> "SignalRegistrationInterface": self._handlers.remove(handler) return self diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 2ff66074..1f0524f3 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import asyncio @@ -105,11 +105,22 @@ class _ScheduledPTRQuery: - - __slots__ = ('alias', 'name', 'ttl', 'cancelled', 'expire_time_millis', 'when_millis') + __slots__ = ( + "alias", + "name", + "ttl", + "cancelled", + "expire_time_millis", + "when_millis", + ) def __init__( - self, alias: str, name: str, ttl: int, expire_time_millis: float, when_millis: float + self, + alias: str, + name: str, + ttl: int, + expire_time_millis: float, + when_millis: float, ) -> None: """Create a scheduled query.""" self.alias = alias @@ -144,13 +155,13 @@ def __repr__(self) -> str: ">" ) - def __lt__(self, other: '_ScheduledPTRQuery') -> bool: + def __lt__(self, other: "_ScheduledPTRQuery") -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis < other.when_millis return NotImplemented - def __le__(self, other: '_ScheduledPTRQuery') -> bool: + def __le__(self, other: "_ScheduledPTRQuery") -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis < other.when_millis or self.__eq__(other) @@ -162,13 +173,13 @@ def __eq__(self, other: Any) -> bool: return self.when_millis == other.when_millis return NotImplemented - def __ge__(self, other: '_ScheduledPTRQuery') -> bool: + def __ge__(self, other: "_ScheduledPTRQuery") -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis > other.when_millis or self.__eq__(other) return NotImplemented - def __gt__(self, other: '_ScheduledPTRQuery') -> bool: + def __gt__(self, other: "_ScheduledPTRQuery") -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis > other.when_millis 
@@ -178,7 +189,7 @@ def __gt__(self, other: '_ScheduledPTRQuery') -> bool: class _DNSPointerOutgoingBucket: """A DNSOutgoing bucket.""" - __slots__ = ('now_millis', 'out', 'bytes') + __slots__ = ("now_millis", "out", "bytes") def __init__(self, now_millis: float, multicast: bool) -> None: """Create a bucket to wrap a DNSOutgoing.""" @@ -186,7 +197,9 @@ def __init__(self, now_millis: float, multicast: bool) -> None: self.out = DNSOutgoing(_FLAGS_QR_QUERY, multicast) self.bytes = 0 - def add(self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNSPointer]) -> None: + def add( + self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNSPointer] + ) -> None: """Add a new set of questions and known answers to the outgoing.""" self.out.add_question(question) for answer in answers: @@ -195,7 +208,9 @@ def add(self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNS def group_ptr_queries_with_known_answers( - now: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers + now: float_, + multicast: bool_, + question_with_known_answers: _QuestionWithKnownAnswers, ) -> List[DNSOutgoing]: """Aggregate queries so that as many known answers as possible fit in the same packet without having known answers spill over into the next packet unless the @@ -205,11 +220,15 @@ def group_ptr_queries_with_known_answers( so we try to keep all the known answers in the same packet as the questions. 
""" - return _group_ptr_queries_with_known_answers(now, multicast, question_with_known_answers) + return _group_ptr_queries_with_known_answers( + now, multicast, question_with_known_answers + ) def _group_ptr_queries_with_known_answers( - now_millis: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers + now_millis: float_, + multicast: bool_, + question_with_known_answers: _QuestionWithKnownAnswers, ) -> List[DNSOutgoing]: """Inner wrapper for group_ptr_queries_with_known_answers.""" # This is the maximum size the query + known answers can be with name compression. @@ -218,7 +237,10 @@ def _group_ptr_queries_with_known_answers( # goal of this algorithm is to quickly bucket the query + known answers without # the overhead of actually constructing the packets. query_by_size: Dict[DNSQuestion, int] = { - question: (question.max_size + sum(answer.max_size_compressed for answer in known_answers)) + question: ( + question.max_size + + sum(answer.max_size_compressed for answer in known_answers) + ) for question, known_answers in question_with_known_answers.items() } max_bucket_size = _MAX_MSG_TYPICAL - _DNS_PACKET_HEADER_LEN @@ -246,7 +268,7 @@ def _group_ptr_queries_with_known_answers( def generate_service_query( - zc: 'Zeroconf', + zc: "Zeroconf", now_millis: float_, types_: Set[str], multicast: bool, @@ -254,7 +276,9 @@ def generate_service_query( ) -> List[DNSOutgoing]: """Generate a service query for sending with zeroconf.send.""" questions_with_known_answers: _QuestionWithKnownAnswers = {} - qu_question = not multicast if question_type is None else question_type is QU_QUESTION + qu_question = ( + not multicast if question_type is None else question_type is QU_QUESTION + ) question_history = zc.question_history cache = zc.cache for type_ in types_: @@ -265,7 +289,9 @@ def generate_service_query( for record in cache.get_all_by_details(type_, _TYPE_PTR, _CLASS_IN) if not record.is_stale(now_millis) } - if not qu_question and 
question_history.suppresses(question, now_millis, known_answers): + if not qu_question and question_history.suppresses( + question, now_millis, known_answers + ): log.debug("Asking %s was suppressed by the question history", question) continue if TYPE_CHECKING: @@ -276,12 +302,14 @@ def generate_service_query( if not qu_question: question_history.add_question_at_time(question, now_millis, known_answers) - return _group_ptr_queries_with_known_answers(now_millis, multicast, questions_with_known_answers) + return _group_ptr_queries_with_known_answers( + now_millis, multicast, questions_with_known_answers + ) def _on_change_dispatcher( listener: ServiceListener, - zeroconf: 'Zeroconf', + zeroconf: "Zeroconf", service_type: str, name: str, state_change: ServiceStateChange, @@ -290,10 +318,12 @@ def _on_change_dispatcher( getattr(listener, _ON_CHANGE_DISPATCH[state_change])(zeroconf, service_type, name) -def _service_state_changed_from_listener(listener: ServiceListener) -> Callable[..., None]: +def _service_state_changed_from_listener( + listener: ServiceListener, +) -> Callable[..., None]: """Generate a service_state_changed handlers from a listener.""" assert listener is not None - if not hasattr(listener, 'update_service'): + if not hasattr(listener, "update_service"): warnings.warn( "%r has no update_service method. Provide one (it can be empty if you " "don't care about the updates), it'll become mandatory." 
% (listener,), @@ -310,20 +340,20 @@ class QueryScheduler: """ __slots__ = ( - '_zc', - '_types', - '_addr', - '_port', - '_multicast', - '_first_random_delay_interval', - '_min_time_between_queries_millis', - '_loop', - '_startup_queries_sent', - '_next_scheduled_for_alias', - '_query_heap', - '_next_run', - '_clock_resolution_millis', - '_question_type', + "_zc", + "_types", + "_addr", + "_port", + "_multicast", + "_first_random_delay_interval", + "_min_time_between_queries_millis", + "_loop", + "_startup_queries_sent", + "_next_scheduled_for_alias", + "_query_heap", + "_next_run", + "_clock_resolution_millis", + "_question_type", ) def __init__( @@ -349,7 +379,9 @@ def __init__( self._next_scheduled_for_alias: Dict[str, _ScheduledPTRQuery] = {} self._query_heap: list[_ScheduledPTRQuery] = [] self._next_run: Optional[asyncio.TimerHandle] = None - self._clock_resolution_millis = time.get_clock_info('monotonic').resolution * 1000 + self._clock_resolution_millis = ( + time.get_clock_info("monotonic").resolution * 1000 + ) self._question_type = question_type def start(self, loop: asyncio.AbstractEventLoop) -> None: @@ -362,7 +394,9 @@ def start(self, loop: asyncio.AbstractEventLoop) -> None: also delay the first query of the series by a randomly chosen amount in the range 20-120 ms. 
""" - start_delay = millis_to_seconds(random.randint(*self._first_random_delay_interval)) + start_delay = millis_to_seconds( + random.randint(*self._first_random_delay_interval) + ) self._loop = loop self._next_run = loop.call_later(start_delay, self._process_startup_queries) @@ -375,7 +409,10 @@ def stop(self) -> None: self._query_heap.clear() def _schedule_ptr_refresh( - self, pointer: DNSPointer, expire_time_millis: float_, refresh_time_millis: float_ + self, + pointer: DNSPointer, + expire_time_millis: float_, + refresh_time_millis: float_, ) -> None: """Schedule a query for a pointer.""" ttl = int(pointer.ttl) if isinstance(pointer.ttl, float) else pointer.ttl @@ -414,7 +451,10 @@ def reschedule_ptr_first_refresh(self, pointer: DNSPointer) -> None: self._schedule_ptr_refresh(pointer, expire_time_millis, refresh_time_millis) def schedule_rescue_query( - self, query: _ScheduledPTRQuery, now_millis: float_, additional_percentage: float_ + self, + query: _ScheduledPTRQuery, + now_millis: float_, + additional_percentage: float_, ) -> None: """Reschedule a query for a pointer at an additional percentage of expiration.""" ttl_millis = query.ttl * 1000 @@ -426,7 +466,11 @@ def schedule_rescue_query( # tried to rescue the record and failed return scheduled_ptr_query = _ScheduledPTRQuery( - query.alias, query.name, query.ttl, query.expire_time_millis, next_query_time + query.alias, + query.name, + query.ttl, + query.expire_time_millis, + next_query_time, ) self._schedule_ptr_query(scheduled_ptr_query) @@ -441,7 +485,9 @@ def _process_startup_queries(self) -> None: now_millis = current_time_millis() # At first we will send STARTUP_QUERIES queries to get the cache populated - self.async_send_ready_queries(self._startup_queries_sent == 0, now_millis, self._types) + self.async_send_ready_queries( + self._startup_queries_sent == 0, now_millis, self._types + ) self._startup_queries_sent += 1 # Once we finish sending the initial queries we will @@ -454,7 +500,9 @@ def 
_process_startup_queries(self) -> None: ) return - self._next_run = self._loop.call_later(self._startup_queries_sent**2, self._process_startup_queries) + self._next_run = self._loop.call_later( + self._startup_queries_sent**2, self._process_startup_queries + ) def _process_ready_types(self) -> None: """Generate a list of ready types that is due and schedule the next time.""" @@ -495,7 +543,9 @@ def _process_ready_types(self) -> None: schedule_rescue.append(query) for query in schedule_rescue: - self.schedule_rescue_query(query, now_millis, RESCUE_RECORD_RETRY_TTL_PERCENTAGE) + self.schedule_rescue_query( + query, now_millis, RESCUE_RECORD_RETRY_TTL_PERCENTAGE + ) if ready_types: self.async_send_ready_queries(False, now_millis, ready_types) @@ -507,7 +557,9 @@ def _process_ready_types(self) -> None: else: next_when_millis = next_time_millis - self._next_run = self._loop.call_at(millis_to_seconds(next_when_millis), self._process_ready_types) + self._next_run = self._loop.call_at( + millis_to_seconds(next_when_millis), self._process_ready_types + ) def async_send_ready_queries( self, first_request: bool, now_millis: float_, ready_types: Set[str] @@ -517,8 +569,14 @@ def async_send_ready_queries( # https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 since we are # just starting up and we know our cache is likely empty. This ensures # the next outgoing will be sent with the known answers list. 
- question_type = QU_QUESTION if self._question_type is None and first_request else self._question_type - outs = generate_service_query(self._zc, now_millis, ready_types, self._multicast, question_type) + question_type = ( + QU_QUESTION + if self._question_type is None and first_request + else self._question_type + ) + outs = generate_service_query( + self._zc, now_millis, ready_types, self._multicast, question_type + ) if outs: for out in outs: self._zc.async_send(out, self._addr, self._port) @@ -528,20 +586,20 @@ class _ServiceBrowserBase(RecordUpdateListener): """Base class for ServiceBrowser.""" __slots__ = ( - 'types', - 'zc', - '_cache', - '_loop', - '_pending_handlers', - '_service_state_changed', - 'query_scheduler', - 'done', - '_query_sender_task', + "types", + "zc", + "_cache", + "_loop", + "_pending_handlers", + "_service_state_changed", + "query_scheduler", + "done", + "_query_sender_task", ) def __init__( self, - zc: 'Zeroconf', + zc: "Zeroconf", type_: Union[str, list], handlers: Optional[Union[ServiceListener, List[Callable[..., None]]]] = None, listener: Optional[ServiceListener] = None, @@ -567,7 +625,7 @@ def __init__( remove_service() methods called when this browser discovers changes in the services availability. 
""" - assert handlers or listener, 'You need to specify at least one handler' + assert handlers or listener, "You need to specify at least one handler" self.types: Set[str] = set(type_ if isinstance(type_, list) else [type_]) for check_type_ in self.types: # Will generate BadTypeInNameException on a bad name @@ -591,8 +649,8 @@ def __init__( self.done = False self._query_sender_task: Optional[asyncio.Task] = None - if hasattr(handlers, 'add_service'): - listener = cast('ServiceListener', handlers) + if hasattr(handlers, "add_service"): + listener = cast("ServiceListener", handlers) handlers = None handlers = cast(List[Callable[..., None]], handlers or []) @@ -609,9 +667,13 @@ def _async_start(self) -> None: Must be called by uses of this base class after they have finished setting their properties. """ - self.zc.async_add_listener(self, [DNSQuestion(type_, _TYPE_PTR, _CLASS_IN) for type_ in self.types]) + self.zc.async_add_listener( + self, [DNSQuestion(type_, _TYPE_PTR, _CLASS_IN) for type_ in self.types] + ) # Only start queries after the listener is installed - self._query_sender_task = asyncio.ensure_future(self._async_start_query_sender()) + self._query_sender_task = asyncio.ensure_future( + self._async_start_query_sender() + ) @property def service_state_changed(self) -> SignalRegistrationInterface: @@ -620,7 +682,9 @@ def service_state_changed(self) -> SignalRegistrationInterface: def _names_matching_types(self, names: Iterable[str]) -> List[Tuple[str, str]]: """Return the type and name for records matching the types we are browsing.""" return [ - (type_, name) for name in names for type_ in self.types.intersection(cached_possible_types(name)) + (type_, name) + for name in names + for type_ in self.types.intersection(cached_possible_types(name)) ] def _enqueue_callback( @@ -638,11 +702,16 @@ def _enqueue_callback( state_change is SERVICE_STATE_CHANGE_REMOVED and self._pending_handlers.get(key) is not SERVICE_STATE_CHANGE_ADDED ) - or (state_change is 
SERVICE_STATE_CHANGE_UPDATED and key not in self._pending_handlers) + or ( + state_change is SERVICE_STATE_CHANGE_UPDATED + and key not in self._pending_handlers + ) ): self._pending_handlers[key] = state_change - def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[RecordUpdate]) -> None: + def async_update_records( + self, zc: "Zeroconf", now: float_, records: List[RecordUpdate] + ) -> None: """Callback invoked by Zeroconf when new information arrives. Updates information required by browser in the Zeroconf cache. @@ -660,12 +729,18 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record if TYPE_CHECKING: record = cast(DNSPointer, record) pointer = record - for type_ in self.types.intersection(cached_possible_types(pointer.name)): + for type_ in self.types.intersection( + cached_possible_types(pointer.name) + ): if old_record is None: - self._enqueue_callback(SERVICE_STATE_CHANGE_ADDED, type_, pointer.alias) + self._enqueue_callback( + SERVICE_STATE_CHANGE_ADDED, type_, pointer.alias + ) self.query_scheduler.reschedule_ptr_first_refresh(pointer) elif pointer.is_expired(now): - self._enqueue_callback(SERVICE_STATE_CHANGE_REMOVED, type_, pointer.alias) + self._enqueue_callback( + SERVICE_STATE_CHANGE_REMOVED, type_, pointer.alias + ) self.query_scheduler.cancel_ptr_refresh(pointer) else: self.query_scheduler.reschedule_ptr_first_refresh(pointer) @@ -677,7 +752,10 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record if record_type in _ADDRESS_RECORD_TYPES: cache = self._cache - names = {service.name for service in cache.async_entries_with_server(record.name)} + names = { + service.name + for service in cache.async_entries_with_server(record.name) + } # Iterate through the DNSCache and callback any services that use this address for type_, name in self._names_matching_types(names): self._enqueue_callback(SERVICE_STATE_CHANGE_UPDATED, type_, name) @@ -699,7 +777,9 @@ def 
async_update_records_complete(self) -> None: self._fire_service_state_changed_event(pending) self._pending_handlers.clear() - def _fire_service_state_changed_event(self, event: Tuple[Tuple[str, str], ServiceStateChange]) -> None: + def _fire_service_state_changed_event( + self, event: Tuple[Tuple[str, str], ServiceStateChange] + ) -> None: """Fire a service state changed event. When running with ServiceBrowser, this will happen in the dedicated @@ -721,7 +801,9 @@ def _async_cancel(self) -> None: self.done = True self.query_scheduler.stop() self.zc.async_remove_listener(self) - assert self._query_sender_task is not None, "Attempted to cancel a browser that was not started" + assert ( + self._query_sender_task is not None + ), "Attempted to cancel a browser that was not started" self._query_sender_task.cancel() self._query_sender_task = None @@ -741,7 +823,7 @@ class ServiceBrowser(_ServiceBrowserBase, threading.Thread): def __init__( self, - zc: 'Zeroconf', + zc: "Zeroconf", type_: Union[str, list], handlers: Optional[Union[ServiceListener, List[Callable[..., None]]]] = None, listener: Optional[ServiceListener] = None, @@ -754,7 +836,9 @@ def __init__( if not zc.loop.is_running(): raise RuntimeError("The event loop is not running") threading.Thread.__init__(self) - super().__init__(zc, type_, handlers, listener, addr, port, delay, question_type) + super().__init__( + zc, type_, handlers, listener, addr, port, delay, question_type + ) # Add the queue before the listener is installed in _setup # to ensure that events run in the dedicated thread and do # not block the event loop @@ -763,8 +847,8 @@ def __init__( self.start() zc.loop.call_soon_threadsafe(self._async_start) self.name = "zeroconf-ServiceBrowser-{}-{}".format( - '-'.join([type_[:-7] for type_ in self.types]), - getattr(self, 'native_id', self.ident), + "-".join([type_[:-7] for type_ in self.types]), + getattr(self, "native_id", self.ident), ) def cancel(self) -> None: @@ -793,7 +877,7 @@ def 
async_update_records_complete(self) -> None: self.queue.put(pending) self._pending_handlers.clear() - def __enter__(self) -> 'ServiceBrowser': + def __enter__(self) -> "ServiceBrowser": return self def __exit__( # pylint: disable=useless-return diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 6d68de83..66313afc 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. 
+ +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import asyncio @@ -168,7 +168,7 @@ def __init__( port: Optional[int] = None, weight: int = 0, priority: int = 0, - properties: Union[bytes, Dict] = b'', + properties: Union[bytes, Dict] = b"", server: Optional[str] = None, host_ttl: int = _DNS_HOST_TTL, other_ttl: int = _DNS_OTHER_TTL, @@ -179,11 +179,13 @@ def __init__( ) -> None: # Accept both none, or one, but not both. if addresses is not None and parsed_addresses is not None: - raise TypeError("addresses and parsed_addresses cannot be provided together") + raise TypeError( + "addresses and parsed_addresses cannot be provided together" + ) if not type_.endswith(service_type_name(name, strict=False)): raise BadTypeInNameException self.interface_index = interface_index - self.text = b'' + self.text = b"" self.type = type_ self._name = name self.key = name.lower() @@ -249,7 +251,11 @@ def addresses(self, value: List[bytes]) -> None: self._get_address_and_nsec_records_cache = None for address in value: - if IPADDRESS_SUPPORTS_SCOPE_ID and len(address) == 16 and self.interface_index is not None: + if ( + IPADDRESS_SUPPORTS_SCOPE_ID + and len(address) == 16 + and self.interface_index is not None + ): addr = ip_bytes_and_scope_to_address(address, self.interface_index) else: addr = cached_ip_addresses(address) @@ -293,7 +299,9 @@ def async_clear_cache(self) -> None: self._dns_text_cache = None self._get_address_and_nsec_records_cache = None - async def async_wait(self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + async def async_wait( 
+ self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] = None + ) -> None: """Calling task waits for a given number of milliseconds or until notified.""" if not self._new_records_futures: self._new_records_futures = set() @@ -351,7 +359,10 @@ def parsed_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: This means the first address will always be the most recently added address of the given IP version. """ - return [str_without_scope_id(addr) for addr in self._ip_addresses_by_version_value(version.value)] + return [ + str_without_scope_id(addr) + for addr in self._ip_addresses_by_version_value(version.value) + ] def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: """Equivalent to parsed_addresses, with the exception that IPv6 Link-Local @@ -363,27 +374,31 @@ def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[st This means the first address will always be the most recently added address of the given IP version. 
""" - return [str(addr) for addr in self._ip_addresses_by_version_value(version.value)] + return [ + str(addr) for addr in self._ip_addresses_by_version_value(version.value) + ] - def _set_properties(self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]]) -> None: + def _set_properties( + self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]] + ) -> None: """Sets properties and text of this info from a dictionary""" list_: List[bytes] = [] properties_contain_str = False - result = b'' + result = b"" for key, value in properties.items(): if isinstance(key, str): - key = key.encode('utf-8') + key = key.encode("utf-8") properties_contain_str = True record = key if value is not None: if not isinstance(value, bytes): - value = str(value).encode('utf-8') + value = str(value).encode("utf-8") properties_contain_str = True - record += b'=' + value + record += b"=" + value list_.append(record) for item in list_: - result = b''.join((result, bytes((len(item),)), item)) + result = b"".join((result, bytes((len(item),)), item)) if not properties_contain_str: # If there are no str keys or values, we can use the properties # as-is, without decoding them, otherwise calling @@ -406,7 +421,9 @@ def _set_text(self, text: bytes) -> None: def _generate_decoded_properties(self) -> None: """Generates decoded properties from the properties""" self._decoded_properties = { - k.decode("ascii", "replace"): None if v is None else v.decode("utf-8", "replace") + k.decode("ascii", "replace"): None + if v is None + else v.decode("utf-8", "replace") for k, v in self.properties.items() } @@ -426,7 +443,7 @@ def _unpack_text_into_properties(self) -> None: length = text[index] index += 1 key_value = text[index : index + length] - key_sep_value = key_value.partition(b'=') + key_sep_value = key_value.partition(b"=") key = key_sep_value[0] if key not in properties: properties[key] = key_sep_value[2] or None @@ -439,7 +456,7 @@ def get_name(self) -> str: return self._name[: 
len(self._name) - len(self.type) - 1] def _get_ip_addresses_from_cache_lifo( - self, zc: 'Zeroconf', now: float_, type: int_ + self, zc: "Zeroconf", now: float_, type: int_ ) -> List[Union[IPv4Address, IPv6Address]]: """Set IPv6 addresses from the cache.""" address_list: List[Union[IPv4Address, IPv6Address]] = [] @@ -452,25 +469,33 @@ def _get_ip_addresses_from_cache_lifo( address_list.reverse() # Reverse to get LIFO order return address_list - def _set_ipv6_addresses_from_cache(self, zc: 'Zeroconf', now: float_) -> None: + def _set_ipv6_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: """Set IPv6 addresses from the cache.""" if TYPE_CHECKING: self._ipv6_addresses = cast( - "List[IPv6Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) + "List[IPv6Address]", + self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA), ) else: - self._ipv6_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) + self._ipv6_addresses = self._get_ip_addresses_from_cache_lifo( + zc, now, _TYPE_AAAA + ) - def _set_ipv4_addresses_from_cache(self, zc: 'Zeroconf', now: float_) -> None: + def _set_ipv4_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: """Set IPv4 addresses from the cache.""" if TYPE_CHECKING: self._ipv4_addresses = cast( - "List[IPv4Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) + "List[IPv4Address]", + self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A), ) else: - self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) + self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo( + zc, now, _TYPE_A + ) - def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[RecordUpdate]) -> None: + def async_update_records( + self, zc: "Zeroconf", now: float_, records: List[RecordUpdate] + ) -> None: """Updates service information from a DNS record. This method will be run in the event loop. 
@@ -482,7 +507,9 @@ def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[Record if updated and new_records_futures: _resolve_all_futures_to_none(new_records_futures) - def _process_record_threadsafe(self, zc: 'Zeroconf', record: DNSRecord, now: float_) -> bool: + def _process_record_threadsafe( + self, zc: "Zeroconf", record: DNSRecord, now: float_ + ) -> bool: """Thread safe record updating. Returns True if a new record was added. @@ -664,11 +691,15 @@ def _dns_text(self, override_ttl: Optional[int]) -> DNSText: self._dns_text_cache = record return record - def dns_nsec(self, missing_types: List[int], override_ttl: Optional[int] = None) -> DNSNsec: + def dns_nsec( + self, missing_types: List[int], override_ttl: Optional[int] = None + ) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return self._dns_nsec(missing_types, override_ttl) - def _dns_nsec(self, missing_types: List[int], override_ttl: Optional[int]) -> DNSNsec: + def _dns_nsec( + self, missing_types: List[int], override_ttl: Optional[int] + ) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return DNSNsec( self._name, @@ -680,11 +711,15 @@ def _dns_nsec(self, missing_types: List[int], override_ttl: Optional[int]) -> DN 0.0, ) - def get_address_and_nsec_records(self, override_ttl: Optional[int] = None) -> Set[DNSRecord]: + def get_address_and_nsec_records( + self, override_ttl: Optional[int] = None + ) -> Set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" return self._get_address_and_nsec_records(override_ttl) - def _get_address_and_nsec_records(self, override_ttl: Optional[int]) -> Set[DNSRecord]: + def _get_address_and_nsec_records( + self, override_ttl: Optional[int] + ) -> Set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" cacheable = override_ttl is None if self._get_address_and_nsec_records_cache is not None and cacheable: @@ -695,19 +730,26 @@ def 
_get_address_and_nsec_records(self, override_ttl: Optional[int]) -> Set[DNSR missing_types.discard(dns_address.type) records.add(dns_address) if missing_types: - assert self.server is not None, "Service server must be set for NSEC record." + assert ( + self.server is not None + ), "Service server must be set for NSEC record." records.add(self._dns_nsec(list(missing_types), override_ttl)) if cacheable: self._get_address_and_nsec_records_cache = records return records - def _get_address_records_from_cache_by_type(self, zc: 'Zeroconf', _type: int_) -> List[DNSAddress]: + def _get_address_records_from_cache_by_type( + self, zc: "Zeroconf", _type: int_ + ) -> List[DNSAddress]: """Get the addresses from the cache.""" if self.server_key is None: return [] cache = zc.cache if TYPE_CHECKING: - records = cast("List[DNSAddress]", cache.get_all_by_details(self.server_key, _type, _CLASS_IN)) + records = cast( + "List[DNSAddress]", + cache.get_all_by_details(self.server_key, _type, _CLASS_IN), + ) else: records = cache.get_all_by_details(self.server_key, _type, _CLASS_IN) return records @@ -721,14 +763,14 @@ def set_server_if_missing(self) -> None: self.server = self._name self.server_key = self.key - def load_from_cache(self, zc: 'Zeroconf', now: Optional[float_] = None) -> bool: + def load_from_cache(self, zc: "Zeroconf", now: Optional[float_] = None) -> bool: """Populate the service info from the cache. This method is designed to be threadsafe. """ return self._load_from_cache(zc, now or current_time_millis()) - def _load_from_cache(self, zc: 'Zeroconf', now: float_) -> bool: + def _load_from_cache(self, zc: "Zeroconf", now: float_) -> bool: """Populate the service info from the cache. This method is designed to be threadsafe. 
@@ -754,11 +796,13 @@ def _load_from_cache(self, zc: 'Zeroconf', now: float_) -> bool: @property def _is_complete(self) -> bool: """The ServiceInfo has all expected properties.""" - return bool(self.text is not None and (self._ipv4_addresses or self._ipv6_addresses)) + return bool( + self.text is not None and (self._ipv4_addresses or self._ipv6_addresses) + ) def request( self, - zc: 'Zeroconf', + zc: "Zeroconf", timeout: float, question_type: Optional[DNSQuestionType] = None, addr: Optional[str] = None, @@ -782,7 +826,9 @@ def request( raise RuntimeError("Use AsyncServiceInfo.async_request from the event loop") return bool( run_coro_with_timeout( - self.async_request(zc, timeout, question_type, addr, port), zc.loop, timeout + self.async_request(zc, timeout, question_type, addr, port), + zc.loop, + timeout, ) ) @@ -794,7 +840,7 @@ def _get_random_delay(self) -> int_: async def async_request( self, - zc: 'Zeroconf', + zc: "Zeroconf", timeout: float, question_type: Optional[DNSQuestionType] = None, addr: Optional[str] = None, @@ -837,7 +883,9 @@ async def async_request( if last <= now: return False if next_ <= now: - this_question_type = question_type or QU_QUESTION if first_request else QM_QUESTION + this_question_type = ( + question_type or QU_QUESTION if first_request else QM_QUESTION + ) out = self._generate_request_query(zc, now, this_question_type) first_request = False if out.questions: @@ -849,7 +897,10 @@ async def async_request( zc.async_send(out, addr, port) next_ = now + delay next_ += self._get_random_delay() - if this_question_type is QM_QUESTION and delay < _DUPLICATE_QUESTION_INTERVAL: + if ( + this_question_type is QM_QUESTION + and delay < _DUPLICATE_QUESTION_INTERVAL + ): # If we just asked a QM question, we need to # wait at least the duplicate question interval # before asking another QM question otherwise @@ -878,7 +929,9 @@ def _add_question_with_known_answers( ) -> None: """Add a question with known answers if its not suppressed.""" 
known_answers = { - answer for answer in cache.get_all_by_details(name, type_, class_) if not answer.is_stale(now) + answer + for answer in cache.get_all_by_details(name, type_, class_) + if not answer.is_stale(now) } if skip_if_known_answers and known_answers: return @@ -894,7 +947,7 @@ def _add_question_with_known_answers( out.add_answer_at_time(answer, now) def _generate_request_query( - self, zc: 'Zeroconf', now: float_, question_type: DNSQuestionType + self, zc: "Zeroconf", now: float_, question_type: DNSQuestionType ) -> DNSOutgoing: """Generate the request query.""" out = DNSOutgoing(_FLAGS_QR_QUERY) @@ -919,20 +972,20 @@ def _generate_request_query( def __repr__(self) -> str: """String representation""" - return '{}({})'.format( + return "{}({})".format( type(self).__name__, - ', '.join( - f'{name}={getattr(self, name)!r}' + ", ".join( + f"{name}={getattr(self, name)!r}" for name in ( - 'type', - 'name', - 'addresses', - 'port', - 'weight', - 'priority', - 'server', - 'properties', - 'interface_index', + "type", + "name", + "addresses", + "port", + "weight", + "priority", + "server", + "properties", + "interface_index", ) ), ) diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index 261e8e9c..2d4f3f8e 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. 
- - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from typing import Dict, List, Optional, Union @@ -79,7 +79,9 @@ def async_get_infos_server(self, server: str) -> List[ServiceInfo]: """Return all ServiceInfo matching server.""" return self._async_get_by_index(self.servers, server) - def _async_get_by_index(self, records: Dict[str, List], key: _str) -> List[ServiceInfo]: + def _async_get_by_index( + self, records: Dict[str, List], key: _str + ) -> List[ServiceInfo]: """Return all ServiceInfo matching the index.""" record_list = records.get(key) if record_list is None: diff --git a/src/zeroconf/_services/types.py b/src/zeroconf/_services/types.py index 70db2d60..9793ae48 100644 --- a/src/zeroconf/_services/types.py +++ b/src/zeroconf/_services/types.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import time @@ -69,7 +69,9 @@ def find( """ local_zc = zc or Zeroconf(interfaces=interfaces, ip_version=ip_version) listener = cls() - browser = ServiceBrowser(local_zc, _SERVICE_TYPE_ENUMERATION_NAME, listener=listener) + browser = ServiceBrowser( + local_zc, _SERVICE_TYPE_ENUMERATION_NAME, listener=listener + ) # wait for responses time.sleep(timeout) diff --git a/src/zeroconf/_transport.py b/src/zeroconf/_transport.py index c37af2ef..f28c0029 100644 --- a/src/zeroconf/_transport.py +++ b/src/zeroconf/_transport.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - This module provides a framework for the use of DNS Service Discovery 
- using IP multicast. +This module provides a framework for the use of DNS Service Discovery +using IP multicast. - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
- You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import asyncio @@ -29,11 +29,11 @@ class _WrappedTransport: """A wrapper for transports.""" __slots__ = ( - 'transport', - 'is_ipv6', - 'sock', - 'fileno', - 'sock_name', + "transport", + "is_ipv6", + "sock", + "fileno", + "sock_name", ) def __init__( @@ -57,7 +57,7 @@ def __init__( def make_wrapped_transport(transport: asyncio.DatagramTransport) -> _WrappedTransport: """Make a wrapped transport.""" - sock: socket.socket = transport.get_extra_info('socket') + sock: socket.socket = transport.get_extra_info("socket") return _WrappedTransport( transport=transport, is_ipv6=sock.family == socket.AF_INET6, diff --git a/src/zeroconf/_updates.py b/src/zeroconf/_updates.py index 42fa8285..eda89df4 100644 --- a/src/zeroconf/_updates.py +++ b/src/zeroconf/_updates.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from typing import TYPE_CHECKING, List @@ -40,16 +40,20 @@ class RecordUpdateListener: """ def update_record( # pylint: disable=no-self-use - self, zc: 'Zeroconf', now: float, record: DNSRecord + self, zc: "Zeroconf", now: float, record: DNSRecord ) -> None: """Update a single record. This method is deprecated and will be removed in a future version. update_records should be implemented instead. """ - raise RuntimeError("update_record is deprecated and will be removed in a future version.") + raise RuntimeError( + "update_record is deprecated and will be removed in a future version." + ) - def async_update_records(self, zc: 'Zeroconf', now: float_, records: List[RecordUpdate]) -> None: + def async_update_records( + self, zc: "Zeroconf", now: float_, records: List[RecordUpdate] + ) -> None: """Update multiple records in one shot. 
All records that are received in a single packet are passed diff --git a/src/zeroconf/_utils/__init__.py b/src/zeroconf/_utils/__init__.py index 2ef4b15b..30920c6a 100644 --- a/src/zeroconf/_utils/__init__.py +++ b/src/zeroconf/_utils/__init__.py @@ -1,21 +1,21 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - This module provides a framework for the use of DNS Service Discovery - using IP multicast. +This module provides a framework for the use of DNS Service Discovery +using IP multicast. - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
- You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ diff --git a/src/zeroconf/_utils/asyncio.py b/src/zeroconf/_utils/asyncio.py index 358ef37e..c2e66277 100644 --- a/src/zeroconf/_utils/asyncio.py +++ b/src/zeroconf/_utils/asyncio.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. 
+ +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import asyncio @@ -29,7 +29,7 @@ if sys.version_info[:2] < (3, 11): from async_timeout import timeout as asyncio_timeout else: - from asyncio import timeout as asyncio_timeout + from asyncio import timeout as asyncio_timeout # type: ignore[attr-defined] from .._exceptions import EventLoopBlocked from ..const import _LOADED_SYSTEM_TIMEOUT @@ -60,7 +60,9 @@ async def wait_for_future_set_or_timeout( """Wait for a future or timeout (in milliseconds).""" future = loop.create_future() future_set.add(future) - handle = loop.call_later(millis_to_seconds(timeout), _set_future_none_if_not_done, future) + handle = loop.call_later( + millis_to_seconds(timeout), _set_future_none_if_not_done, future + ) try: await future finally: @@ -98,7 +100,9 @@ async def await_awaitable(aw: Awaitable) -> None: await task -def run_coro_with_timeout(aw: Coroutine, loop: asyncio.AbstractEventLoop, timeout: float) -> Any: +def run_coro_with_timeout( + aw: Coroutine, loop: asyncio.AbstractEventLoop, timeout: float +) -> Any: """Run a coroutine with a timeout. 
The timeout should only be used as a safeguard to prevent @@ -120,13 +124,15 @@ def run_coro_with_timeout(aw: Coroutine, loop: asyncio.AbstractEventLoop, timeou def shutdown_loop(loop: asyncio.AbstractEventLoop) -> None: """Wait for pending tasks and stop an event loop.""" pending_tasks = set( - asyncio.run_coroutine_threadsafe(_async_get_all_tasks(loop), loop).result(_GET_ALL_TASKS_TIMEOUT) + asyncio.run_coroutine_threadsafe(_async_get_all_tasks(loop), loop).result( + _GET_ALL_TASKS_TIMEOUT + ) ) pending_tasks -= {task for task in pending_tasks if task.done()} if pending_tasks: - asyncio.run_coroutine_threadsafe(_wait_for_loop_tasks(pending_tasks), loop).result( - _WAIT_FOR_LOOP_TASKS_TIMEOUT - ) + asyncio.run_coroutine_threadsafe( + _wait_for_loop_tasks(pending_tasks), loop + ).result(_WAIT_FOR_LOOP_TASKS_TIMEOUT) loop.call_soon_threadsafe(loop.stop) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index ba137955..d4ba708e 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -1,24 +1,25 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ + import sys from functools import lru_cache from ipaddress import AddressValueError, IPv4Address, IPv6Address, NetmaskValueError @@ -33,7 +34,6 @@ class ZeroconfIPv4Address(IPv4Address): - __slots__ = ("_str", "_is_link_local", "_is_unspecified") def __init__(self, *args: Any, **kwargs: Any) -> None: @@ -59,7 +59,6 @@ def is_unspecified(self) -> bool: class ZeroconfIPv6Address(IPv6Address): - __slots__ = ("_str", "_is_link_local", "_is_unspecified") def __init__(self, *args: Any, **kwargs: Any) -> None: @@ -85,7 +84,9 @@ def is_unspecified(self) -> bool: @lru_cache(maxsize=512) -def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4Address, IPv6Address]]: +def _cached_ip_addresses( + address: Union[str, bytes, int], +) -> Optional[Union[IPv4Address, IPv6Address]]: """Cache IP addresses.""" try: return 
ZeroconfIPv4Address(address) @@ -102,19 +103,25 @@ def _cached_ip_addresses(address: Union[str, bytes, int]) -> Optional[Union[IPv4 cached_ip_addresses = cached_ip_addresses_wrapper -def get_ip_address_object_from_record(record: DNSAddress) -> Optional[Union[IPv4Address, IPv6Address]]: +def get_ip_address_object_from_record( + record: DNSAddress, +) -> Optional[Union[IPv4Address, IPv6Address]]: """Get the IP address object from the record.""" if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id: return ip_bytes_and_scope_to_address(record.address, record.scope_id) return cached_ip_addresses_wrapper(record.address) -def ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Union[IPv4Address, IPv6Address]]: +def ip_bytes_and_scope_to_address( + address: bytes_, scope: int_ +) -> Optional[Union[IPv4Address, IPv6Address]]: """Convert the bytes and scope to an IP address object.""" base_address = cached_ip_addresses_wrapper(address) if base_address is not None and base_address.is_link_local: # Avoid expensive __format__ call by using PyUnicode_Join - return cached_ip_addresses_wrapper("".join((str(base_address), "%", str(scope)))) + return cached_ip_addresses_wrapper( + "".join((str(base_address), "%", str(scope))) + ) return base_address @@ -122,7 +129,7 @@ def str_without_scope_id(addr: Union[IPv4Address, IPv6Address]) -> str: """Return the string representation of the address without the scope id.""" if IPADDRESS_SUPPORTS_SCOPE_ID and addr.version == 6: address_str = str(addr) - return address_str.partition('%')[0] + return address_str.partition("%")[0] return str(addr) diff --git a/src/zeroconf/_utils/name.py b/src/zeroconf/_utils/name.py index adccb3e5..3f923cfd 100644 --- a/src/zeroconf/_utils/name.py +++ b/src/zeroconf/_utils/name.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use 
of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ from functools import lru_cache @@ -83,7 +83,7 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis raise BadTypeInNameException("Full name (%s) must be > 256 bytes" % type_) if type_.endswith((_TCP_PROTOCOL_LOCAL_TRAILER, _NONTCP_PROTOCOL_LOCAL_TRAILER)): - remaining = type_[: -len(_TCP_PROTOCOL_LOCAL_TRAILER)].split('.') + remaining = type_[: -len(_TCP_PROTOCOL_LOCAL_TRAILER)].split(".") trailer = type_[-len(_TCP_PROTOCOL_LOCAL_TRAILER) :] has_protocol = True elif strict: @@ -92,7 +92,7 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis % (type_, _TCP_PROTOCOL_LOCAL_TRAILER, _NONTCP_PROTOCOL_LOCAL_TRAILER) ) elif type_.endswith(_LOCAL_TRAILER): - remaining = type_[: -len(_LOCAL_TRAILER)].split('.') + remaining = type_[: -len(_LOCAL_TRAILER)].split(".") trailer = type_[-len(_LOCAL_TRAILER) + 1 :] has_protocol = False else: @@ -106,56 +106,67 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis if len(remaining) == 1 and len(remaining[0]) == 0: raise BadTypeInNameException("Type '%s' must not start with '.'" % type_) - if service_name[0] != '_': - raise BadTypeInNameException("Service name (%s) must start with '_'" % service_name) + if service_name[0] != "_": + raise BadTypeInNameException( + "Service name (%s) must start with '_'" % service_name + ) test_service_name = service_name[1:] if strict and len(test_service_name) > 15: # https://datatracker.ietf.org/doc/html/rfc6763#section-7.2 - raise BadTypeInNameException("Service name (%s) must be <= 15 bytes" % test_service_name) + raise BadTypeInNameException( + "Service name (%s) must be <= 15 bytes" % test_service_name + ) - if '--' in test_service_name: - raise BadTypeInNameException("Service name (%s) must not 
contain '--'" % test_service_name) + if "--" in test_service_name: + raise BadTypeInNameException( + "Service name (%s) must not contain '--'" % test_service_name + ) - if '-' in (test_service_name[0], test_service_name[-1]): + if "-" in (test_service_name[0], test_service_name[-1]): raise BadTypeInNameException( "Service name (%s) may not start or end with '-'" % test_service_name ) if not _HAS_A_TO_Z.search(test_service_name): raise BadTypeInNameException( - "Service name (%s) must contain at least one letter (eg: 'A-Z')" % test_service_name + "Service name (%s) must contain at least one letter (eg: 'A-Z')" + % test_service_name ) allowed_characters_re = ( - _HAS_ONLY_A_TO_Z_NUM_HYPHEN if strict else _HAS_ONLY_A_TO_Z_NUM_HYPHEN_UNDERSCORE + _HAS_ONLY_A_TO_Z_NUM_HYPHEN + if strict + else _HAS_ONLY_A_TO_Z_NUM_HYPHEN_UNDERSCORE ) if not allowed_characters_re.search(test_service_name): raise BadTypeInNameException( "Service name (%s) must contain only these characters: " - "A-Z, a-z, 0-9, hyphen ('-')%s" % (test_service_name, "" if strict else ", underscore ('_')") + "A-Z, a-z, 0-9, hyphen ('-')%s" + % (test_service_name, "" if strict else ", underscore ('_')") ) else: - service_name = '' + service_name = "" - if remaining and remaining[-1] == '_sub': + if remaining and remaining[-1] == "_sub": remaining.pop() if len(remaining) == 0 or len(remaining[0]) == 0: raise BadTypeInNameException("_sub requires a subtype name") if len(remaining) > 1: - remaining = ['.'.join(remaining)] + remaining = [".".join(remaining)] if remaining: - length = len(remaining[0].encode('utf-8')) + length = len(remaining[0].encode("utf-8")) if length > 63: raise BadTypeInNameException("Too long: '%s'" % remaining[0]) if _HAS_ASCII_CONTROL_CHARS.search(remaining[0]): raise BadTypeInNameException( - "Ascii control character 0x00-0x1F and 0x7F illegal in '%s'" % remaining[0] + "Ascii control character 0x00-0x1F and 0x7F illegal in '%s'" + % remaining[0] ) return service_name + trailer @@ -163,14 
+174,14 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis def possible_types(name: str) -> Set[str]: """Build a set of all possible types from a fully qualified name.""" - labels = name.split('.') + labels = name.split(".") label_count = len(labels) types = set() for count in range(label_count): parts = labels[label_count - count - 4 :] - if not parts[0].startswith('_'): + if not parts[0].startswith("_"): break - types.add('.'.join(parts)) + types.add(".".join(parts)) return types diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index cc4754ab..fbac9fe7 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. 
+ +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import enum @@ -40,7 +40,9 @@ class InterfaceChoice(enum.Enum): All = 2 -InterfacesType = Union[Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]], InterfaceChoice] +InterfacesType = Union[ + Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]], InterfaceChoice +] @enum.unique @@ -65,37 +67,59 @@ def _is_v6_address(addr: bytes) -> bool: def _encode_address(address: str) -> bytes: - is_ipv6 = ':' in address + is_ipv6 = ":" in address address_family = socket.AF_INET6 if is_ipv6 else socket.AF_INET return socket.inet_pton(address_family, address) def get_all_addresses() -> List[str]: - return list({addr.ip for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv4}) + return list( + { + addr.ip + for iface in ifaddr.get_adapters() + for addr in iface.ips + if addr.is_IPv4 + } + ) def get_all_addresses_v6() -> List[Tuple[Tuple[str, int, int], int]]: # IPv6 multicast uses positive indexes for interfaces # TODO: What about multi-address interfaces? 
return list( - {(addr.ip, iface.index) for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv6} + { + (addr.ip, iface.index) + for iface in ifaddr.get_adapters() + for addr in iface.ips + if addr.is_IPv6 + } ) -def ip6_to_address_and_index(adapters: List[Any], ip: str) -> Tuple[Tuple[str, int, int], int]: - if '%' in ip: - ip = ip[: ip.index('%')] # Strip scope_id. +def ip6_to_address_and_index( + adapters: List[Any], ip: str +) -> Tuple[Tuple[str, int, int], int]: + if "%" in ip: + ip = ip[: ip.index("%")] # Strip scope_id. ipaddr = ipaddress.ip_address(ip) for adapter in adapters: for adapter_ip in adapter.ips: # IPv6 addresses are represented as tuples - if isinstance(adapter_ip.ip, tuple) and ipaddress.ip_address(adapter_ip.ip[0]) == ipaddr: - return (cast(Tuple[str, int, int], adapter_ip.ip), cast(int, adapter.index)) + if ( + isinstance(adapter_ip.ip, tuple) + and ipaddress.ip_address(adapter_ip.ip[0]) == ipaddr + ): + return ( + cast(Tuple[str, int, int], adapter_ip.ip), + cast(int, adapter.index), + ) - raise RuntimeError('No adapter found for IP address %s' % ip) + raise RuntimeError("No adapter found for IP address %s" % ip) -def interface_index_to_ip6_address(adapters: List[Any], index: int) -> Tuple[str, int, int]: +def interface_index_to_ip6_address( + adapters: List[Any], index: int +) -> Tuple[str, int, int]: for adapter in adapters: if adapter.index == index: for adapter_ip in adapter.ips: @@ -103,11 +127,11 @@ def interface_index_to_ip6_address(adapters: List[Any], index: int) -> Tuple[str if isinstance(adapter_ip.ip, tuple): return cast(Tuple[str, int, int], adapter_ip.ip) - raise RuntimeError('No adapter found for index %s' % index) + raise RuntimeError("No adapter found for index %s" % index) def ip6_addresses_to_indexes( - interfaces: Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]] + interfaces: Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]], ) -> List[Tuple[Tuple[str, int, int], int]]: """Convert IPv6 
interface addresses to interface indexes. @@ -141,9 +165,9 @@ def normalize_interface_choice( if choice is InterfaceChoice.Default: if ip_version != IPVersion.V4Only: # IPv6 multicast uses interface 0 to mean the default - result.append((('', 0, 0), 0)) + result.append((("", 0, 0), 0)) if ip_version != IPVersion.V6Only: - result.append('0.0.0.0') + result.append("0.0.0.0") elif choice is InterfaceChoice.All: if ip_version != IPVersion.V4Only: result.extend(get_all_addresses_v6()) @@ -151,11 +175,16 @@ def normalize_interface_choice( result.extend(get_all_addresses()) if not result: raise RuntimeError( - 'No interfaces to listen on, check that any interfaces have IP version %s' % ip_version + "No interfaces to listen on, check that any interfaces have IP version %s" + % ip_version ) elif isinstance(choice, list): # First, take IPv4 addresses. - result = [i for i in choice if isinstance(i, str) and ipaddress.ip_address(i).version == 4] + result = [ + i + for i in choice + if isinstance(i, str) and ipaddress.ip_address(i).version == 4 + ] # Unlike IP_ADD_MEMBERSHIP, IPV6_JOIN_GROUP requires interface indexes. result += ip6_addresses_to_indexes(choice) else: @@ -168,7 +197,9 @@ def disable_ipv6_only_or_raise(s: socket.socket) -> None: try: s.setsockopt(_IPPROTO_IPV6, socket.IPV6_V6ONLY, False) except OSError: - log.error('Support for dual V4-V6 sockets is not present, use IPVersion.V4 or IPVersion.V6') + log.error( + "Support for dual V4-V6 sockets is not present, use IPVersion.V4 or IPVersion.V6" + ) raise @@ -181,7 +212,7 @@ def set_so_reuseport_if_available(s: socket.socket) -> None: # versions of Python have SO_REUSEPORT available. # Catch OSError and socket.error for kernel versions <3.9 because lacking # SO_REUSEPORT support. 
- if not hasattr(socket, 'SO_REUSEPORT'): + if not hasattr(socket, "SO_REUSEPORT"): return try: @@ -192,19 +223,23 @@ def set_so_reuseport_if_available(s: socket.socket) -> None: def set_mdns_port_socket_options_for_ip_version( - s: socket.socket, bind_addr: Union[Tuple[str], Tuple[str, int, int]], ip_version: IPVersion + s: socket.socket, + bind_addr: Union[Tuple[str], Tuple[str, int, int]], + ip_version: IPVersion, ) -> None: """Set ttl/hops and loop for mdns port.""" if ip_version != IPVersion.V6Only: - ttl = struct.pack(b'B', 255) - loop = struct.pack(b'B', 1) + ttl = struct.pack(b"B", 255) + loop = struct.pack(b"B", 1) # OpenBSD needs the ttl and loop values for the IP_MULTICAST_TTL and # IP_MULTICAST_LOOP socket options as an unsigned char. try: s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, loop) except OSError as e: - if bind_addr[0] != '' or get_errno(e) != errno.EINVAL: # Fails to set on MacOS + if ( + bind_addr[0] != "" or get_errno(e) != errno.EINVAL + ): # Fails to set on MacOS raise if ip_version != IPVersion.V4Only: @@ -220,13 +255,15 @@ def new_socket( apple_p2p: bool = False, ) -> Optional[socket.socket]: log.debug( - 'Creating new socket with port %s, ip_version %s, apple_p2p %s and bind_addr %r', + "Creating new socket with port %s, ip_version %s, apple_p2p %s and bind_addr %r", port, ip_version, apple_p2p, bind_addr, ) - socket_family = socket.AF_INET if ip_version == IPVersion.V4Only else socket.AF_INET6 + socket_family = ( + socket.AF_INET if ip_version == IPVersion.V4Only else socket.AF_INET6 + ) s = socket.socket(socket_family, socket.SOCK_DGRAM) if ip_version == IPVersion.All: @@ -249,12 +286,13 @@ def new_socket( except OSError as ex: if ex.errno == errno.EADDRNOTAVAIL: log.warning( - 'Address not available when binding to %s, ' 'it is expected to happen on some systems', + "Address not available when binding to %s, " + "it is expected to happen on some systems", 
bind_tup, ) return None raise - log.debug('Created socket %s', s) + log.debug("Created socket %s", s) return s @@ -265,57 +303,66 @@ def add_multicast_member( # This is based on assumptions in normalize_interface_choice is_v6 = isinstance(interface, tuple) err_einval = {errno.EINVAL} - if sys.platform == 'win32': + if sys.platform == "win32": # No WSAEINVAL definition in typeshed err_einval |= {cast(Any, errno).WSAEINVAL} # pylint: disable=no-member - log.debug('Adding %r (socket %d) to multicast group', interface, listen_socket.fileno()) + log.debug( + "Adding %r (socket %d) to multicast group", interface, listen_socket.fileno() + ) try: if is_v6: try: mdns_addr6_bytes = socket.inet_pton(socket.AF_INET6, _MDNS_ADDR6) except OSError: log.info( - 'Unable to translate IPv6 address when adding %s to multicast group, ' - 'this can happen if IPv6 is disabled on the system', + "Unable to translate IPv6 address when adding %s to multicast group, " + "this can happen if IPv6 is disabled on the system", interface, ) return False - iface_bin = struct.pack('@I', cast(int, interface[1])) + iface_bin = struct.pack("@I", cast(int, interface[1])) _value = mdns_addr6_bytes + iface_bin listen_socket.setsockopt(_IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, _value) else: - _value = socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(cast(str, interface)) - listen_socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, _value) + _value = socket.inet_aton(_MDNS_ADDR) + socket.inet_aton( + cast(str, interface) + ) + listen_socket.setsockopt( + socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, _value + ) except OSError as e: _errno = get_errno(e) if _errno == errno.EADDRINUSE: log.info( - 'Address in use when adding %s to multicast group, ' - 'it is expected to happen on some systems', + "Address in use when adding %s to multicast group, " + "it is expected to happen on some systems", interface, ) return False if _errno == errno.EADDRNOTAVAIL: log.info( - 'Address not available when adding %s to 
multicast ' - 'group, it is expected to happen on some systems', + "Address not available when adding %s to multicast " + "group, it is expected to happen on some systems", interface, ) return False if _errno in err_einval: - log.info('Interface of %s does not support multicast, ' 'it is expected in WSL', interface) + log.info( + "Interface of %s does not support multicast, " "it is expected in WSL", + interface, + ) return False if _errno == errno.ENOPROTOOPT: log.info( - 'Failed to set socket option on %s, this can happen if ' - 'the network adapter is in a disconnected state', + "Failed to set socket option on %s, this can happen if " + "the network adapter is in a disconnected state", interface, ) return False if is_v6 and _errno == errno.ENODEV: log.info( - 'Address in use when adding %s to multicast group, ' - 'it is expected to happen when the device does not have ipv6', + "Address in use when adding %s to multicast group, " + "it is expected to happen when the device does not have ipv6", interface, ) return False @@ -331,17 +378,23 @@ def new_respond_socket( respond_socket = new_socket( ip_version=(IPVersion.V6Only if is_v6 else IPVersion.V4Only), apple_p2p=apple_p2p, - bind_addr=cast(Tuple[Tuple[str, int, int], int], interface)[0] if is_v6 else (cast(str, interface),), + bind_addr=cast(Tuple[Tuple[str, int, int], int], interface)[0] + if is_v6 + else (cast(str, interface),), ) if not respond_socket: return None - log.debug('Configuring socket %s with multicast interface %s', respond_socket, interface) + log.debug( + "Configuring socket %s with multicast interface %s", respond_socket, interface + ) if is_v6: - iface_bin = struct.pack('@I', cast(int, interface[1])) + iface_bin = struct.pack("@I", cast(int, interface[1])) respond_socket.setsockopt(_IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_bin) else: respond_socket.setsockopt( - socket.IPPROTO_IP, socket.IP_MULTICAST_IF, socket.inet_aton(cast(str, interface)) + socket.IPPROTO_IP, + socket.IP_MULTICAST_IF, 
+ socket.inet_aton(cast(str, interface)), ) return respond_socket @@ -355,7 +408,9 @@ def create_sockets( if unicast: listen_socket = None else: - listen_socket = new_socket(ip_version=ip_version, apple_p2p=apple_p2p, bind_addr=('',)) + listen_socket = new_socket( + ip_version=ip_version, apple_p2p=apple_p2p, bind_addr=("",) + ) normalized_interfaces = normalize_interface_choice(interfaces, ip_version) @@ -406,10 +461,14 @@ def autodetect_ip_version(interfaces: InterfacesType) -> IPVersion: """Auto detect the IP version when it is not provided.""" if isinstance(interfaces, list): has_v6 = any( - isinstance(i, int) or (isinstance(i, str) and ipaddress.ip_address(i).version == 6) + isinstance(i, int) + or (isinstance(i, str) and ipaddress.ip_address(i).version == 6) + for i in interfaces + ) + has_v4 = any( + isinstance(i, str) and ipaddress.ip_address(i).version == 4 for i in interfaces ) - has_v4 = any(isinstance(i, str) and ipaddress.ip_address(i).version == 4 for i in interfaces) if has_v4 and has_v6: return IPVersion.All if has_v6: diff --git a/src/zeroconf/_utils/time.py b/src/zeroconf/_utils/time.py index 600d9028..2ed8ca92 100644 --- a/src/zeroconf/_utils/time.py +++ b/src/zeroconf/_utils/time.py @@ -1,26 +1,25 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. 
- - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ - import time _float = float diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index b2daeb10..c2a51f94 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -1,24 +1,25 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. 
- - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ + import asyncio import contextlib from types import TracebackType # noqa # used in type hints @@ -62,7 +63,7 @@ class AsyncServiceBrowser(_ServiceBrowserBase): def __init__( self, - zeroconf: 'Zeroconf', + zeroconf: "Zeroconf", type_: Union[str, list], handlers: Optional[Union[ServiceListener, List[Callable[..., None]]]] = None, listener: Optional[ServiceListener] = None, @@ -71,14 +72,16 @@ def __init__( delay: int = _BROWSER_TIME, question_type: Optional[DNSQuestionType] = None, ) -> None: - super().__init__(zeroconf, type_, handlers, listener, addr, port, delay, question_type) + super().__init__( + zeroconf, type_, handlers, listener, addr, port, delay, question_type + ) self._async_start() async def async_cancel(self) -> None: """Cancel the browser.""" self._async_cancel() - async def __aenter__(self) -> 'AsyncServiceBrowser': + async def __aenter__(self) -> "AsyncServiceBrowser": return self async def __aexit__( @@ -97,7 +100,7 @@ class AsyncZeroconfServiceTypes(ZeroconfServiceTypes): @classmethod async def async_find( cls, - aiozc: Optional['AsyncZeroconf'] = None, + aiozc: Optional["AsyncZeroconf"] = None, timeout: Union[int, float] = 5, interfaces: InterfacesType = InterfaceChoice.All, ip_version: Optional[IPVersion] = None, @@ -231,7 +234,11 @@ async def async_close(self) -> None: await self.zeroconf._async_close() # pylint: disable=protected-access async def async_get_service_info( - self, type_: str, name: str, timeout: int = 3000, question_type: Optional[DNSQuestionType] = None + self, + type_: str, + name: str, + timeout: int = 3000, + question_type: Optional[DNSQuestionType] = None, ) -> Optional[AsyncServiceInfo]: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, @@ 
-242,14 +249,20 @@ async def async_get_service_info( :param timeout: milliseconds to wait for a response :param question_type: The type of questions to ask (DNSQuestionType.QM or DNSQuestionType.QU) """ - return await self.zeroconf.async_get_service_info(type_, name, timeout, question_type) + return await self.zeroconf.async_get_service_info( + type_, name, timeout, question_type + ) - async def async_add_service_listener(self, type_: str, listener: ServiceListener) -> None: + async def async_add_service_listener( + self, type_: str, listener: ServiceListener + ) -> None: """Adds a listener for a particular service type. This object will then have its add_service and remove_service methods called when services of that type become available and unavailable.""" await self.async_remove_service_listener(listener) - self.async_browsers[listener] = AsyncServiceBrowser(self.zeroconf, type_, listener) + self.async_browsers[listener] = AsyncServiceBrowser( + self.zeroconf, type_, listener + ) async def async_remove_service_listener(self, listener: ServiceListener) -> None: """Removes a listener from the set that is currently listening.""" @@ -260,10 +273,13 @@ async def async_remove_service_listener(self, listener: ServiceListener) -> None async def async_remove_all_service_listeners(self) -> None: """Removes a listener from the set that is currently listening.""" await asyncio.gather( - *(self.async_remove_service_listener(listener) for listener in list(self.async_browsers)) + *( + self.async_remove_service_listener(listener) + for listener in list(self.async_browsers) + ) ) - async def __aenter__(self) -> 'AsyncZeroconf': + async def __aenter__(self) -> "AsyncZeroconf": return self async def __aexit__( diff --git a/src/zeroconf/const.py b/src/zeroconf/const.py index 73c60d3b..6c64e144 100644 --- a/src/zeroconf/const.py +++ b/src/zeroconf/const.py @@ -1,23 +1,23 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 
William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ import re @@ -31,7 +31,9 @@ _LISTENER_TIME = 200 # ms _BROWSER_TIME = 10000 # ms _DUPLICATE_PACKET_SUPPRESSION_INTERVAL = 1000 # ms -_DUPLICATE_QUESTION_INTERVAL = 999 # ms # Must be 1ms less than _DUPLICATE_PACKET_SUPPRESSION_INTERVAL +_DUPLICATE_QUESTION_INTERVAL = ( + 999 # ms # Must be 1ms less than _DUPLICATE_PACKET_SUPPRESSION_INTERVAL +) _CACHE_CLEANUP_INTERVAL = 10 # s _LOADED_SYSTEM_TIMEOUT = 10 # s _STARTUP_TIMEOUT = 9 # s must be lower than _LOADED_SYSTEM_TIMEOUT @@ -45,8 +47,8 @@ # Some DNS constants -_MDNS_ADDR = '224.0.0.251' -_MDNS_ADDR6 = 'ff02::fb' +_MDNS_ADDR = "224.0.0.251" +_MDNS_ADDR6 = "ff02::fb" _MDNS_PORT = 5353 _DNS_PORT = 53 _DNS_HOST_TTL = 120 # two minute for host records (A, SRV etc) as-per RFC6762 @@ -142,16 +144,16 @@ _ADDRESS_RECORD_TYPES = {_TYPE_A, _TYPE_AAAA} -_HAS_A_TO_Z = re.compile(r'[A-Za-z]') -_HAS_ONLY_A_TO_Z_NUM_HYPHEN = re.compile(r'^[A-Za-z0-9\-]+$') -_HAS_ONLY_A_TO_Z_NUM_HYPHEN_UNDERSCORE = re.compile(r'^[A-Za-z0-9\-\_]+$') -_HAS_ASCII_CONTROL_CHARS = re.compile(r'[\x00-\x1f\x7f]') +_HAS_A_TO_Z = re.compile(r"[A-Za-z]") +_HAS_ONLY_A_TO_Z_NUM_HYPHEN = re.compile(r"^[A-Za-z0-9\-]+$") +_HAS_ONLY_A_TO_Z_NUM_HYPHEN_UNDERSCORE = re.compile(r"^[A-Za-z0-9\-\_]+$") +_HAS_ASCII_CONTROL_CHARS = re.compile(r"[\x00-\x1f\x7f]") _EXPIRE_REFRESH_TIME_PERCENT = 75 -_LOCAL_TRAILER = '.local.' -_TCP_PROTOCOL_LOCAL_TRAILER = '._tcp.local.' -_NONTCP_PROTOCOL_LOCAL_TRAILER = '._udp.local.' +_LOCAL_TRAILER = ".local." +_TCP_PROTOCOL_LOCAL_TRAILER = "._tcp.local." +_NONTCP_PROTOCOL_LOCAL_TRAILER = "._udp.local." # https://datatracker.ietf.org/doc/html/rfc6763#section-9 _SERVICE_TYPE_ENUMERATION_NAME = "_services._dns-sd._udp.local." 
diff --git a/tests/__init__.py b/tests/__init__.py index cbba6073..1feebafb 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,24 +1,25 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - - This module provides a framework for the use of DNS Service Discovery - using IP multicast. - - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. - - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. - - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine + +This module provides a framework for the use of DNS Service Discovery +using IP multicast. + +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. + +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. 
+ +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ + import asyncio import socket import time @@ -35,7 +36,9 @@ class QuestionHistoryWithoutSuppression(QuestionHistory): - def suppresses(self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord]) -> bool: + def suppresses( + self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord] + ) -> bool: return False @@ -70,7 +73,7 @@ def has_working_ipv6(): sock = None try: sock = socket.socket(socket.AF_INET6) - sock.bind(('::1', 0)) + sock.bind(("::1", 0)) except Exception: return False finally: @@ -99,7 +102,6 @@ def time_changed_millis(millis: Optional[float] = None) -> None: mock_seconds_into_future = loop_time with mock.patch("time.monotonic", return_value=mock_seconds_into_future): - for task in list(loop._scheduled): # type: ignore[attr-defined] if not isinstance(task, asyncio.TimerHandle): continue diff --git a/tests/conftest.py b/tests/conftest.py index 5525c4ee..5dfd900f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" conftest for zeroconf tests. """ +"""conftest for zeroconf tests.""" import threading from unittest.mock import patch diff --git a/tests/services/__init__.py b/tests/services/__init__.py index 2ef4b15b..30920c6a 100644 --- a/tests/services/__init__.py +++ b/tests/services/__init__.py @@ -1,21 +1,21 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - This module provides a framework for the use of DNS Service Discovery - using IP multicast. +This module provides a framework for the use of DNS Service Discovery +using IP multicast. 
- This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. - This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 37896ba1..17950683 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._services.browser. 
""" +"""Unit tests for zeroconf._services.browser.""" import asyncio import logging @@ -39,7 +39,7 @@ time_changed_millis, ) -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -65,7 +65,7 @@ def test_service_browser_cancel_multiple_times(): """Test we can cancel a ServiceBrowser multiple times before close.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." @@ -87,7 +87,7 @@ def test_service_browser_cancel_context_manager(): """Test we can cancel a ServiceBrowser with it being used as a context manager.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." @@ -116,7 +116,7 @@ def test_service_browser_cancel_multiple_times_after_close(): """Test we can cancel a ServiceBrowser multiple times after close.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." @@ -137,7 +137,7 @@ class MyServiceListener(r.ServiceListener): def test_service_browser_started_after_zeroconf_closed(): """Test starting a ServiceBrowser after close raises RuntimeError.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." 
@@ -155,9 +155,9 @@ def test_multiple_instances_running_close(): """Test we can shutdown multiple instances.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) - zc2 = Zeroconf(interfaces=['127.0.0.1']) - zc3 = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) + zc2 = Zeroconf(interfaces=["127.0.0.1"]) + zc3 = Zeroconf(interfaces=["127.0.0.1"]) assert zc.loop != zc2.loop assert zc.loop != zc3.loop @@ -177,13 +177,13 @@ class MyServiceListener(r.ServiceListener): class TestServiceBrowser(unittest.TestCase): def test_update_record(self): - enable_ipv6 = has_working_ipv6() and not os.environ.get('SKIP_IPV6') + enable_ipv6 = has_working_ipv6() and not os.environ.get("SKIP_IPV6") - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-1.local.' - service_text = b'path=/~matt1/' - service_address = '10.0.1.2' + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-1.local." 
+ service_text = b"path=/~matt1/" + service_address = "10.0.1.2" service_v6_address = "2001:db8::1" service_v6_second_address = "6001:db8::1" @@ -221,7 +221,9 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de assert service_info.server.lower() == service_server.lower() service_updated_event.set() - def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncoming: + def mock_record_update_incoming_msg( + service_state_change: r.ServiceStateChange, + ) -> r.DNSIncoming: generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) assert generated.is_response() is True @@ -232,7 +234,11 @@ def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) generated.add_answer_at_time( r.DNSText( - service_name, const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, ttl, service_text + service_name, + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + service_text, ), 0, ) @@ -287,19 +293,26 @@ def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) ) generated.add_answer_at_time( - r.DNSPointer(service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name), 0 + r.DNSPointer( + service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name + ), + 0, ) return r.DNSIncoming(generated.packets()[0]) - zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) - service_browser = r.ServiceBrowser(zeroconf, service_type, listener=MyServiceListener()) + zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) + service_browser = r.ServiceBrowser( + zeroconf, service_type, listener=MyServiceListener() + ) try: wait_time = 3 # service added - _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Added)) + _inject_response( + zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Added) + ) service_add_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 0 @@ -307,8 +320,10 @@ def 
mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) # service SRV updated service_updated_event.clear() - service_server = 'ash-2.local.' - _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) + service_server = "ash-2.local." + _inject_response( + zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) + ) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 1 @@ -316,8 +331,10 @@ def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) # service TXT updated service_updated_event.clear() - service_text = b'path=/~matt2/' - _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) + service_text = b"path=/~matt2/" + _inject_response( + zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) + ) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 2 @@ -325,8 +342,10 @@ def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) # service TXT updated - duplicate update should not trigger another service_updated service_updated_event.clear() - service_text = b'path=/~matt2/' - _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) + service_text = b"path=/~matt2/" + _inject_response( + zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) + ) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 2 @@ -334,10 +353,12 @@ def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) # service A updated service_updated_event.clear() - service_address = '10.0.1.3' + service_address = "10.0.1.3" # Verify we match on uppercase service_server = service_server.upper() - _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) + _inject_response( + zeroconf, 
mock_record_update_incoming_msg(r.ServiceStateChange.Updated) + ) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 3 @@ -345,17 +366,21 @@ def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) # service all updated service_updated_event.clear() - service_server = 'ash-3.local.' - service_text = b'path=/~matt3/' - service_address = '10.0.1.3' - _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) + service_server = "ash-3.local." + service_text = b"path=/~matt3/" + service_address = "10.0.1.3" + _inject_response( + zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) + ) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 4 assert service_removed_count == 0 # service removed - _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Removed)) + _inject_response( + zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Removed) + ) service_removed_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 4 @@ -372,8 +397,12 @@ def mock_record_update_incoming_msg(service_state_change: r.ServiceStateChange) class TestServiceBrowserMultipleTypes(unittest.TestCase): def test_update_record(self): - service_names = ['name2._type2._tcp.local.', 'name._type._tcp.local.', 'name._type._udp.local'] - service_types = ['_type2._tcp.local.', '_type._tcp.local.', '_type._udp.local.'] + service_names = [ + "name2._type2._tcp.local.", + "name._type._tcp.local.", + "name._type._udp.local", + ] + service_types = ["_type2._tcp.local.", "_type._tcp.local.", "_type._udp.local."] service_added_count = 0 service_removed_count = 0 @@ -394,16 +423,24 @@ def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de service_removed_event.set() def mock_record_update_incoming_msg( - service_state_change: r.ServiceStateChange, 
service_type: str, service_name: str, ttl: int + service_state_change: r.ServiceStateChange, + service_type: str, + service_name: str, + ttl: int, ) -> r.DNSIncoming: generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( - r.DNSPointer(service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name), 0 + r.DNSPointer( + service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name + ), + 0, ) return r.DNSIncoming(generated.packets()[0]) - zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) - service_browser = r.ServiceBrowser(zeroconf, service_types, listener=MyServiceListener()) + zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) + service_browser = r.ServiceBrowser( + zeroconf, service_types, listener=MyServiceListener() + ) try: wait_time = 3 @@ -433,11 +470,16 @@ def _mock_get_expiration_time(self, percent): return self.created + (percent * self.ttl * 10) # Set an expire time that will force a refresh - with patch("zeroconf.DNSRecord.get_expiration_time", new=_mock_get_expiration_time): + with patch( + "zeroconf.DNSRecord.get_expiration_time", new=_mock_get_expiration_time + ): _inject_response( zeroconf, mock_record_update_incoming_msg( - r.ServiceStateChange.Added, service_types[0], service_names[0], 120 + r.ServiceStateChange.Added, + service_types[0], + service_names[0], + 120, ), ) # Add the last record after updating the first one @@ -446,7 +488,10 @@ def _mock_get_expiration_time(self, percent): _inject_response( zeroconf, mock_record_update_incoming_msg( - r.ServiceStateChange.Added, service_types[2], service_names[2], 120 + r.ServiceStateChange.Added, + service_types[2], + service_names[2], + 120, ), ) service_add_event.wait(wait_time) @@ -502,7 +547,7 @@ def test_first_query_delay(): https://datatracker.ietf.org/doc/html/rfc6762#section-5.2 """ type_ = "_http._tcp.local." 
- zeroconf_browser = Zeroconf(interfaces=['127.0.0.1']) + zeroconf_browser = Zeroconf(interfaces=["127.0.0.1"]) _wait_for_start(zeroconf_browser) # we are going to patch the zeroconf send to check query transmission @@ -525,10 +570,15 @@ def on_service_state_change(zeroconf, service_type, state_change, name): start_time = current_time_millis() browser = ServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) - time.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) + time.sleep( + millis_to_seconds( + _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5 + ) + ) try: assert ( - current_time_millis() - start_time > _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[0] + current_time_millis() - start_time + > _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[0] ) finally: browser.cancel() @@ -553,7 +603,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): elif state_change is ServiceStateChange.Removed: service_removed.set() - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() await zeroconf_browser.async_wait_for_start() @@ -573,7 +623,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): assert len(zeroconf_browser.engine.protocols) == 2 - aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + aio_zeroconf_registrar = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_registrar = aio_zeroconf_registrar.zeroconf await aio_zeroconf_registrar.zeroconf.async_wait_for_start() @@ -583,14 +633,16 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): service_added = asyncio.Event() service_removed = asyncio.Event() - browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + browser = AsyncServiceBrowser( + zeroconf_browser, type_, 
[on_service_state_change] + ) info = ServiceInfo( type_, registration_name, 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -655,7 +707,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): elif state_change is ServiceStateChange.Removed: service_removed.set() - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() await zeroconf_browser.async_wait_for_start() @@ -675,7 +727,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): assert len(zeroconf_browser.engine.protocols) == 2 - aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + aio_zeroconf_registrar = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_registrar = aio_zeroconf_registrar.zeroconf await aio_zeroconf_registrar.zeroconf.async_wait_for_start() @@ -685,14 +737,16 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): service_added = asyncio.Event() service_removed = asyncio.Event() - browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + browser = AsyncServiceBrowser( + zeroconf_browser, type_, [on_service_state_change] + ) info = ServiceInfo( type_, registration_name, 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -751,7 +805,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): async def test_asking_qm_questions(): """Verify explictly asking QM questions.""" type_ = "_quservice._tcp.local." 
- aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf await zeroconf_browser.async_wait_for_start() # we are going to patch the zeroconf send to check query transmission @@ -773,9 +827,16 @@ def on_service_state_change(zeroconf, service_type, state_change, name): pass browser = AsyncServiceBrowser( - zeroconf_browser, type_, [on_service_state_change], question_type=r.DNSQuestionType.QM + zeroconf_browser, + type_, + [on_service_state_change], + question_type=r.DNSQuestionType.QM, + ) + await asyncio.sleep( + millis_to_seconds( + _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5 + ) ) - await asyncio.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) try: assert first_outgoing.questions[0].unicast is False # type: ignore[union-attr] finally: @@ -787,7 +848,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): async def test_asking_qu_questions(): """Verify the service browser can ask QU questions.""" type_ = "_quservice._tcp.local." 
- aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf await zeroconf_browser.async_wait_for_start() @@ -810,9 +871,16 @@ def on_service_state_change(zeroconf, service_type, state_change, name): pass browser = AsyncServiceBrowser( - zeroconf_browser, type_, [on_service_state_change], question_type=r.DNSQuestionType.QU + zeroconf_browser, + type_, + [on_service_state_change], + question_type=r.DNSQuestionType.QU, + ) + await asyncio.sleep( + millis_to_seconds( + _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5 + ) ) - await asyncio.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) try: assert first_outgoing.questions[0].unicast is True # type: ignore[union-attr] finally: @@ -824,11 +892,15 @@ def test_legacy_record_update_listener(): """Test a RecordUpdateListener that does not implement update_records.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) with pytest.raises(RuntimeError): r.RecordUpdateListener().update_record( - zc, 0, r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL) + zc, + 0, + r.DNSRecord( + "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL + ), ) updates = [] @@ -836,7 +908,9 @@ def test_legacy_record_update_listener(): class LegacyRecordUpdateListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def update_record(self, zc: 'Zeroconf', now: float, record: r.DNSRecord) -> None: + def update_record( + self, zc: "Zeroconf", now: float, record: r.DNSRecord + ) -> None: nonlocal updates updates.append(record) @@ -855,11 +929,11 @@ def on_service_state_change(zeroconf, service_type, state_change, name): info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", 
addresses=[socket.inet_aton("10.0.1.2")], ) @@ -871,7 +945,15 @@ def on_service_state_change(zeroconf, service_type, state_change, name): browser.cancel() assert len(updates) - assert len([isinstance(update, r.DNSPointer) and update.name == type_ for update in updates]) >= 1 + assert ( + len( + [ + isinstance(update, r.DNSPointer) and update.name == type_ + for update in updates + ] + ) + >= 1 + ) zc.remove_listener(listener) # Removing a second time should not throw @@ -884,7 +966,7 @@ def test_service_browser_is_aware_of_port_changes(): """Test that the ServiceBrowser is aware of port changes.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." registration_name = "xxxyyy.%s" % type_ @@ -900,18 +982,29 @@ def on_service_state_change(zeroconf, service_type, state_change, name): browser = ServiceBrowser(zc, type_, [on_service_state_change]) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] + ) _inject_response( zc, - mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), + mock_incoming_msg( + [ + info.dns_pointer(), + info.dns_service(), + info.dns_text(), + *info.dns_addresses(), + ] + ), ) time.sleep(0.1) - assert callbacks == [('_hap._tcp.local.', ServiceStateChange.Added, 'xxxyyy._hap._tcp.local.')] + assert callbacks == [ + ("_hap._tcp.local.", ServiceStateChange.Added, "xxxyyy._hap._tcp.local.") + ] service_info = zc.get_service_info(type_, registration_name) assert service_info is not None assert service_info.port == 80 @@ -926,8 +1019,8 @@ def on_service_state_change(zeroconf, service_type, state_change, name): 
time.sleep(0.1) assert callbacks == [ - ('_hap._tcp.local.', ServiceStateChange.Added, 'xxxyyy._hap._tcp.local.'), - ('_hap._tcp.local.', ServiceStateChange.Updated, 'xxxyyy._hap._tcp.local.'), + ("_hap._tcp.local.", ServiceStateChange.Added, "xxxyyy._hap._tcp.local."), + ("_hap._tcp.local.", ServiceStateChange.Updated, "xxxyyy._hap._tcp.local."), ] service_info = zc.get_service_info(type_, registration_name) assert service_info is not None @@ -941,7 +1034,7 @@ def test_service_browser_listeners_update_service(): """Test that the ServiceBrowser ServiceListener that implements update_service.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." registration_name = "xxxyyy.%s" % type_ @@ -967,14 +1060,23 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de browser = r.ServiceBrowser(zc, type_, None, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] + ) _inject_response( zc, - mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), + mock_incoming_msg( + [ + info.dns_pointer(), + info.dns_service(), + info.dns_text(), + *info.dns_addresses(), + ] + ), ) time.sleep(0.2) info._dns_service_cache = None # we are mutating the record so clear the cache @@ -987,8 +1089,8 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de time.sleep(0.2) assert callbacks == [ - ('add', type_, registration_name), - ('update', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), ] browser.cancel() @@ -999,7 +1101,7 @@ def 
test_service_browser_listeners_no_update_service(): """Test that the ServiceBrowser ServiceListener that does not implement update_service.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." registration_name = "xxxyyy.%s" % type_ @@ -1020,14 +1122,23 @@ def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de browser = r.ServiceBrowser(zc, type_, None, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] + ) _inject_response( zc, - mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), + mock_incoming_msg( + [ + info.dns_pointer(), + info.dns_service(), + info.dns_text(), + *info.dns_addresses(), + ] + ), ) time.sleep(0.2) info.port = 400 @@ -1040,7 +1151,7 @@ def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de time.sleep(0.2) assert callbacks == [ - ('add', type_, registration_name), + ("add", type_, registration_name), ] browser.cancel() @@ -1054,13 +1165,17 @@ def test_service_browser_uses_non_strict_names(): def on_service_state_change(zeroconf, service_type, state_change, name): pass - zc = r.Zeroconf(interfaces=['127.0.0.1']) - browser = ServiceBrowser(zc, ["_tivo-videostream._tcp.local."], [on_service_state_change]) + zc = r.Zeroconf(interfaces=["127.0.0.1"]) + browser = ServiceBrowser( + zc, ["_tivo-videostream._tcp.local."], [on_service_state_change] + ) browser.cancel() # Still fail on completely invalid with pytest.raises(r.BadTypeInNameException): - browser = ServiceBrowser(zc, ["tivo-videostream._tcp.local."], [on_service_state_change]) + browser = 
ServiceBrowser( + zc, ["tivo-videostream._tcp.local."], [on_service_state_change] + ) zc.close() @@ -1069,7 +1184,9 @@ def test_group_ptr_queries_with_known_answers(): now = current_time_millis() for i in range(120): name = f"_hap{i}._tcp._local." - questions_with_known_answers[DNSQuestion(name, const._TYPE_PTR, const._CLASS_IN)] = { + questions_with_known_answers[ + DNSQuestion(name, const._TYPE_PTR, const._CLASS_IN) + ] = { DNSPointer( name, const._TYPE_PTR, @@ -1079,7 +1196,9 @@ def test_group_ptr_queries_with_known_answers(): ) for counter in range(i) } - outs = _services_browser.group_ptr_queries_with_known_answers(now, True, questions_with_known_answers) + outs = _services_browser.group_ptr_queries_with_known_answers( + now, True, questions_with_known_answers + ) for out in outs: packets = out.packets() # If we generate multiple packets there must @@ -1092,7 +1211,7 @@ def test_group_ptr_queries_with_known_answers(): @pytest.mark.asyncio async def test_generate_service_query_suppress_duplicate_questions(): """Generate a service query for sending with zeroconf.send.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf now = current_time_millis() name = "_suppresstest._tcp.local." 
@@ -1102,21 +1221,25 @@ async def test_generate_service_query_suppress_duplicate_questions(): const._TYPE_PTR, const._CLASS_IN, 10000, - f'known-to-other.{name}', + f"known-to-other.{name}", ) other_known_answers: Set[r.DNSRecord] = {answer} zc.question_history.add_question_at_time(question, now, other_known_answers) assert zc.question_history.suppresses(question, now, other_known_answers) # The known answer list is different, do not suppress - outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) + outs = _services_browser.generate_service_query( + zc, now, {name}, multicast=True, question_type=None + ) assert outs zc.cache.async_add_records([answer]) # The known answer list contains all the asked questions in the history # we should suppress - outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) + outs = _services_browser.generate_service_query( + zc, now, {name}, multicast=True, question_type=None + ) assert not outs # We do not suppress once the question history expires @@ -1126,17 +1249,23 @@ async def test_generate_service_query_suppress_duplicate_questions(): assert outs # We do not suppress QU queries ever - outs = _services_browser.generate_service_query(zc, now, {name}, multicast=False, question_type=None) + outs = _services_browser.generate_service_query( + zc, now, {name}, multicast=False, question_type=None + ) assert outs zc.question_history.async_expire(now + 2000) # No suppression after clearing the history - outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) + outs = _services_browser.generate_service_query( + zc, now, {name}, multicast=True, question_type=None + ) assert outs # The previous query we just sent is still remembered and # the next one is suppressed - outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) + outs = 
_services_browser.generate_service_query( + zc, now, {name}, multicast=True, question_type=None + ) assert not outs await aiozc.async_close() @@ -1146,7 +1275,7 @@ async def test_generate_service_query_suppress_duplicate_questions(): async def test_query_scheduler(): delay = const._BROWSER_TIME types_ = {"_hap._tcp.local.", "_http._tcp.local."} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() zc = aiozc.zeroconf sends: List[r.DNSIncoming] = [] @@ -1156,12 +1285,13 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): pout = r.DNSIncoming(out.packets()[0]) sends.append(pout) - query_scheduler = _services_browser.QueryScheduler(zc, types_, None, 0, True, delay, (0, 0), None) + query_scheduler = _services_browser.QueryScheduler( + zc, types_, None, 0, True, delay, (0, 0), None + ) loop = asyncio.get_running_loop() # patch the zeroconf send so we can capture what is being sent with patch.object(zc, "async_send", send): - query_scheduler.start(loop) original_now = loop.time() @@ -1186,15 +1316,23 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): ) query_scheduler.reschedule_ptr_first_refresh(ptr_record) - expected_when_time = ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) + expected_when_time = ptr_record.get_expiration_time( + const._EXPIRE_REFRESH_TIME_PERCENT + ) expected_expire_time = ptr_record.get_expiration_time(100) ptr_query = _ScheduledPTRQuery( - ptr_record.alias, ptr_record.name, int(ptr_record.ttl), expected_expire_time, expected_when_time + ptr_record.alias, + ptr_record.name, + int(ptr_record.ttl), + expected_expire_time, + expected_when_time, ) assert query_scheduler._query_heap == [ptr_query] query_scheduler.reschedule_ptr_first_refresh(ptr2_record) - expected_when_time = ptr2_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) + expected_when_time = 
ptr2_record.get_expiration_time( + const._EXPIRE_REFRESH_TIME_PERCENT + ) expected_expire_time = ptr2_record.get_expiration_time(100) ptr2_query = _ScheduledPTRQuery( ptr2_record.alias, @@ -1236,7 +1374,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): async def test_query_scheduler_rescue_records(): delay = const._BROWSER_TIME types_ = {"_hap._tcp.local.", "_http._tcp.local."} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() zc = aiozc.zeroconf sends: List[r.DNSIncoming] = [] @@ -1246,12 +1384,13 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): pout = r.DNSIncoming(out.packets()[0]) sends.append(pout) - query_scheduler = _services_browser.QueryScheduler(zc, types_, None, 0, True, delay, (0, 0), None) + query_scheduler = _services_browser.QueryScheduler( + zc, types_, None, 0, True, delay, (0, 0), None + ) loop = asyncio.get_running_loop() # patch the zeroconf send so we can capture what is being sent with patch.object(zc, "async_send", send): - query_scheduler.start(loop) original_now = loop.time() @@ -1269,10 +1408,16 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): ) query_scheduler.reschedule_ptr_first_refresh(ptr_record) - expected_when_time = ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) + expected_when_time = ptr_record.get_expiration_time( + const._EXPIRE_REFRESH_TIME_PERCENT + ) expected_expire_time = ptr_record.get_expiration_time(100) ptr_query = _ScheduledPTRQuery( - ptr_record.alias, ptr_record.name, int(ptr_record.ttl), expected_expire_time, expected_when_time + ptr_record.alias, + ptr_record.name, + int(ptr_record.ttl), + expected_expire_time, + expected_when_time, ) assert query_scheduler._query_heap == [ptr_query] assert query_scheduler._query_heap[0].cancelled is False @@ -1308,7 +1453,7 @@ def test_service_browser_matching(): """Test 
that the ServiceBrowser matching does not match partial names.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_http._tcp.local." registration_name = "xxxyyy.%s" % type_ @@ -1336,17 +1481,33 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de browser = r.ServiceBrowser(zc, type_, None, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] + ) should_not_match = ServiceInfo( - not_match_type_, not_match_registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] + not_match_type_, + not_match_registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[address], ) _inject_response( zc, - mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), + mock_incoming_msg( + [ + info.dns_pointer(), + info.dns_service(), + info.dns_text(), + *info.dns_addresses(), + ] + ), ) _inject_response( zc, @@ -1375,8 +1536,8 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de time.sleep(0.2) assert callbacks == [ - ('add', type_, registration_name), - ('update', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), ] browser.cancel() @@ -1387,7 +1548,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de def test_service_browser_expire_callbacks(): """Test that the ServiceBrowser matching does not match partial names.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_old._tcp.local." 
registration_name = "uniquezip323.%s" % type_ @@ -1413,7 +1574,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de browser = r.ServiceBrowser(zc, type_, None, listener) - desc = {'path': '/~paul2/'} + desc = {"path": "/~paul2/"} address_parsed = "10.0.1.3" address = socket.inet_aton(address_parsed) info = ServiceInfo( @@ -1431,7 +1592,14 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de _inject_response( zc, - mock_incoming_msg([info.dns_pointer(), info.dns_service(), info.dns_text(), *info.dns_addresses()]), + mock_incoming_msg( + [ + info.dns_pointer(), + info.dns_service(), + info.dns_text(), + *info.dns_addresses(), + ] + ), ) # Force the ttl to be 1 second now = current_time_millis() @@ -1454,8 +1622,8 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de break assert callbacks == [ - ('add', type_, registration_name), - ('update', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), ] for _ in range(25): @@ -1464,9 +1632,9 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de break assert callbacks == [ - ('add', type_, registration_name), - ('update', type_, registration_name), - ('remove', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), + ("remove", type_, registration_name), ] browser.cancel() @@ -1474,9 +1642,15 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de def test_scheduled_ptr_query_dunder_methods(): - query75 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 75) - query80 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 80) - query75_2 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 140, 75) + query75 = _ScheduledPTRQuery( + "zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 75 + ) + query80 = 
_ScheduledPTRQuery( + "zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 80 + ) + query75_2 = _ScheduledPTRQuery( + "zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 140, 75 + ) other = object() stringified = str(query75) assert "zoomy._hap._tcp.local." in stringified @@ -1515,7 +1689,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): if state_change is ServiceStateChange.Added: service_added.set() - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() await zeroconf_browser.async_wait_for_start() @@ -1529,7 +1703,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): assert len(zeroconf_browser.engine.protocols) == 2 - aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + aio_zeroconf_registrar = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_registrar = aio_zeroconf_registrar.zeroconf await aio_zeroconf_registrar.zeroconf.async_wait_for_start() @@ -1538,14 +1712,16 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): with patch.object(zeroconf_browser, "async_send", send): service_added = asyncio.Event() - browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + browser = AsyncServiceBrowser( + zeroconf_browser, type_, [on_service_state_change] + ) info = ServiceInfo( type_, registration_name, 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -1584,7 +1760,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): if state_change is ServiceStateChange.Added: service_added.set() - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() await 
zeroconf_browser.async_wait_for_start() @@ -1598,7 +1774,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): assert len(zeroconf_browser.engine.protocols) == 2 - aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + aio_zeroconf_registrar = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_registrar = aio_zeroconf_registrar.zeroconf await aio_zeroconf_registrar.zeroconf.async_wait_for_start() @@ -1606,7 +1782,9 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): # patch the zeroconf send so we can capture what is being sent with patch.object(zeroconf_browser, "async_send", send): service_added = asyncio.Event() - browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + browser = AsyncServiceBrowser( + zeroconf_browser, type_, [on_service_state_change] + ) expected_ttl = const._DNS_OTHER_TTL info = ServiceInfo( type_, @@ -1614,7 +1792,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) diff --git a/tests/services/test_info.py b/tests/services/test_info.py index c02d5e05..aefef6c8 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._services.info. """ +"""Unit tests for zeroconf._services.info.""" import asyncio import logging @@ -26,7 +26,7 @@ from .. import _inject_response, has_working_ipv6 -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -44,29 +44,43 @@ def teardown_module(): class TestServiceInfo(unittest.TestCase): def test_get_name(self): """Verify the name accessor can strip the type.""" - desc = {'path': '/~paulsm/'} - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-1.local.' 
+ desc = {"path": "/~paulsm/"} + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-1.local." service_address = socket.inet_aton("10.0.1.2") info = ServiceInfo( - service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] + service_type, + service_name, + 22, + 0, + 0, + desc, + service_server, + addresses=[service_address], ) assert info.get_name() == "name" def test_service_info_rejects_non_matching_updates(self): """Verify records with the wrong name are rejected.""" - zc = r.Zeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-1.local.' + zc = r.Zeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-1.local." service_address = socket.inet_aton("10.0.1.2") ttl = 120 now = r.current_time_millis() info = ServiceInfo( - service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] + service_type, + service_name, + 22, + 0, + 0, + desc, + service_server, + addresses=[service_address], ) # Verify backwards compatiblity with calling with None info.async_update_records(zc, now, []) @@ -81,7 +95,7 @@ def test_service_info_rejects_non_matching_updates(self): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, ttl, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), None, ) @@ -101,14 +115,14 @@ def test_service_info_rejects_non_matching_updates(self): 0, 0, 80, - 'ASH-2.local.', + "ASH-2.local.", ), None, ) ], ) - assert info.server_key == 'ash-2.local.' - assert info.server == 'ASH-2.local.' + assert info.server_key == "ash-2.local." + assert info.server == "ASH-2.local." 
new_address = socket.inet_aton("10.0.1.3") info.async_update_records( zc, @@ -116,7 +130,7 @@ def test_service_info_rejects_non_matching_updates(self): [ RecordUpdate( r.DNSAddress( - 'ASH-2.local.', + "ASH-2.local.", const._TYPE_A, const._CLASS_IN | const._CLASS_UNIQUE, ttl, @@ -138,7 +152,7 @@ def test_service_info_rejects_non_matching_updates(self): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, ttl, - b'\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==", ), None, ) @@ -158,14 +172,14 @@ def test_service_info_rejects_non_matching_updates(self): 0, 0, 80, - 'ASH-2.local.', + "ASH-2.local.", ), None, ) ], ) - assert info.server_key == 'ash-2.local.' - assert info.server == 'ASH-2.local.' + assert info.server_key == "ash-2.local." + assert info.server == "ASH-2.local." new_address = socket.inet_aton("10.0.1.4") info.async_update_records( zc, @@ -188,16 +202,23 @@ def test_service_info_rejects_non_matching_updates(self): def test_service_info_rejects_expired_records(self): """Verify records that are expired are rejected.""" - zc = r.Zeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-1.local.' + zc = r.Zeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-1.local." 
service_address = socket.inet_aton("10.0.1.2") ttl = 120 now = r.current_time_millis() info = ServiceInfo( - service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] + service_type, + service_name, + 22, + 0, + 0, + desc, + service_server, + addresses=[service_address], ) # Matching updates info.async_update_records( @@ -210,7 +231,7 @@ def test_service_info_rejects_expired_records(self): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, ttl, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), None, ) @@ -223,24 +244,24 @@ def test_service_info_rejects_expired_records(self): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, ttl, - b'\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==", ) expired_record.set_created_ttl(1000, 1) info.async_update_records(zc, now, [RecordUpdate(expired_record, None)]) assert info.properties[b"ci"] == b"2" zc.close() - @unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') - @unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') + @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") + @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_get_info_partial(self): - zc = r.Zeroconf(interfaces=['127.0.0.1']) - - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-1.local.' - service_text = b'path=/~matt1/' - service_address = '10.0.1.2' - service_address_v6_ll = 'fe80::52e:c2f2:bc5f:e9c6' + zc = r.Zeroconf(interfaces=["127.0.0.1"]) + + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-1.local." 
+ service_text = b"path=/~matt1/" + service_address = "10.0.1.2" + service_address_v6_ll = "fe80::52e:c2f2:bc5f:e9c6" service_scope_id = 12 service_info = None @@ -275,7 +296,8 @@ def get_service_info_helper(zc, type, name): try: ttl = 120 helper_thread = threading.Thread( - target=get_service_info_helper, args=(zc, service_type, service_name) + target=get_service_info_helper, + args=(zc, service_type, service_name), ) helper_thread.start() wait_time = 1 @@ -284,10 +306,22 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 4 - assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions + assert ( + r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) + in last_sent.questions + ) assert service_info is None # Expect query for SRV, A, AAAA @@ -310,9 +344,18 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 3 # type: ignore[unreachable] - assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions + assert ( + r.DNSQuestion(service_name, const._TYPE_SRV, 
const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) + in last_sent.questions + ) assert service_info is None # Expect query for A, AAAA @@ -338,8 +381,14 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 2 - assert r.DNSQuestion(service_server, const._TYPE_A, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_server, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions + assert ( + r.DNSQuestion(service_server, const._TYPE_A, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_server, const._TYPE_AAAA, const._CLASS_IN) + in last_sent.questions + ) last_sent = None assert service_info is None @@ -362,7 +411,9 @@ def get_service_info_helper(zc, type, name): const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, ttl, - socket.inet_pton(socket.AF_INET6, service_address_v6_ll), + socket.inet_pton( + socket.AF_INET6, service_address_v6_ll + ), scope_id=service_scope_id, ), ] @@ -377,13 +428,13 @@ def get_service_info_helper(zc, type, name): zc.remove_all_service_listeners() zc.close() - @unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') - @unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') + @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") + @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_get_info_suppressed_by_question_history(self): - zc = r.Zeroconf(interfaces=['127.0.0.1']) + zc = r.Zeroconf(interfaces=["127.0.0.1"]) - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." 
service_info = None send_event = Event() @@ -416,19 +467,34 @@ def get_service_info_helper(zc, type, name): try: helper_thread = threading.Thread( - target=get_service_info_helper, args=(zc, service_type, service_name) + target=get_service_info_helper, + args=(zc, service_type, service_name), ) helper_thread.start() - wait_time = (const._LISTENER_TIME + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5) / 1000 + wait_time = ( + const._LISTENER_TIME + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5 + ) / 1000 # Expect query for SRV, TXT, A, AAAA send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 4 - assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions + assert ( + r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) + in last_sent.questions + ) assert service_info is None # Expect query for SRV only as A, AAAA, and TXT are suppressed @@ -441,22 +507,33 @@ def get_service_info_helper(zc, type, name): ) # Wait long enough to be inside the question history window now = r.current_time_millis() zc.question_history.add_question_at_time( - r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN), now, set() + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN), + now, + set(), ) zc.question_history.add_question_at_time( - r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN), now, set() + 
r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN), + now, + set(), ) zc.question_history.add_question_at_time( - r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN), now, set() + r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN), + now, + set(), ) send_event.wait(wait_time * 0.25) assert last_sent is not None assert len(last_sent.questions) == 1 # type: ignore[unreachable] - assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions + assert ( + r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) + in last_sent.questions + ) assert service_info is None wait_time = ( - const._DUPLICATE_QUESTION_INTERVAL + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5 + const._DUPLICATE_QUESTION_INTERVAL + + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + + 5 ) / 1000 # Expect no queries as all are suppressed by the question history last_sent = None @@ -467,16 +544,24 @@ def get_service_info_helper(zc, type, name): ) # Wait long enough to be inside the question history window now = r.current_time_millis() zc.question_history.add_question_at_time( - r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN), now, set() + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN), + now, + set(), ) zc.question_history.add_question_at_time( - r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN), now, set() + r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN), + now, + set(), ) zc.question_history.add_question_at_time( - r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN), now, set() + r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN), + now, + set(), ) zc.question_history.add_question_at_time( - r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN), now, set() + r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN), + now, + set(), ) send_event.wait(wait_time * 0.25) # All questions are suppressed so no query should be sent @@ -489,13 +574,13 @@ def 
get_service_info_helper(zc, type, name): zc.close() def test_get_info_single(self): - zc = r.Zeroconf(interfaces=['127.0.0.1']) + zc = r.Zeroconf(interfaces=["127.0.0.1"]) - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-1.local.' - service_text = b'path=/~matt1/' - service_address = '10.0.1.2' + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-1.local." + service_text = b"path=/~matt1/" + service_address = "10.0.1.2" service_info = None send_event = Event() @@ -529,7 +614,8 @@ def get_service_info_helper(zc, type, name): try: ttl = 120 helper_thread = threading.Thread( - target=get_service_info_helper, args=(zc, service_type, service_name) + target=get_service_info_helper, + args=(zc, service_type, service_name), ) helper_thread.start() wait_time = 1 @@ -538,10 +624,22 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 4 - assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions - assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions + assert ( + r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) + in last_sent.questions + ) + assert ( + r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) + in last_sent.questions + ) assert service_info is None # Expext no further queries @@ -590,16 +688,23 @@ def get_service_info_helper(zc, type, name): def 
test_service_info_duplicate_properties_txt_records(self): """Verify the first property is always used when there are duplicates in a txt record.""" - zc = r.Zeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-1.local.' + zc = r.Zeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-1.local." service_address = socket.inet_aton("10.0.1.2") ttl = 120 now = r.current_time_millis() info = ServiceInfo( - service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] + service_type, + service_name, + 22, + 0, + 0, + desc, + service_server, + addresses=[service_address], ) info.async_update_records( zc, @@ -611,7 +716,7 @@ def test_service_info_duplicate_properties_txt_records(self): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, ttl, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==\x04dd=0\x04jl=2\x04qq=0\x0brr=6fLM5A==\x04ci=3', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==\x04dd=0\x04jl=2\x04qq=0\x0brr=6fLM5A==\x04ci=3", ), None, ) @@ -626,12 +731,21 @@ def test_service_info_duplicate_properties_txt_records(self): def test_multiple_addresses(): type_ = "_http._tcp.local." 
registration_name = "xxxyyy.%s" % type_ - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) # New kwarg way - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address, address]) + info = ServiceInfo( + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[address, address], + ) assert info.addresses == [address, address] assert info.parsed_addresses() == [address_parsed, address_parsed] @@ -652,7 +766,7 @@ def test_multiple_addresses(): assert info.parsed_scoped_addresses() == [address_parsed, address_parsed] ipaddress_supports_scope_id = sys.version_info >= (3, 9, 0) - if has_working_ipv6() and not os.environ.get('SKIP_IPV6'): + if has_working_ipv6() and not os.environ.get("SKIP_IPV6"): address_v6_parsed = "2001:db8::1" address_v6 = socket.inet_pton(socket.AF_INET6, address_v6_parsed) address_v6_ll_parsed = "fe80::52e:c2f2:bc5f:e9c6" @@ -679,13 +793,21 @@ def test_multiple_addresses(): 0, desc, "ash-2.local.", - parsed_addresses=[address_parsed, address_v6_parsed, address_v6_ll_parsed], + parsed_addresses=[ + address_parsed, + address_v6_parsed, + address_v6_ll_parsed, + ], interface_index=interface_index, ), ] for info in infos: assert info.addresses == [address] - assert info.addresses_by_version(r.IPVersion.All) == [address, address_v6, address_v6_ll] + assert info.addresses_by_version(r.IPVersion.All) == [ + address, + address_v6, + address_v6_ll, + ] assert info.ip_addresses_by_version(r.IPVersion.All) == [ ip_address(address), ip_address(address_v6), @@ -694,34 +816,50 @@ def test_multiple_addresses(): else ip_address(address_v6_ll), ] assert info.addresses_by_version(r.IPVersion.V4Only) == [address] - assert info.ip_addresses_by_version(r.IPVersion.V4Only) == [ip_address(address)] - assert info.addresses_by_version(r.IPVersion.V6Only) == [address_v6, address_v6_ll] + assert 
info.ip_addresses_by_version(r.IPVersion.V4Only) == [ + ip_address(address) + ] + assert info.addresses_by_version(r.IPVersion.V6Only) == [ + address_v6, + address_v6_ll, + ] assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ ip_address(address_v6), ip_address(address_v6_ll_scoped_parsed) if ipaddress_supports_scope_id else ip_address(address_v6_ll), ] - assert info.parsed_addresses() == [address_parsed, address_v6_parsed, address_v6_ll_parsed] + assert info.parsed_addresses() == [ + address_parsed, + address_v6_parsed, + address_v6_ll_parsed, + ] assert info.parsed_addresses(r.IPVersion.V4Only) == [address_parsed] - assert info.parsed_addresses(r.IPVersion.V6Only) == [address_v6_parsed, address_v6_ll_parsed] + assert info.parsed_addresses(r.IPVersion.V6Only) == [ + address_v6_parsed, + address_v6_ll_parsed, + ] assert info.parsed_scoped_addresses() == [ address_parsed, address_v6_parsed, - address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, + address_v6_ll_scoped_parsed + if ipaddress_supports_scope_id + else address_v6_ll_parsed, ] assert info.parsed_scoped_addresses(r.IPVersion.V4Only) == [address_parsed] assert info.parsed_scoped_addresses(r.IPVersion.V6Only) == [ address_v6_parsed, - address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, + address_v6_ll_scoped_parsed + if ipaddress_supports_scope_id + else address_v6_ll_parsed, ] -@unittest.skipIf(sys.version_info < (3, 9, 0), 'Requires newer python') +@unittest.skipIf(sys.version_info < (3, 9, 0), "Requires newer python") def test_scoped_addresses_from_cache(): type_ = "_http._tcp.local." registration_name = f"scoped.{type_}" - zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) + zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) host = "scoped.local." 
zeroconf.cache.async_add_records( @@ -758,7 +896,9 @@ def test_scoped_addresses_from_cache(): info = ServiceInfo(type_, registration_name) info.load_from_cache(zeroconf) assert info.parsed_scoped_addresses() == ["fe80::52e:c2f2:bc5f:e9c6%12"] - assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ip_address("fe80::52e:c2f2:bc5f:e9c6%12")] + assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ + ip_address("fe80::52e:c2f2:bc5f:e9c6%12") + ] zeroconf.close() @@ -769,18 +909,22 @@ async def test_multiple_a_addresses_newest_address_first(): """Test that info.addresses returns the newest seen address first.""" type_ = "_http._tcp.local." registration_name = "multiarec.%s" % type_ - desc = {'path': '/~paulsm/'} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + desc = {"path": "/~paulsm/"} + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) cache = aiozc.zeroconf.cache host = "multahost.local." - record1 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b'\x7f\x00\x00\x01') - record2 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b'\x7f\x00\x00\x02') + record1 = r.DNSAddress( + host, const._TYPE_A, const._CLASS_IN, 1000, b"\x7f\x00\x00\x01" + ) + record2 = r.DNSAddress( + host, const._TYPE_A, const._CLASS_IN, 1000, b"\x7f\x00\x00\x02" + ) cache.async_add_records([record1, record2]) # New kwarg way info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, host) info.load_from_cache(aiozc.zeroconf) - assert info.addresses == [b'\x7f\x00\x00\x02', b'\x7f\x00\x00\x01'] + assert info.addresses == [b"\x7f\x00\x00\x02", b"\x7f\x00\x00\x01"] await aiozc.async_close() @@ -788,12 +932,12 @@ async def test_multiple_a_addresses_newest_address_first(): async def test_invalid_a_addresses(caplog): type_ = "_http._tcp.local." 
registration_name = "multiarec.%s" % type_ - desc = {'path': '/~paulsm/'} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + desc = {"path": "/~paulsm/"} + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) cache = aiozc.zeroconf.cache host = "multahost.local." - record1 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b'a') - record2 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b'b') + record1 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b"a") + record2 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b"b") cache.async_add_records([record1, record2]) # New kwarg way @@ -805,25 +949,34 @@ async def test_invalid_a_addresses(caplog): await aiozc.async_close() -@unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') -@unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') +@unittest.skipIf(not has_working_ipv6(), "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_filter_address_by_type_from_service_info(): """Verify dns_addresses can filter by ipversion.""" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} type_ = "_homeassistant._tcp.local." 
name = "MyTestHome" registration_name = f"{name}.{type_}" ipv4 = socket.inet_aton("10.0.1.2") ipv6 = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[ipv4, ipv6]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[ipv4, ipv6] + ) def dns_addresses_to_addresses(dns_address: List[DNSAddress]) -> List[bytes]: return [address.address for address in dns_address] assert dns_addresses_to_addresses(info.dns_addresses()) == [ipv4, ipv6] - assert dns_addresses_to_addresses(info.dns_addresses(version=r.IPVersion.All)) == [ipv4, ipv6] - assert dns_addresses_to_addresses(info.dns_addresses(version=r.IPVersion.V4Only)) == [ipv4] - assert dns_addresses_to_addresses(info.dns_addresses(version=r.IPVersion.V6Only)) == [ipv6] + assert dns_addresses_to_addresses(info.dns_addresses(version=r.IPVersion.All)) == [ + ipv4, + ipv6, + ] + assert dns_addresses_to_addresses( + info.dns_addresses(version=r.IPVersion.V4Only) + ) == [ipv4] + assert dns_addresses_to_addresses( + info.dns_addresses(version=r.IPVersion.V6Only) + ) == [ipv6] def test_changing_name_updates_serviceinfo_key(): @@ -832,11 +985,11 @@ def test_changing_name_updates_serviceinfo_key(): name = "MyTestHome" info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -854,11 +1007,11 @@ def test_serviceinfo_address_updates(): with pytest.raises(TypeError): info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], parsed_addresses=["10.0.1.2"], @@ -866,11 +1019,11 @@ def test_serviceinfo_address_updates(): info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": 
"/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -885,48 +1038,55 @@ def test_serviceinfo_accepts_bytes_or_string_dict(): addresses = [socket.inet_aton("10.0.1.2")] server_name = "ash-2.local." info_service = ServiceInfo( - type_, f'{name}.{type_}', 80, 0, 0, {b'path': b'/~paulsm/'}, server_name, addresses=addresses + type_, + f"{name}.{type_}", + 80, + 0, + 0, + {b"path": b"/~paulsm/"}, + server_name, + addresses=addresses, ) - assert info_service.dns_text().text == b'\x0epath=/~paulsm/' + assert info_service.dns_text().text == b"\x0epath=/~paulsm/" info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, server_name, addresses=addresses, ) - assert info_service.dns_text().text == b'\x0epath=/~paulsm/' + assert info_service.dns_text().text == b"\x0epath=/~paulsm/" info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {b'path': '/~paulsm/'}, + {b"path": "/~paulsm/"}, server_name, addresses=addresses, ) - assert info_service.dns_text().text == b'\x0epath=/~paulsm/' + assert info_service.dns_text().text == b"\x0epath=/~paulsm/" info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': b'/~paulsm/'}, + {"path": b"/~paulsm/"}, server_name, addresses=addresses, ) - assert info_service.dns_text().text == b'\x0epath=/~paulsm/' + assert info_service.dns_text().text == b"\x0epath=/~paulsm/" def test_asking_qu_questions(): """Verify explictly asking QU questions.""" type_ = "_quservice._tcp.local." 
- zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) + zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) # we are going to patch the zeroconf send to check query transmission old_send = zeroconf.async_send @@ -942,7 +1102,9 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): # patch the zeroconf send with patch.object(zeroconf, "async_send", send): - zeroconf.get_service_info(f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QU) + zeroconf.get_service_info( + f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QU + ) assert first_outgoing.questions[0].unicast is True # type: ignore[union-attr] zeroconf.close() @@ -950,7 +1112,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): def test_asking_qm_questions(): """Verify explictly asking QM questions.""" type_ = "_quservice._tcp.local." - zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) + zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) # we are going to patch the zeroconf send to check query transmission old_send = zeroconf.async_send @@ -966,16 +1128,21 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): # patch the zeroconf send with patch.object(zeroconf, "async_send", send): - zeroconf.get_service_info(f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QM) + zeroconf.get_service_info( + f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QM + ) assert first_outgoing.questions[0].unicast is False # type: ignore[union-attr] zeroconf.close() def test_request_timeout(): """Test that the timeout does not throw an exception and finishes close to the actual timeout.""" - zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) + zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) start_time = r.current_time_millis() - assert zeroconf.get_service_info("_notfound.local.", "notthere._notfound.local.") is None + assert ( + zeroconf.get_service_info("_notfound.local.", "notthere._notfound.local.") + is None + ) end_time = r.current_time_millis() zeroconf.close() # 
3000ms for the default timeout @@ -987,7 +1154,7 @@ def test_request_timeout(): async def test_we_try_four_times_with_random_delay(): """Verify we try four times even with the random delay.""" type_ = "_typethatisnothere._tcp.local." - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) # we are going to patch the zeroconf send to check query transmission request_count = 0 @@ -1011,8 +1178,8 @@ async def test_release_wait_when_new_recorded_added(): """Test that async_request returns as soon as new matching records are added to the cache.""" type_ = "_http._tcp.local." registration_name = "multiarec.%s" % type_ - desc = {'path': '/~paulsm/'} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + desc = {"path": "/~paulsm/"} + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." # New kwarg way @@ -1049,7 +1216,7 @@ async def test_release_wait_when_new_recorded_added(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1059,15 +1226,17 @@ async def test_release_wait_when_new_recorded_added(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) assert await asyncio.wait_for(task, timeout=2) - assert info.addresses == [b'\x7f\x00\x00\x01'] + assert info.addresses == [b"\x7f\x00\x00\x01"] await aiozc.async_close() @@ -1076,8 +1245,8 @@ async def test_port_changes_are_seen(): """Test that port changes are seen by async_request.""" type_ = "_http._tcp.local." 
registration_name = "multiarec.%s" % type_ - desc = {'path': '/~paulsm/'} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + desc = {"path": "/~paulsm/"} + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." # New kwarg way @@ -1112,7 +1281,7 @@ async def test_port_changes_are_seen(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1122,13 +1291,15 @@ async def test_port_changes_are_seen(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -1144,7 +1315,9 @@ async def test_port_changes_are_seen(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name, 80, 10, 10, desc, host) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1159,8 +1332,8 @@ async def test_port_changes_are_seen_with_directed_request(): """Test that port changes are seen by async_request with a directed request.""" type_ = "_http._tcp.local." registration_name = "multiarec.%s" % type_ - desc = {'path': '/~paulsm/'} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + desc = {"path": "/~paulsm/"} + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." 
# New kwarg way @@ -1195,7 +1368,7 @@ async def test_port_changes_are_seen_with_directed_request(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1205,13 +1378,15 @@ async def test_port_changes_are_seen_with_directed_request(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -1227,7 +1402,9 @@ async def test_port_changes_are_seen_with_directed_request(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name, 80, 10, 10, desc, host) await info.async_request(aiozc.zeroconf, timeout=200, addr="127.0.0.1", port=5353) @@ -1242,7 +1419,7 @@ async def test_ipv4_changes_are_seen(): """Test that ipv4 changes are seen by async_request.""" type_ = "_http._tcp.local." registration_name = "multiaipv4rec.%s" % type_ - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." 
# New kwarg way @@ -1277,7 +1454,7 @@ async def test_ipv4_changes_are_seen(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1287,16 +1464,18 @@ async def test_ipv4_changes_are_seen(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) - assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x01'] + assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x01"] generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -1305,17 +1484,25 @@ async def test_ipv4_changes_are_seen(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x02', + b"\x7f\x00\x00\x02", ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) - assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x02', b'\x7f\x00\x00\x01'] + assert info.addresses_by_version(IPVersion.V4Only) == [ + b"\x7f\x00\x00\x02", + b"\x7f\x00\x00\x01", + ] await info.async_request(aiozc.zeroconf, timeout=200) - assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x02', b'\x7f\x00\x00\x01'] + assert info.addresses_by_version(IPVersion.V4Only) == [ + b"\x7f\x00\x00\x02", + b"\x7f\x00\x00\x01", + ] await aiozc.async_close() @@ -1324,7 +1511,7 @@ async def test_ipv6_changes_are_seen(): """Test 
that ipv6 changes are seen by async_request.""" type_ = "_http._tcp.local." registration_name = "multiaipv6rec.%s" % type_ - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." # New kwarg way @@ -1359,7 +1546,7 @@ async def test_ipv6_changes_are_seen(): const._TYPE_AAAA, const._CLASS_IN, 10000, - b'\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', + b"\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", ), 0, ) @@ -1369,17 +1556,19 @@ async def test_ipv6_changes_are_seen(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V6Only) == [ - b'\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + b"\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" ] generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -1389,22 +1578,24 @@ async def test_ipv6_changes_are_seen(): const._TYPE_AAAA, const._CLASS_IN, 10000, - b'\x00\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', + b"\x00\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V6Only) == [ - 
b'\x00\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', - b'\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', + b"\x00\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + b"\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", ] await info.async_request(aiozc.zeroconf, timeout=200) assert info.addresses_by_version(IPVersion.V6Only) == [ - b'\x00\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', - b'\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', + b"\x00\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + b"\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", ] await aiozc.async_close() @@ -1414,7 +1605,7 @@ async def test_bad_ip_addresses_ignored_in_cache(): """Test that bad ip address in the cache are ignored async_request.""" type_ = "_http._tcp.local." registration_name = "multiarec.%s" % type_ - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." 
# New kwarg way @@ -1438,7 +1629,7 @@ async def test_bad_ip_addresses_ignored_in_cache(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1448,19 +1639,23 @@ async def test_bad_ip_addresses_ignored_in_cache(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) # Manually add a bad record to the cache - aiozc.zeroconf.cache.async_add_records([DNSAddress(host, const._TYPE_A, const._CLASS_IN, 10000, b'\x00')]) + aiozc.zeroconf.cache.async_add_records( + [DNSAddress(host, const._TYPE_A, const._CLASS_IN, 10000, b"\x00")] + ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) - assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x01'] + assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x01"] @pytest.mark.asyncio @@ -1468,7 +1663,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): """Test that service name changes are seen by async_request when the ip is in the cache.""" type_ = "_http._tcp.local." registration_name = "multiarec.%s" % type_ - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." 
# New kwarg way @@ -1490,7 +1685,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1500,7 +1695,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x02', + b"\x7f\x00\x00\x02", ), 0, ) @@ -1510,13 +1705,15 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1536,11 +1733,13 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) - assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x02'] + assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x02"] await aiozc.async_close() @@ -1550,7 +1749,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): """Test that service name changes are seen by async_request when the ip is not in the cache.""" type_ = "_http._tcp.local." registration_name = "multiarec.%s" % type_ - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." 
# New kwarg way @@ -1572,7 +1771,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1582,13 +1781,15 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1614,15 +1815,17 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x02', + b"\x7f\x00\x00\x02", ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) - assert info.addresses_by_version(IPVersion.V4Only) == [b'\x7f\x00\x00\x02'] + assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x02"] await aiozc.async_close() @@ -1633,14 +1836,17 @@ async def test_release_wait_when_new_recorded_added_concurrency(): """Test that concurrent async_request returns as soon as new matching records are added to the cache.""" type_ = "_http._tcp.local." registration_name = "multiareccon.%s" % type_ - desc = {'path': '/~paulsm/'} - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + desc = {"path": "/~paulsm/"} + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahostcon.local." 
await aiozc.zeroconf.async_wait_for_start() # New kwarg way info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, host) - tasks = [asyncio.create_task(info.async_request(aiozc.zeroconf, timeout=200000)) for _ in range(10)] + tasks = [ + asyncio.create_task(info.async_request(aiozc.zeroconf, timeout=200000)) + for _ in range(10) + ] await asyncio.sleep(0.1) for task in tasks: assert not task.done() @@ -1675,7 +1881,7 @@ async def test_release_wait_when_new_recorded_added_concurrency(): const._TYPE_A, const._CLASS_IN, 10000, - b'\x7f\x00\x00\x01', + b"\x7f\x00\x00\x01", ), 0, ) @@ -1685,17 +1891,19 @@ async def test_release_wait_when_new_recorded_added_concurrency(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 10000, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) await asyncio.sleep(0) for task in tasks: assert not task.done() - aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) + aiozc.zeroconf.record_manager.async_updates_from_response( + r.DNSIncoming(generated.packets()[0]) + ) _, pending = await asyncio.wait(tasks, timeout=2) assert not pending - assert info.addresses == [b'\x7f\x00\x00\x01'] + assert info.addresses == [b"\x7f\x00\x00\x01"] await aiozc.async_close() @@ -1704,7 +1912,7 @@ async def test_service_info_nsec_records(): """Test we can generate nsec records from ServiceInfo.""" type_ = "_http._tcp.local." registration_name = "multiareccon.%s" % type_ - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} host = "multahostcon.local." 
info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, host) nsec_record = info.dns_nsec([const._TYPE_A, const._TYPE_AAAA], 50) diff --git a/tests/services/test_registry.py b/tests/services/test_registry.py index f8656e2f..d3f60179 100644 --- a/tests/services/test_registry.py +++ b/tests/services/test_registry.py @@ -16,9 +16,16 @@ def test_only_register_once(self): name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) registry = r.ServiceRegistry() @@ -34,12 +41,26 @@ def test_register_same_server(self): registration_name = f"{name}.{type_}" registration_name2 = f"{name2}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "same.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "same.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = ServiceInfo( - type_, registration_name2, 80, 0, 0, desc, "same.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name2, + 80, + 0, + 0, + desc, + "same.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) registry = r.ServiceRegistry() registry.async_add(info) @@ -62,9 +83,16 @@ def test_unregister_multiple_times(self): name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) registry = r.ServiceRegistry() @@ -78,9 +106,16 @@ def test_lookups(self): name = "xxxyyy" 
registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) registry = r.ServiceRegistry() @@ -97,9 +132,16 @@ def test_lookups_upper_case_by_lower_case(self): name = "Xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ASH-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ASH-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) registry = r.ServiceRegistry() diff --git a/tests/services/test_types.py b/tests/services/test_types.py index 1afe6d53..d9340283 100644 --- a/tests/services/test_types.py +++ b/tests/services/test_types.py @@ -14,7 +14,7 @@ from .. 
import _clear_cache, has_working_ipv6 -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -34,8 +34,8 @@ def test_integration_with_listener(disable_duplicate_packet_suppression): name = "xxxyyy" registration_name = f"{name}.{type_}" - zeroconf_registrar = Zeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} + zeroconf_registrar = Zeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -48,7 +48,7 @@ def test_integration_with_listener(disable_duplicate_packet_suppression): ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find(interfaces=['127.0.0.1'], timeout=2) + service_types = ZeroconfServiceTypes.find(interfaces=["127.0.0.1"], timeout=2) assert type_ in service_types _clear_cache(zeroconf_registrar) service_types = ZeroconfServiceTypes.find(zc=zeroconf_registrar, timeout=2) @@ -58,16 +58,16 @@ def test_integration_with_listener(disable_duplicate_packet_suppression): zeroconf_registrar.close() -@unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') -@unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') +@unittest.skipIf(not has_working_ipv6(), "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_integration_with_listener_v6_records(disable_duplicate_packet_suppression): type_ = "_test-listenv6rec-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" addr = "2606:2800:220:1:248:1893:25c8:1946" # example.com - zeroconf_registrar = Zeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} + zeroconf_registrar = Zeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -80,7 +80,7 @@ def test_integration_with_listener_v6_records(disable_duplicate_packet_suppressi ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find(interfaces=['127.0.0.1'], timeout=2) + service_types = ZeroconfServiceTypes.find(interfaces=["127.0.0.1"], timeout=2) assert type_ in service_types _clear_cache(zeroconf_registrar) service_types = ZeroconfServiceTypes.find(zc=zeroconf_registrar, timeout=2) @@ -90,8 +90,8 @@ def test_integration_with_listener_v6_records(disable_duplicate_packet_suppressi zeroconf_registrar.close() -@unittest.skipIf(not has_working_ipv6() or sys.platform == 'win32', 'Requires IPv6') -@unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') +@unittest.skipIf(not has_working_ipv6() or sys.platform == "win32", "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_integration_with_listener_ipv6(disable_duplicate_packet_suppression): type_ = "_test-listenv6ip-type._tcp.local." 
name = "xxxyyy" @@ -99,7 +99,7 @@ def test_integration_with_listener_ipv6(disable_duplicate_packet_suppression): addr = "2606:2800:220:1:248:1893:25c8:1946" # example.com zeroconf_registrar = Zeroconf(ip_version=r.IPVersion.V6Only) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -112,7 +112,9 @@ def test_integration_with_listener_ipv6(disable_duplicate_packet_suppression): ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find(ip_version=r.IPVersion.V6Only, timeout=2) + service_types = ZeroconfServiceTypes.find( + ip_version=r.IPVersion.V6Only, timeout=2 + ) assert type_ in service_types _clear_cache(zeroconf_registrar) service_types = ZeroconfServiceTypes.find(zc=zeroconf_registrar, timeout=2) @@ -130,8 +132,8 @@ def test_integration_with_subtype_and_listener(disable_duplicate_packet_suppress discovery_type = f"{subtype_}.{type_}" registration_name = f"{name}.{type_}" - zeroconf_registrar = Zeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} + zeroconf_registrar = Zeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} info = ServiceInfo( discovery_type, registration_name, @@ -144,7 +146,7 @@ def test_integration_with_subtype_and_listener(disable_duplicate_packet_suppress ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find(interfaces=['127.0.0.1'], timeout=2) + service_types = ZeroconfServiceTypes.find(interfaces=["127.0.0.1"], timeout=2) assert discovery_type in service_types _clear_cache(zeroconf_registrar) service_types = ZeroconfServiceTypes.find(zc=zeroconf_registrar, timeout=2) diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 382b1a3d..053ed26b 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -50,7 +50,7 @@ time_changed_millis, ) -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -83,14 +83,14 
@@ def verify_threads_ended(): @pytest.mark.asyncio async def test_async_basic_usage() -> None: """Test we can create and close the instance.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.async_close() @pytest.mark.asyncio async def test_async_close_twice() -> None: """Test we can close twice.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.async_close() await aiozc.async_close() @@ -98,7 +98,7 @@ async def test_async_close_twice() -> None: @pytest.mark.asyncio async def test_async_with_sync_passed_in() -> None: """Test we can create and close the instance when passing in a sync Zeroconf.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) aiozc = AsyncZeroconf(zc=zc) assert aiozc.zeroconf is zc await aiozc.async_close() @@ -107,7 +107,7 @@ async def test_async_with_sync_passed_in() -> None: @pytest.mark.asyncio async def test_async_with_sync_passed_in_closed_in_async() -> None: """Test caller closes the sync version in async.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) aiozc = AsyncZeroconf(zc=zc) assert aiozc.zeroconf is zc zc.close() @@ -119,8 +119,13 @@ async def test_sync_within_event_loop_executor() -> None: """Test sync version still works from an executor within an event loop.""" def sync_code(): - zc = Zeroconf(interfaces=['127.0.0.1']) - assert zc.get_service_info("_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10) is None + zc = Zeroconf(interfaces=["127.0.0.1"]) + assert ( + zc.get_service_info( + "_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10 + ) + is None + ) zc.close() await asyncio.get_event_loop().run_in_executor(None, sync_code) @@ -129,7 +134,7 @@ def sync_code(): @pytest.mark.asyncio async def test_async_service_registration() -> None: """Test registering services broadcasts the registration by 
default.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test1-srvc-type._tcp.local." name = "xxxyyy" registration_name = f"{name}.{type_}" @@ -150,7 +155,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: aiozc.zeroconf.add_service_listener(type_, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -186,10 +191,10 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: await aiozc.async_close() assert calls == [ - ('add', type_, registration_name), - ('update', type_, registration_name), - ('update', type_, registration_name), - ('remove', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), + ("update", type_, registration_name), + ("remove", type_, registration_name), ] @@ -200,7 +205,7 @@ async def test_async_service_registration_with_server_missing() -> None: For backwards compatibility, the server should be set to the name that was passed in. """ - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test1-srvc-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" @@ -221,7 +226,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: aiozc.zeroconf.add_service_listener(type_, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -254,16 +259,16 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: await aiozc.async_close() assert calls == [ - ('add', type_, registration_name), - ('update', type_, registration_name), - ('remove', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), + ("remove", type_, registration_name), ] @pytest.mark.asyncio async def test_async_service_registration_same_server_different_ports() -> None: """Test registering services with the same server with different srv records.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test1-srvc-type._tcp.local." 
name = "xxxyyy" name2 = "xxxyyy2" @@ -287,7 +292,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: aiozc.zeroconf.add_service_listener(type_, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -320,17 +325,17 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: assert info2.dns_service() in entries await aiozc.async_close() assert calls == [ - ('add', type_, registration_name), - ('add', type_, registration_name2), - ('remove', type_, registration_name), - ('remove', type_, registration_name2), + ("add", type_, registration_name), + ("add", type_, registration_name2), + ("remove", type_, registration_name), + ("remove", type_, registration_name2), ] @pytest.mark.asyncio async def test_async_service_registration_same_server_same_ports() -> None: """Test registering services with the same server with the exact same srv record.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test1-srvc-type._tcp.local." 
name = "xxxyyy" name2 = "xxxyyy2" @@ -354,7 +359,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: aiozc.zeroconf.add_service_listener(type_, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -387,22 +392,22 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: assert info2.dns_service() in entries await aiozc.async_close() assert calls == [ - ('add', type_, registration_name), - ('add', type_, registration_name2), - ('remove', type_, registration_name), - ('remove', type_, registration_name2), + ("add", type_, registration_name), + ("add", type_, registration_name2), + ("remove", type_, registration_name), + ("remove", type_, registration_name2), ] @pytest.mark.asyncio async def test_async_service_registration_name_conflict() -> None: """Test registering services throws on name conflict.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test-srvc2-type._tcp.local." name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -445,12 +450,12 @@ async def test_async_service_registration_name_conflict() -> None: @pytest.mark.asyncio async def test_async_service_registration_name_does_not_match_type() -> None: """Test registering services throws when the name does not match the type.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test-srvc3-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -471,13 +476,13 @@ async def test_async_service_registration_name_does_not_match_type() -> None: @pytest.mark.asyncio async def test_async_service_registration_name_strict_check() -> None: """Test registering services throws when the name does not comply.""" - zc = Zeroconf(interfaces=['127.0.0.1']) - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_ibisip_http._tcp.local." name = "CustomerInformationService-F4D4895E9EEB" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -507,7 +512,7 @@ async def test_async_service_registration_name_strict_check() -> None: async def test_async_tasks() -> None: """Test awaiting broadcast tasks""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test-srvc4-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" @@ -527,7 +532,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: listener = MyListener() aiozc.zeroconf.add_service_listener(type_, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -563,9 +568,9 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: await aiozc.async_close() assert calls == [ - ('add', type_, registration_name), - ('update', type_, registration_name), - ('remove', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), + ("remove", type_, registration_name), ] @@ -573,12 +578,12 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: async def test_async_wait_unblocks_on_update() -> None: """Test async_wait will unblock on update.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test-srvc4-type._tcp.local." name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -608,10 +613,10 @@ async def test_async_wait_unblocks_on_update() -> None: @pytest.mark.asyncio async def test_service_info_async_request() -> None: """Test registering services broadcasts and query with AsyncServceInfo.async_request.""" - if not has_working_ipv6() or os.environ.get('SKIP_IPV6'): - pytest.skip('Requires IPv6') + if not has_working_ipv6() or os.environ.get("SKIP_IPV6"): + pytest.skip("Requires IPv6") - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test1-srvc-type._tcp.local." 
name = "xxxyyy" name2 = "abc" @@ -620,11 +625,15 @@ async def test_service_info_async_request() -> None: # Start a tasks BEFORE the registration that will keep trying # and see the registration a bit later - get_service_info_task1 = asyncio.ensure_future(aiozc.async_get_service_info(type_, registration_name)) + get_service_info_task1 = asyncio.ensure_future( + aiozc.async_get_service_info(type_, registration_name) + ) await asyncio.sleep(_LISTENER_TIME / 1000 / 2) - get_service_info_task2 = asyncio.ensure_future(aiozc.async_get_service_info(type_, registration_name)) + get_service_info_task2 = asyncio.ensure_future( + aiozc.async_get_service_info(type_, registration_name) + ) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -670,7 +679,10 @@ async def test_service_info_async_request() -> None: 0, desc, "ash-2.local.", - addresses=[socket.inet_aton("10.0.1.3"), socket.inet_pton(socket.AF_INET6, "6001:db8::1")], + addresses=[ + socket.inet_aton("10.0.1.3"), + socket.inet_pton(socket.AF_INET6, "6001:db8::1"), + ], ) task = await aiozc.async_update_service(new_info) @@ -714,7 +726,7 @@ async def test_service_info_async_request() -> None: @pytest.mark.asyncio async def test_async_service_browser() -> None: """Test AsyncServiceBrowser.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test9-srvc-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" @@ -734,7 +746,7 @@ def update_service(self, aiozc: Zeroconf, type: str, name: str) -> None: listener = MyListener() await aiozc.async_add_service_listener(type_, listener) - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -765,9 +777,9 @@ def update_service(self, aiozc: Zeroconf, type: str, name: str) -> None: await aiozc.async_close() assert calls == [ - ('add', type_, registration_name), - ('update', type_, registration_name), - ('remove', type_, registration_name), + ("add", type_, registration_name), + ("update", type_, registration_name), + ("remove", type_, registration_name), ] @@ -778,14 +790,14 @@ async def test_async_context_manager() -> None: name = "xxxyyy" registration_name = f"{name}.{type_}" - async with AsyncZeroconf(interfaces=['127.0.0.1']) as aiozc: + async with AsyncZeroconf(interfaces=["127.0.0.1"]) as aiozc: info = ServiceInfo( type_, registration_name, 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -800,7 +812,7 @@ async def test_service_browser_cancel_async_context_manager(): """Test we can cancel an AsyncServiceBrowser with it being used as an async context manager.""" # instantiate a zeroconf instance - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf type_ = "_hap._tcp.local." @@ -824,14 +836,14 @@ class MyServiceListener(ServiceListener): @pytest.mark.asyncio async def test_async_unregister_all_services() -> None: """Test unregistering all services.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) type_ = "_test1-srvc-type._tcp.local." 
name = "xxxyyy" name2 = "abc" registration_name = f"{name}.{type_}" registration_name2 = f"{name2}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -886,8 +898,8 @@ async def test_async_zeroconf_service_types(): name = "xxxyyy" registration_name = f"{name}.{type_}" - zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} + zeroconf_registrar = AsyncZeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -904,10 +916,14 @@ async def test_async_zeroconf_service_types(): await asyncio.sleep(0.2) _clear_cache(zeroconf_registrar.zeroconf) try: - service_types = await AsyncZeroconfServiceTypes.async_find(interfaces=['127.0.0.1'], timeout=2) + service_types = await AsyncZeroconfServiceTypes.async_find( + interfaces=["127.0.0.1"], timeout=2 + ) assert type_ in service_types _clear_cache(zeroconf_registrar.zeroconf) - service_types = await AsyncZeroconfServiceTypes.async_find(aiozc=zeroconf_registrar, timeout=2) + service_types = await AsyncZeroconfServiceTypes.async_find( + aiozc=zeroconf_registrar, timeout=2 + ) assert type_ in service_types finally: @@ -917,9 +933,11 @@ async def test_async_zeroconf_service_types(): @pytest.mark.asyncio async def test_guard_against_running_serviceinfo_request_event_loop() -> None: """Test that running ServiceInfo.request from the event loop throws.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) - service_info = AsyncServiceInfo("_hap._tcp.local.", "doesnotmatter._hap._tcp.local.") + service_info = AsyncServiceInfo( + "_hap._tcp.local.", "doesnotmatter._hap._tcp.local." 
+ ) with pytest.raises(RuntimeError): service_info.request(aiozc.zeroconf, 3000) await aiozc.async_close() @@ -930,7 +948,7 @@ async def test_service_browser_instantiation_generates_add_events_from_cache(): """Test that the ServiceBrowser will generate Add events with the existing cache when starting.""" # instantiate a zeroconf instance - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf type_ = "_hap._tcp.local." registration_name = "xxxyyy.%s" % type_ @@ -954,10 +972,12 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de listener = MyServiceListener() - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] + ) zc.cache.async_add_records( [info.dns_pointer(), info.dns_service(), *info.dns_addresses(), info.dns_text()] ) @@ -967,7 +987,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de await asyncio.sleep(0) assert callbacks == [ - ('add', type_, registration_name), + ("add", type_, registration_name), ] await browser.async_cancel() @@ -991,7 +1011,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): elif state_change is ServiceStateChange.Removed: service_removed.set() - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf zeroconf_browser.question_history = QuestionHistoryWithoutSuppression() await zeroconf_browser.async_wait_for_start() @@ -1023,7 +1043,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): assert len(zeroconf_browser.engine.protocols) == 2 - aio_zeroconf_registrar = AsyncZeroconf(interfaces=['127.0.0.1']) + 
aio_zeroconf_registrar = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_registrar = aio_zeroconf_registrar.zeroconf await aio_zeroconf_registrar.zeroconf.async_wait_for_start() @@ -1033,14 +1053,16 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): service_added = asyncio.Event() service_removed = asyncio.Event() - browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) + browser = AsyncServiceBrowser( + zeroconf_browser, type_, [on_service_state_change] + ) info = ServiceInfo( type_, registration_name, 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -1126,15 +1148,22 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): async def test_info_asking_default_is_asking_qm_questions_after_the_first_qu(): """Verify the service info first question is QU and subsequent ones are QM questions.""" type_ = "_quservice._tcp.local." - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_info = aiozc.zeroconf name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) zeroconf_info.registry.async_add(info) @@ -1174,7 +1203,7 @@ async def test_service_browser_ignores_unrelated_updates(): """Test that the ServiceBrowser ignores unrelated updates.""" # instantiate a zeroconf instance - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf type_ = "_veryuniqueone._tcp.local." 
registration_name = "xxxyyy.%s" % type_ @@ -1198,10 +1227,12 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de listener = MyServiceListener() - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] + ) zc.cache.async_add_records( [ info.dns_pointer(), @@ -1216,7 +1247,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de 0, 0, 81, - 'unrelated.local.', + "unrelated.local.", ), ] ) @@ -1235,7 +1266,13 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de 0, ) generated.add_answer_at_time( - DNSAddress("unrelated.local.", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"1234"), + DNSAddress( + "unrelated.local.", + const._TYPE_A, + const._CLASS_IN, + const._DNS_HOST_TTL, + b"1234", + ), 0, ) generated.add_answer_at_time( @@ -1255,7 +1292,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de await asyncio.sleep(0) assert callbacks == [ - ('add', type_, registration_name), + ("add", type_, registration_name), ] await aiozc.async_close() @@ -1263,10 +1300,15 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de @pytest.mark.asyncio async def test_async_request_timeout(): """Test that the timeout does not throw an exception and finishes close to the actual timeout.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() start_time = current_time_millis() - assert await aiozc.async_get_service_info("_notfound.local.", "notthere._notfound.local.") is None + assert ( + await aiozc.async_get_service_info( + "_notfound.local.", "notthere._notfound.local." 
+ ) + is None + ) end_time = current_time_millis() await aiozc.async_close() # 3000ms for the default timeout @@ -1277,25 +1319,34 @@ async def test_async_request_timeout(): @pytest.mark.asyncio async def test_async_request_non_running_instance(): """Test that the async_request throws when zeroconf is not running.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.async_close() with pytest.raises(NotRunningException): - await aiozc.async_get_service_info("_notfound.local.", "notthere._notfound.local.") + await aiozc.async_get_service_info( + "_notfound.local.", "notthere._notfound.local." + ) @pytest.mark.asyncio async def test_legacy_unicast_response(run_isolated): """Verify legacy unicast responses include questions and correct id.""" type_ = "_mservice._tcp.local." - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) aiozc.zeroconf.registry.async_add(info) @@ -1305,11 +1356,11 @@ async def test_legacy_unicast_response(run_isolated): protocol = aiozc.zeroconf.engine.protocols[0] with patch.object(aiozc.zeroconf, "async_send") as send_mock: - protocol.datagram_received(query.packets()[0], ('127.0.0.1', 6503)) + protocol.datagram_received(query.packets()[0], ("127.0.0.1", 6503)) calls = send_mock.mock_calls # Verify the response is sent back on the socket it was recieved from - assert calls == [call(ANY, '127.0.0.1', 6503, (), protocol.transport)] + assert calls == [call(ANY, "127.0.0.1", 6503, (), protocol.transport)] outgoing = send_mock.call_args[0][0] assert 
isinstance(outgoing, DNSOutgoing) assert outgoing.questions == [question] @@ -1320,7 +1371,7 @@ async def test_legacy_unicast_response(run_isolated): @pytest.mark.asyncio async def test_update_with_uppercase_names(run_isolated): """Test an ip update from a shelly which uses uppercase names.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() callbacks = [] @@ -1342,15 +1393,15 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de browser = AsyncServiceBrowser(aiozc.zeroconf, "_http._tcp.local.", None, listener) protocol = aiozc.zeroconf.engine.protocols[0] - packet = b'\x00\x00\x84\x80\x00\x00\x00\n\x00\x00\x00\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x14\x07_shelly\x04_tcp\x05local\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x12\x05_http\x04_tcp\x05local\x00\x07_shelly\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00.\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00\'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00"\napp=Pro4PM\x10ver=0.10.0-beta5\x05gen=2\x05_http\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00,\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00\'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00\x06\x05gen=2\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01\x80\x01\x00\x00\x00x\x00\x04\xc0\xa8\xbc=\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00/\x80\x01\x00\x00\x00x\x00$\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01@' 
# noqa: E501 - protocol.datagram_received(packet, ('127.0.0.1', 6503)) + packet = b"\x00\x00\x84\x80\x00\x00\x00\n\x00\x00\x00\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x14\x07_shelly\x04_tcp\x05local\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x12\x05_http\x04_tcp\x05local\x00\x07_shelly\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00.\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00\"\napp=Pro4PM\x10ver=0.10.0-beta5\x05gen=2\x05_http\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00,\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00\x06\x05gen=2\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01\x80\x01\x00\x00\x00x\x00\x04\xc0\xa8\xbc=\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00/\x80\x01\x00\x00\x00x\x00$\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01@" # noqa: E501 + protocol.datagram_received(packet, ("127.0.0.1", 6503)) await asyncio.sleep(0) - packet = 
b'\x00\x00\x84\x80\x00\x00\x00\n\x00\x00\x00\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x14\x07_shelly\x04_tcp\x05local\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x12\x05_http\x04_tcp\x05local\x00\x07_shelly\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00.\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00\'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00"\napp=Pro4PM\x10ver=0.10.0-beta5\x05gen=2\x05_http\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00,\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00\'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00\x06\x05gen=2\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01\x80\x01\x00\x00\x00x\x00\x04\xc0\xa8\xbcA\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00/\x80\x01\x00\x00\x00x\x00$\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01@' # noqa: E501 - protocol.datagram_received(packet, ('127.0.0.1', 6503)) + packet = 
b"\x00\x00\x84\x80\x00\x00\x00\n\x00\x00\x00\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x14\x07_shelly\x04_tcp\x05local\x00\t_services\x07_dns-sd\x04_udp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x12\x05_http\x04_tcp\x05local\x00\x07_shelly\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00.\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19shellypro4pm-94b97ec07650\x07_shelly\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00\"\napp=Pro4PM\x10ver=0.10.0-beta5\x05gen=2\x05_http\x04_tcp\x05local\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00,\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00!\x80\x01\x00\x00\x00x\x00'\x00\x00\x00\x00\x00P\x19ShellyPro4PM-94B97EC07650\x05local\x00\x19ShellyPro4PM-94B97EC07650\x05_http\x04_tcp\x05local\x00\x00\x10\x80\x01\x00\x00\x00x\x00\x06\x05gen=2\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01\x80\x01\x00\x00\x00x\x00\x04\xc0\xa8\xbcA\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00/\x80\x01\x00\x00\x00x\x00$\x19ShellyPro4PM-94B97EC07650\x05local\x00\x00\x01@" # noqa: E501 + protocol.datagram_received(packet, ("127.0.0.1", 6503)) await browser.async_cancel() await aiozc.async_close() assert callbacks == [ - ('add', '_http._tcp.local.', 'ShellyPro4PM-94B97EC07650._http._tcp.local.'), - ('update', '_http._tcp.local.', 'ShellyPro4PM-94B97EC07650._http._tcp.local.'), + ("add", "_http._tcp.local.", "ShellyPro4PM-94B97EC07650._http._tcp.local."), + ("update", "_http._tcp.local.", "ShellyPro4PM-94B97EC07650._http._tcp.local."), ] diff --git a/tests/test_cache.py b/tests/test_cache.py index aac7e0ca..4b3859bd 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,8 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._cache. 
""" - +"""Unit tests for zeroconf._cache.""" import logging import unittest @@ -11,7 +10,7 @@ import zeroconf as r from zeroconf import const -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -28,11 +27,11 @@ def teardown_module(): class TestDNSCache(unittest.TestCase): def test_order(self): - record1 = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') + record1 = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"b") cache = r.DNSCache() cache.async_add_records([record1, record2]) - entry = r.DNSEntry('a', const._TYPE_SOA, const._CLASS_IN) + entry = r.DNSEntry("a", const._TYPE_SOA, const._CLASS_IN) cached_record = cache.get(entry) assert cached_record == record2 @@ -42,8 +41,8 @@ def test_adding_same_record_to_cache_different_ttls_with_get(self): This ensures we only have one source of truth for TTLs as a record cannot be both expired and not expired. """ - record1 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 10, b'a') + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 10, b"a") cache = r.DNSCache() cache.async_add_records([record1, record2]) entry = r.DNSEntry(record2.name, const._TYPE_A, const._CLASS_IN) @@ -58,144 +57,231 @@ def test_adding_same_record_to_cache_different_ttls_with_get_all(self): only have one source of truth for TTLs as a record cannot be both expired and not expired. 
""" - record1 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 10, b'a') + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 10, b"a") cache = r.DNSCache() cache.async_add_records([record1, record2]) - cached_records = cache.get_all_by_details('a', const._TYPE_A, const._CLASS_IN) + cached_records = cache.get_all_by_details("a", const._TYPE_A, const._CLASS_IN) assert cached_records == [record2] def test_cache_empty_does_not_leak_memory_by_leaving_empty_list(self): - record1 = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') + record1 = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"b") cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert 'a' in cache.cache + assert "a" in cache.cache cache.async_remove_records([record1, record2]) - assert 'a' not in cache.cache + assert "a" not in cache.cache def test_cache_empty_multiple_calls(self): - record1 = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') + record1 = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"b") cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert 'a' in cache.cache + assert "a" in cache.cache cache.async_remove_records([record1, record2]) - assert 'a' not in cache.cache + assert "a" not in cache.cache class TestDNSAsyncCacheAPI(unittest.TestCase): def test_async_get_unique(self): - record1 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'b') + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 
1, b"a") + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"b") cache = r.DNSCache() cache.async_add_records([record1, record2]) assert cache.async_get_unique(record1) == record1 assert cache.async_get_unique(record2) == record2 def test_async_all_by_details(self): - record1 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'b') + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"b") cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert set(cache.async_all_by_details('a', const._TYPE_A, const._CLASS_IN)) == {record1, record2} + assert set(cache.async_all_by_details("a", const._TYPE_A, const._CLASS_IN)) == { + record1, + record2, + } def test_async_entries_with_server(self): record1 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 85, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 85, + "ab", ) record2 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "ab", ) cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert set(cache.async_entries_with_server('ab')) == {record1, record2} - assert set(cache.async_entries_with_server('AB')) == {record1, record2} + assert set(cache.async_entries_with_server("ab")) == {record1, record2} + assert set(cache.async_entries_with_server("AB")) == {record1, record2} def test_async_entries_with_name(self): record1 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 85, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 85, + "ab", ) record2 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, 
const._DNS_HOST_TTL, 0, 0, 80, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "ab", ) cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert set(cache.async_entries_with_name('irrelevant')) == {record1, record2} - assert set(cache.async_entries_with_name('Irrelevant')) == {record1, record2} + assert set(cache.async_entries_with_name("irrelevant")) == {record1, record2} + assert set(cache.async_entries_with_name("Irrelevant")) == {record1, record2} # These functions have been seen in other projects so # we try to maintain a stable API for all the threadsafe getters class TestDNSCacheAPI(unittest.TestCase): def test_get(self): - record1 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'b') - record3 = r.DNSAddress('a', const._TYPE_AAAA, const._CLASS_IN, 1, b'ipv6') + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"b") + record3 = r.DNSAddress("a", const._TYPE_AAAA, const._CLASS_IN, 1, b"ipv6") cache = r.DNSCache() cache.async_add_records([record1, record2, record3]) assert cache.get(record1) == record1 assert cache.get(record2) == record2 - assert cache.get(r.DNSEntry('a', const._TYPE_A, const._CLASS_IN)) == record2 - assert cache.get(r.DNSEntry('a', const._TYPE_AAAA, const._CLASS_IN)) == record3 - assert cache.get(r.DNSEntry('notthere', const._TYPE_A, const._CLASS_IN)) is None + assert cache.get(r.DNSEntry("a", const._TYPE_A, const._CLASS_IN)) == record2 + assert cache.get(r.DNSEntry("a", const._TYPE_AAAA, const._CLASS_IN)) == record3 + assert cache.get(r.DNSEntry("notthere", const._TYPE_A, const._CLASS_IN)) is None def test_get_by_details(self): - record1 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'b') + record1 = r.DNSAddress("a", const._TYPE_A, 
const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"b") cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert cache.get_by_details('a', const._TYPE_A, const._CLASS_IN) == record2 + assert cache.get_by_details("a", const._TYPE_A, const._CLASS_IN) == record2 def test_get_all_by_details(self): - record1 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'a') - record2 = r.DNSAddress('a', const._TYPE_A, const._CLASS_IN, 1, b'b') + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a") + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"b") cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert set(cache.get_all_by_details('a', const._TYPE_A, const._CLASS_IN)) == {record1, record2} + assert set(cache.get_all_by_details("a", const._TYPE_A, const._CLASS_IN)) == { + record1, + record2, + } def test_entries_with_server(self): record1 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 85, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 85, + "ab", ) record2 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "ab", ) cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert set(cache.entries_with_server('ab')) == {record1, record2} - assert set(cache.entries_with_server('AB')) == {record1, record2} + assert set(cache.entries_with_server("ab")) == {record1, record2} + assert set(cache.entries_with_server("AB")) == {record1, record2} def test_entries_with_name(self): record1 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 85, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 85, + "ab", ) record2 = r.DNSService( - 'irrelevant', 
const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "ab", ) cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert set(cache.entries_with_name('irrelevant')) == {record1, record2} - assert set(cache.entries_with_name('Irrelevant')) == {record1, record2} + assert set(cache.entries_with_name("irrelevant")) == {record1, record2} + assert set(cache.entries_with_name("Irrelevant")) == {record1, record2} def test_current_entry_with_name_and_alias(self): record1 = r.DNSPointer( - 'irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, 'x.irrelevant' + "irrelevant", + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "x.irrelevant", ) record2 = r.DNSPointer( - 'irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, 'y.irrelevant' + "irrelevant", + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "y.irrelevant", ) cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert cache.current_entry_with_name_and_alias('irrelevant', 'x.irrelevant') == record1 + assert ( + cache.current_entry_with_name_and_alias("irrelevant", "x.irrelevant") + == record1 + ) def test_name(self): record1 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 85, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 85, + "ab", ) record2 = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'ab' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "ab", ) cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert cache.names() == ['irrelevant'] + assert cache.names() == ["irrelevant"] diff --git a/tests/test_core.py b/tests/test_core.py index de4b2ef5..10545357 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,7 +1,7 
@@ #!/usr/bin/env python -""" Unit tests for zeroconf._core """ +"""Unit tests for zeroconf._core""" import asyncio import logging @@ -30,7 +30,7 @@ from . import _clear_cache, _inject_response, _wait_for_start, has_working_ipv6 -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -46,8 +46,8 @@ def teardown_module(): def threadsafe_query( - zc: 'Zeroconf', - protocol: 'AsyncListener', + zc: "Zeroconf", + protocol: "AsyncListener", msg: DNSIncoming, addr: str, port: int, @@ -88,34 +88,44 @@ def test_close_multiple_times(self): rv.close() rv.close() - @unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') - @unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') + @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") + @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_launch_and_close_v4_v6(self): rv = r.Zeroconf(interfaces=r.InterfaceChoice.All, ip_version=r.IPVersion.All) rv.close() - rv = r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.All) + rv = r.Zeroconf( + interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.All + ) rv.close() - @unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') - @unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') + @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") + @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_launch_and_close_v6_only(self): rv = r.Zeroconf(interfaces=r.InterfaceChoice.All, ip_version=r.IPVersion.V6Only) rv.close() - rv = r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.V6Only) + rv = r.Zeroconf( + interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.V6Only + ) rv.close() - @unittest.skipIf(sys.platform == 'darwin', reason="apple_p2p failure path not testable on mac") + @unittest.skipIf( + sys.platform == "darwin", reason="apple_p2p failure path not testable on mac" + ) def 
test_launch_and_close_apple_p2p_not_mac(self): with pytest.raises(RuntimeError): r.Zeroconf(apple_p2p=True) - @unittest.skipIf(sys.platform != 'darwin', reason="apple_p2p happy path only testable on mac") + @unittest.skipIf( + sys.platform != "darwin", reason="apple_p2p happy path only testable on mac" + ) def test_launch_and_close_apple_p2p_on_mac(self): rv = r.Zeroconf(apple_p2p=True) rv.close() def test_async_updates_from_response(self): - def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncoming: + def mock_incoming_msg( + service_state_change: r.ServiceStateChange, + ) -> r.DNSIncoming: ttl = 120 generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -136,7 +146,10 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi ttl = 0 generated.add_answer_at_time( - r.DNSPointer(service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name), 0 + r.DNSPointer( + service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name + ), + 0, ) generated.add_answer_at_time( r.DNSService( @@ -153,7 +166,11 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi ) generated.add_answer_at_time( r.DNSText( - service_name, const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, ttl, service_text + service_name, + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + ttl, + service_text, ), 0, ) @@ -170,7 +187,9 @@ def mock_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncomi return r.DNSIncoming(generated.packets()[0]) - def mock_split_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNSIncoming: + def mock_split_incoming_msg( + service_state_change: r.ServiceStateChange, + ) -> r.DNSIncoming: """Mock an incoming message for the case where the packet is split.""" ttl = 120 generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -199,21 +218,27 @@ def mock_split_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNS ) return 
r.DNSIncoming(generated.packets()[0]) - service_name = 'name._type._tcp.local.' - service_type = '_type._tcp.local.' - service_server = 'ash-2.local.' - service_text = b'path=/~paulsm/' - service_address = '10.0.1.2' + service_name = "name._type._tcp.local." + service_type = "_type._tcp.local." + service_server = "ash-2.local." + service_text = b"path=/~paulsm/" + service_address = "10.0.1.2" - zeroconf = r.Zeroconf(interfaces=['127.0.0.1']) + zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) try: # service added _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Added)) - dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) + dns_text = zeroconf.cache.get_by_details( + service_name, const._TYPE_TXT, const._CLASS_IN + ) assert dns_text is not None - assert cast(r.DNSText, dns_text).text == service_text # service_text is b'path=/~paulsm/' - all_dns_text = zeroconf.cache.get_all_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) + assert ( + cast(r.DNSText, dns_text).text == service_text + ) # service_text is b'path=/~paulsm/' + all_dns_text = zeroconf.cache.get_all_by_details( + service_name, const._TYPE_TXT, const._CLASS_IN + ) assert [dns_text] == all_dns_text # https://tools.ietf.org/html/rfc6762#section-10.2 @@ -225,25 +250,37 @@ def mock_split_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNS time.sleep(1.1) # service updated. 
currently only text record can be updated - service_text = b'path=/~humingchun/' + service_text = b"path=/~humingchun/" _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Updated)) - dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) + dns_text = zeroconf.cache.get_by_details( + service_name, const._TYPE_TXT, const._CLASS_IN + ) assert dns_text is not None - assert cast(r.DNSText, dns_text).text == service_text # service_text is b'path=/~humingchun/' + assert ( + cast(r.DNSText, dns_text).text == service_text + ) # service_text is b'path=/~humingchun/' time.sleep(1.1) # The split message only has a SRV and A record. # This should not evict TXT records from the cache - _inject_response(zeroconf, mock_split_incoming_msg(r.ServiceStateChange.Updated)) + _inject_response( + zeroconf, mock_split_incoming_msg(r.ServiceStateChange.Updated) + ) time.sleep(1.1) - dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) + dns_text = zeroconf.cache.get_by_details( + service_name, const._TYPE_TXT, const._CLASS_IN + ) assert dns_text is not None - assert cast(r.DNSText, dns_text).text == service_text # service_text is b'path=/~humingchun/' + assert ( + cast(r.DNSText, dns_text).text == service_text + ) # service_text is b'path=/~humingchun/' # service removed _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Removed)) - dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) + dns_text = zeroconf.cache.get_by_details( + service_name, const._TYPE_TXT, const._CLASS_IN + ) assert dns_text is not None assert dns_text.is_expired(current_time_millis() + 1000) @@ -254,12 +291,19 @@ def mock_split_incoming_msg(service_state_change: r.ServiceStateChange) -> r.DNS def test_generate_service_query_set_qu_bit(): """Test generate_service_query sets the QU bit.""" - zeroconf_registrar = Zeroconf(interfaces=['127.0.0.1']) - desc = {'path': '/~paulsm/'} + 
zeroconf_registrar = Zeroconf(interfaces=["127.0.0.1"]) + desc = {"path": "/~paulsm/"} type_ = "._hap._tcp.local." registration_name = "this-host-is-not-used._hap._tcp.local." info = r.ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) out = zeroconf_registrar.generate_service_query(info) assert out.questions[0].unicast is True @@ -268,10 +312,10 @@ def test_generate_service_query_set_qu_bit(): def test_invalid_packets_ignored_and_does_not_cause_loop_exception(): """Ensure an invalid packet cannot cause the loop to collapse.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) generated = r.DNSOutgoing(0) packet = generated.packets()[0] - packet = packet[:8] + b'deadbeef' + packet[8:] + packet = packet[:8] + b"deadbeef" + packet[8:] parsed = r.DNSIncoming(packet) assert parsed.valid is False @@ -291,7 +335,7 @@ def test_invalid_packets_ignored_and_does_not_cause_loop_exception(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 500, - b'path=/~paulsm/', + b"path=/~paulsm/", ) assert isinstance(entry, r.DNSText) assert isinstance(entry, r.DNSRecord) @@ -306,14 +350,21 @@ def test_invalid_packets_ignored_and_does_not_cause_loop_exception(): def test_goodbye_all_services(): """Verify generating the goodbye query does not change with time.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) out = zc.generate_unregister_all_services() assert out is None type_ = "_http._tcp.local." 
registration_name = "xxxyyy.%s" % type_ - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = r.ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) out = zc.generate_unregister_all_services() @@ -337,18 +388,18 @@ def test_register_service_with_custom_ttl(): """Test a registering a service with a custom ttl.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_homeassistant._tcp.local." name = "MyTestHome" info_service = r.ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-90.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -364,58 +415,65 @@ def test_logging_packets(caplog): """Test packets are only logged with debug logging.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_logging._tcp.local." 
name = "TLD" info_service = r.ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-90.local.", addresses=[socket.inet_aton("10.0.1.2")], ) - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) caplog.clear() zc.register_service(info_service, ttl=3000) assert "Sending to" in caplog.text record = zc.cache.get(info_service.dns_pointer()) assert record is not None assert record.ttl == 3000 - logging.getLogger('zeroconf').setLevel(logging.INFO) + logging.getLogger("zeroconf").setLevel(logging.INFO) caplog.clear() zc.unregister_service(info_service) assert "Sending to" not in caplog.text - logging.getLogger('zeroconf').setLevel(logging.DEBUG) + logging.getLogger("zeroconf").setLevel(logging.DEBUG) zc.close() def test_get_service_info_failure_path(): """Verify get_service_info return None when the underlying call returns False.""" - zc = Zeroconf(interfaces=['127.0.0.1']) - assert zc.get_service_info("_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10) is None + zc = Zeroconf(interfaces=["127.0.0.1"]) + assert ( + zc.get_service_info( + "_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10 + ) + is None + ) zc.close() def test_sending_unicast(): """Test sending unicast response.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) entry = r.DNSText( "didnotcrashincoming._crash._tcp.local.", const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 500, - b'path=/~paulsm/', + b"path=/~paulsm/", ) generated.add_answer_at_time(entry, 0) - zc.send(generated, "2001:db8::1", const._MDNS_PORT) # https://www.iana.org/go/rfc3849 + zc.send( + generated, "2001:db8::1", const._MDNS_PORT + ) # https://www.iana.org/go/rfc3849 time.sleep(0.2) assert zc.cache.get(entry) is None @@ -437,7 +495,7 @@ def test_sending_unicast(): def test_tc_bit_defers(): 
- zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) _wait_for_start(zc) type_ = "_tcbitdefer._tcp.local." name = "knownname" @@ -448,19 +506,40 @@ def test_tc_bit_defers(): registration2_name = f"{name2}.{type_}" registration3_name = f"{name3}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." server_name2 = "ash-3.local." server_name3 = "ash-4.local." info = r.ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = r.ServiceInfo( - type_, registration2_name, 80, 0, 0, desc, server_name2, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration2_name, + 80, + 0, + 0, + desc, + server_name2, + addresses=[socket.inet_aton("10.0.1.2")], ) info3 = r.ServiceInfo( - type_, registration3_name, 80, 0, 0, desc, server_name3, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration3_name, + 80, + 0, + 0, + desc, + server_name3, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) zc.registry.async_add(info2) @@ -481,7 +560,7 @@ def test_tc_bit_defers(): packets = generated.packets() assert len(packets) == 4 expected_deferred = [] - source_ip = '203.0.113.13' + source_ip = "203.0.113.13" next_packet = r.DNSIncoming(packets.pop(0)) expected_deferred.append(next_packet) @@ -516,7 +595,7 @@ def test_tc_bit_defers(): def test_tc_bit_defers_last_response_missing(): - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) _wait_for_start(zc) type_ = "_knowndefer._tcp.local." name = "knownname" @@ -527,19 +606,40 @@ def test_tc_bit_defers_last_response_missing(): registration2_name = f"{name2}.{type_}" registration3_name = f"{name3}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." server_name2 = "ash-3.local." 
server_name3 = "ash-4.local." info = r.ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = r.ServiceInfo( - type_, registration2_name, 80, 0, 0, desc, server_name2, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration2_name, + 80, + 0, + 0, + desc, + server_name2, + addresses=[socket.inet_aton("10.0.1.2")], ) info3 = r.ServiceInfo( - type_, registration3_name, 80, 0, 0, desc, server_name3, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration3_name, + 80, + 0, + 0, + desc, + server_name3, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) zc.registry.async_add(info2) @@ -548,7 +648,7 @@ def test_tc_bit_defers_last_response_missing(): protocol = zc.engine.protocols[0] now = r.current_time_millis() _clear_cache(zc) - source_ip = '203.0.113.12' + source_ip = "203.0.113.12" generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(type_, const._TYPE_PTR, const._CLASS_IN) @@ -620,7 +720,7 @@ async def test_open_close_twice_from_async() -> None: version they won't yield with an await like async_close we don't have much choice but to force things down. 
""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) zc.close() zc.close() await asyncio.sleep(0) @@ -631,8 +731,8 @@ async def test_multiple_sync_instances_stared_from_async_close(): """Test we can shutdown multiple sync instances from async.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) - zc2 = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) + zc2 = Zeroconf(interfaces=["127.0.0.1"]) assert zc.loop is not None assert zc2.loop is not None @@ -642,7 +742,7 @@ async def test_multiple_sync_instances_stared_from_async_close(): zc2.close() assert zc2.loop.is_running() - zc3 = Zeroconf(interfaces=['127.0.0.1']) + zc3 = Zeroconf(interfaces=["127.0.0.1"]) assert zc3.loop == zc2.loop zc3.close() @@ -655,18 +755,18 @@ def test_shutdown_while_register_in_process(): """Test we can shutdown while registering a service in another thread.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_homeassistant._tcp.local." 
name = "MyTestHome" info_service = r.ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-90.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -683,12 +783,14 @@ def _background_register(): @pytest.mark.asyncio -@unittest.skipIf(sys.version_info[:3][1] < 8, 'Requires Python 3.8 or later to patch _async_setup') +@unittest.skipIf( + sys.version_info[:3][1] < 8, "Requires Python 3.8 or later to patch _async_setup" +) @patch("zeroconf._core._STARTUP_TIMEOUT", 0) @patch("zeroconf._core.AsyncEngine._async_setup", new_callable=AsyncMock) async def test_event_loop_blocked(mock_start): """Test we raise NotRunningException when waiting for startup that times out.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) with pytest.raises(NotRunningException): await aiozc.zeroconf.async_wait_for_start() assert aiozc.zeroconf.started is False diff --git a/tests/test_dns.py b/tests/test_dns.py index 05562135..b4ac6f88 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._dns. """ +"""Unit tests for zeroconf._dns.""" import logging import os @@ -17,7 +17,7 @@ from . 
import has_working_ipv6 -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -36,50 +36,71 @@ class TestDunder(unittest.TestCase): def test_dns_text_repr(self): # There was an issue on Python 3 that prevented DNSText's repr # from working when the text was longer than 10 bytes - text = DNSText('irrelevant', 0, 0, 0, b'12345678901') + text = DNSText("irrelevant", 0, 0, 0, b"12345678901") repr(text) - text = DNSText('irrelevant', 0, 0, 0, b'123') + text = DNSText("irrelevant", 0, 0, 0, b"123") repr(text) def test_dns_hinfo_repr_eq(self): - hinfo = DNSHinfo('irrelevant', const._TYPE_HINFO, 0, 0, 'cpu', 'os') + hinfo = DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu", "os") assert hinfo == hinfo repr(hinfo) def test_dns_pointer_repr(self): - pointer = r.DNSPointer('irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, '123') + pointer = r.DNSPointer( + "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123" + ) repr(pointer) - @unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') - @unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') + @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") + @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_dns_address_repr(self): - address = r.DNSAddress('irrelevant', const._TYPE_SOA, const._CLASS_IN, 1, b'a') + address = r.DNSAddress("irrelevant", const._TYPE_SOA, const._CLASS_IN, 1, b"a") assert repr(address).endswith("b'a'") address_ipv4 = r.DNSAddress( - 'irrelevant', const._TYPE_SOA, const._CLASS_IN, 1, socket.inet_pton(socket.AF_INET, '127.0.0.1') + "irrelevant", + const._TYPE_SOA, + const._CLASS_IN, + 1, + socket.inet_pton(socket.AF_INET, "127.0.0.1"), ) - assert repr(address_ipv4).endswith('127.0.0.1') + assert repr(address_ipv4).endswith("127.0.0.1") address_ipv6 = r.DNSAddress( - 'irrelevant', const._TYPE_SOA, const._CLASS_IN, 1, socket.inet_pton(socket.AF_INET6, '::1') + 
"irrelevant", + const._TYPE_SOA, + const._CLASS_IN, + 1, + socket.inet_pton(socket.AF_INET6, "::1"), ) - assert repr(address_ipv6).endswith('::1') + assert repr(address_ipv6).endswith("::1") def test_dns_question_repr(self): - question = r.DNSQuestion('irrelevant', const._TYPE_SRV, const._CLASS_IN | const._CLASS_UNIQUE) + question = r.DNSQuestion( + "irrelevant", const._TYPE_SRV, const._CLASS_IN | const._CLASS_UNIQUE + ) repr(question) assert not question != question def test_dns_service_repr(self): service = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'a' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "a", ) repr(service) def test_dns_record_abc(self): - record = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL) + record = r.DNSRecord( + "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL + ) self.assertRaises(r.AbstractMethodException, record.__eq__, record) with pytest.raises((r.AbstractMethodException, TypeError)): record.write(None) # type: ignore[arg-type] @@ -87,11 +108,19 @@ def test_dns_record_abc(self): def test_dns_record_reset_ttl(self): start = r.current_time_millis() record = r.DNSRecord( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, created=start + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + created=start, ) later = start + 1000 record2 = r.DNSRecord( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, created=later + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + created=later, ) now = r.current_time_millis() @@ -114,7 +143,7 @@ def test_service_info_dunder(self): 80, 0, 0, - b'', + b"", "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -142,14 +171,14 @@ def test_dns_outgoing_repr(self): repr(dns_outgoing) def test_dns_record_is_expired(self): - record = r.DNSRecord('irrelevant', 
const._TYPE_SRV, const._CLASS_IN, 8) + record = r.DNSRecord("irrelevant", const._TYPE_SRV, const._CLASS_IN, 8) now = current_time_millis() assert record.is_expired(now) is False assert record.is_expired(now + (8 / 2 * 1000)) is False assert record.is_expired(now + (8 * 1000)) is True def test_dns_record_is_stale(self): - record = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, 8) + record = r.DNSRecord("irrelevant", const._TYPE_SRV, const._CLASS_IN, 8) now = current_time_millis() assert record.is_stale(now) is False assert record.is_stale(now + (8 / 4.1 * 1000)) is False @@ -158,7 +187,7 @@ def test_dns_record_is_stale(self): def test_dns_record_is_recent(self): now = current_time_millis() - record = r.DNSRecord('irrelevant', const._TYPE_SRV, const._CLASS_IN, 8) + record = r.DNSRecord("irrelevant", const._TYPE_SRV, const._CLASS_IN, 8) assert record.is_recent(now + (8 / 4.2 * 1000)) is True assert record.is_recent(now + (8 / 3 * 1000)) is False assert record.is_recent(now + (8 / 2 * 1000)) is False @@ -168,8 +197,8 @@ def test_dns_record_is_recent(self): def test_dns_question_hashablity(): """Test DNSQuestions are hashable.""" - record1 = r.DNSQuestion('irrelevant', const._TYPE_A, const._CLASS_IN) - record2 = r.DNSQuestion('irrelevant', const._TYPE_A, const._CLASS_IN) + record1 = r.DNSQuestion("irrelevant", const._TYPE_A, const._CLASS_IN) + record2 = r.DNSQuestion("irrelevant", const._TYPE_A, const._CLASS_IN) record_set = {record1, record2} assert len(record_set) == 1 @@ -177,14 +206,14 @@ def test_dns_question_hashablity(): record_set.add(record1) assert len(record_set) == 1 - record3_dupe = r.DNSQuestion('irrelevant', const._TYPE_A, const._CLASS_IN) + record3_dupe = r.DNSQuestion("irrelevant", const._TYPE_A, const._CLASS_IN) assert record2 == record3_dupe assert record2.__hash__() == record3_dupe.__hash__() record_set.add(record3_dupe) assert len(record_set) == 1 - record4_dupe = r.DNSQuestion('notsame', const._TYPE_A, const._CLASS_IN) + record4_dupe = 
r.DNSQuestion("notsame", const._TYPE_A, const._CLASS_IN) assert record2 != record4_dupe assert record2.__hash__() != record4_dupe.__hash__() @@ -196,8 +225,12 @@ def test_dns_record_hashablity_does_not_consider_ttl(): """Test DNSRecord are hashable.""" # Verify the TTL is not considered in the hash - record1 = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b'same') - record2 = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b'same') + record1 = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b"same" + ) + record2 = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same" + ) record_set = {record1, record2} assert len(record_set) == 1 @@ -205,7 +238,9 @@ def test_dns_record_hashablity_does_not_consider_ttl(): record_set.add(record1) assert len(record_set) == 1 - record3_dupe = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b'same') + record3_dupe = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same" + ) assert record2 == record3_dupe assert record2.__hash__() == record3_dupe.__hash__() @@ -218,9 +253,15 @@ def test_dns_record_hashablity_does_not_consider_unique(): # Verify the unique value is not considered in the hash record1 = r.DNSAddress( - 'irrelevant', const._TYPE_A, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, b'same' + "irrelevant", + const._TYPE_A, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + b"same", + ) + record2 = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b"same" ) - record2 = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b'same') assert record1.class_ == record2.class_ assert record1.__hash__() == record2.__hash__() @@ -230,10 +271,10 @@ def test_dns_record_hashablity_does_not_consider_unique(): def test_dns_address_record_hashablity(): 
"""Test DNSAddress are hashable.""" - address1 = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 1, b'a') - address2 = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 1, b'b') - address3 = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 1, b'c') - address4 = r.DNSAddress('irrelevant', const._TYPE_AAAA, const._CLASS_IN, 1, b'c') + address1 = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 1, b"a") + address2 = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 1, b"b") + address3 = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 1, b"c") + address4 = r.DNSAddress("irrelevant", const._TYPE_AAAA, const._CLASS_IN, 1, b"c") record_set = {address1, address2, address3, address4} assert len(record_set) == 4 @@ -241,7 +282,7 @@ def test_dns_address_record_hashablity(): record_set.add(address1) assert len(record_set) == 4 - address3_dupe = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 1, b'c') + address3_dupe = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 1, b"c") record_set.add(address3_dupe) assert len(record_set) == 4 @@ -254,8 +295,8 @@ def test_dns_address_record_hashablity(): def test_dns_hinfo_record_hashablity(): """Test DNSHinfo are hashable.""" - hinfo1 = r.DNSHinfo('irrelevant', const._TYPE_HINFO, 0, 0, 'cpu1', 'os') - hinfo2 = r.DNSHinfo('irrelevant', const._TYPE_HINFO, 0, 0, 'cpu2', 'os') + hinfo1 = r.DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu1", "os") + hinfo2 = r.DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu2", "os") record_set = {hinfo1, hinfo2} assert len(record_set) == 2 @@ -263,7 +304,7 @@ def test_dns_hinfo_record_hashablity(): record_set.add(hinfo1) assert len(record_set) == 2 - hinfo2_dupe = r.DNSHinfo('irrelevant', const._TYPE_HINFO, 0, 0, 'cpu2', 'os') + hinfo2_dupe = r.DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu2", "os") assert hinfo2 == hinfo2_dupe assert hinfo2.__hash__() == hinfo2_dupe.__hash__() @@ -273,8 +314,12 @@ def 
test_dns_hinfo_record_hashablity(): def test_dns_pointer_record_hashablity(): """Test DNSPointer are hashable.""" - ptr1 = r.DNSPointer('irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, '123') - ptr2 = r.DNSPointer('irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, '456') + ptr1 = r.DNSPointer( + "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123" + ) + ptr2 = r.DNSPointer( + "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "456" + ) record_set = {ptr1, ptr2} assert len(record_set) == 2 @@ -282,7 +327,9 @@ def test_dns_pointer_record_hashablity(): record_set.add(ptr1) assert len(record_set) == 2 - ptr2_dupe = r.DNSPointer('irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, '456') + ptr2_dupe = r.DNSPointer( + "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "456" + ) assert ptr2 == ptr2 assert ptr2.__hash__() == ptr2_dupe.__hash__() @@ -292,18 +339,26 @@ def test_dns_pointer_record_hashablity(): def test_dns_pointer_comparison_is_case_insensitive(): """Test DNSPointer comparison is case insensitive.""" - ptr1 = r.DNSPointer('irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, '123') - ptr2 = r.DNSPointer('irrelevant'.upper(), const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, '123') + ptr1 = r.DNSPointer( + "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123" + ) + ptr2 = r.DNSPointer( + "irrelevant".upper(), + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "123", + ) assert ptr1 == ptr2 def test_dns_text_record_hashablity(): """Test DNSText are hashable.""" - text1 = r.DNSText('irrelevant', 0, 0, const._DNS_OTHER_TTL, b'12345678901') - text2 = r.DNSText('irrelevant', 1, 0, const._DNS_OTHER_TTL, b'12345678901') - text3 = r.DNSText('irrelevant', 0, 1, const._DNS_OTHER_TTL, b'12345678901') - text4 = r.DNSText('irrelevant', 0, 0, const._DNS_OTHER_TTL, b'ABCDEFGHIJK') + text1 = 
r.DNSText("irrelevant", 0, 0, const._DNS_OTHER_TTL, b"12345678901") + text2 = r.DNSText("irrelevant", 1, 0, const._DNS_OTHER_TTL, b"12345678901") + text3 = r.DNSText("irrelevant", 0, 1, const._DNS_OTHER_TTL, b"12345678901") + text4 = r.DNSText("irrelevant", 0, 0, const._DNS_OTHER_TTL, b"ABCDEFGHIJK") record_set = {text1, text2, text3, text4} @@ -312,7 +367,7 @@ def test_dns_text_record_hashablity(): record_set.add(text1) assert len(record_set) == 4 - text1_dupe = r.DNSText('irrelevant', 0, 0, const._DNS_OTHER_TTL, b'12345678901') + text1_dupe = r.DNSText("irrelevant", 0, 0, const._DNS_OTHER_TTL, b"12345678901") assert text1 == text1_dupe assert text1.__hash__() == text1_dupe.__hash__() @@ -322,10 +377,46 @@ def test_dns_text_record_hashablity(): def test_dns_service_record_hashablity(): """Test DNSService are hashable.""" - srv1 = r.DNSService('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'a') - srv2 = r.DNSService('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 1, 80, 'a') - srv3 = r.DNSService('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 81, 'a') - srv4 = r.DNSService('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'ab') + srv1 = r.DNSService( + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "a", + ) + srv2 = r.DNSService( + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 1, + 80, + "a", + ) + srv3 = r.DNSService( + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 81, + "a", + ) + srv4 = r.DNSService( + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "ab", + ) record_set = {srv1, srv2, srv3, srv4} @@ -335,7 +426,14 @@ def test_dns_service_record_hashablity(): assert len(record_set) == 4 srv1_dupe = r.DNSService( - 'irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 
80, 'a' + "irrelevant", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "a", ) assert srv1 == srv1_dupe assert srv1.__hash__() == srv1_dupe.__hash__() @@ -347,21 +445,42 @@ def test_dns_service_record_hashablity(): def test_dns_service_server_key(): """Test DNSService server_key is lowercase.""" srv1 = r.DNSService( - 'X._tcp._http.local.', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'X.local.' + "X._tcp._http.local.", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "X.local.", ) - assert srv1.name == 'X._tcp._http.local.' - assert srv1.key == 'x._tcp._http.local.' - assert srv1.server == 'X.local.' - assert srv1.server_key == 'x.local.' + assert srv1.name == "X._tcp._http.local." + assert srv1.key == "x._tcp._http.local." + assert srv1.server == "X.local." + assert srv1.server_key == "x.local." def test_dns_service_server_comparison_is_case_insensitive(): """Test DNSService server comparison is case insensitive.""" srv1 = r.DNSService( - 'X._tcp._http.local.', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'X.local.' + "X._tcp._http.local.", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "X.local.", ) srv2 = r.DNSService( - 'X._tcp._http.local.', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 0, 0, 80, 'x.local.' 
+ "X._tcp._http.local.", + const._TYPE_SRV, + const._CLASS_IN, + const._DNS_HOST_TTL, + 0, + 0, + 80, + "x.local.", ) assert srv1 == srv2 @@ -369,10 +488,20 @@ def test_dns_service_server_comparison_is_case_insensitive(): def test_dns_nsec_record_hashablity(): """Test DNSNsec are hashable.""" nsec1 = r.DNSNsec( - 'irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, 'irrelevant', [1, 2, 3] + "irrelevant", + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "irrelevant", + [1, 2, 3], ) nsec2 = r.DNSNsec( - 'irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, 'irrelevant', [1, 2] + "irrelevant", + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "irrelevant", + [1, 2], ) record_set = {nsec1, nsec2} @@ -382,7 +511,12 @@ def test_dns_nsec_record_hashablity(): assert len(record_set) == 2 nsec2_dupe = r.DNSNsec( - 'irrelevant', const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, 'irrelevant', [1, 2] + "irrelevant", + const._TYPE_PTR, + const._CLASS_IN, + const._DNS_OTHER_TTL, + "irrelevant", + [1, 2], ) assert nsec2 == nsec2_dupe assert nsec2.__hash__() == nsec2_dupe.__hash__() @@ -394,10 +528,14 @@ def test_dns_nsec_record_hashablity(): def test_rrset_does_not_consider_ttl(): """Test DNSRRSet does not consider the ttl in the hash.""" - longarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 100, b'same') - shortarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 10, b'same') - longaaaarec = r.DNSAddress('irrelevant', const._TYPE_AAAA, const._CLASS_IN, 100, b'same') - shortaaaarec = r.DNSAddress('irrelevant', const._TYPE_AAAA, const._CLASS_IN, 10, b'same') + longarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 100, b"same") + shortarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 10, b"same") + longaaaarec = r.DNSAddress( + "irrelevant", const._TYPE_AAAA, const._CLASS_IN, 100, b"same" + ) + shortaaaarec = r.DNSAddress( + "irrelevant", 
const._TYPE_AAAA, const._CLASS_IN, 10, b"same" + ) rrset = DNSRRSet([longarec, shortaaaarec]) @@ -406,10 +544,12 @@ def test_rrset_does_not_consider_ttl(): assert not rrset.suppresses(longaaaarec) assert rrset.suppresses(shortaaaarec) - verylongarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 1000, b'same') - longarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 100, b'same') - mediumarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 60, b'same') - shortarec = r.DNSAddress('irrelevant', const._TYPE_A, const._CLASS_IN, 10, b'same') + verylongarec = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, 1000, b"same" + ) + longarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 100, b"same") + mediumarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 60, b"same") + shortarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 10, b"same") rrset2 = DNSRRSet([mediumarec]) assert not rrset2.suppresses(verylongarec) diff --git a/tests/test_engine.py b/tests/test_engine.py index dc6674dd..7a10b48d 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._engine """ +"""Unit tests for zeroconf._engine""" import asyncio import itertools @@ -15,7 +15,7 @@ from zeroconf import _engine, const from zeroconf.asyncio import AsyncZeroconf -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -35,12 +35,18 @@ def teardown_module(): @pytest.mark.asyncio async def test_reaper(): with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf = aiozc.zeroconf cache = zeroconf.cache - original_entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) - record_with_10s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 
10, b'a') - record_with_1s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') + original_entries = list( + itertools.chain(*(cache.entries_with_name(name) for name in cache.names())) + ) + record_with_10s_ttl = r.DNSAddress( + "a", const._TYPE_SOA, const._CLASS_IN, 10, b"a" + ) + record_with_1s_ttl = r.DNSAddress( + "a", const._TYPE_SOA, const._CLASS_IN, 1, b"b" + ) zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) now = r.current_time_millis() @@ -50,17 +56,25 @@ async def test_reaper(): const._TYPE_PTR, const._CLASS_IN, 10000, - 'known-to-other._hap._tcp.local.', + "known-to-other._hap._tcp.local.", ) } - zeroconf.question_history.add_question_at_time(question, now, other_known_answers) + zeroconf.question_history.add_question_at_time( + question, now, other_known_answers + ) assert zeroconf.question_history.suppresses(question, now, other_known_answers) - entries_with_cache = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) + entries_with_cache = list( + itertools.chain(*(cache.entries_with_name(name) for name in cache.names())) + ) await asyncio.sleep(1.2) - entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) + entries = list( + itertools.chain(*(cache.entries_with_name(name) for name in cache.names())) + ) assert zeroconf.cache.get(record_with_1s_ttl) is None await aiozc.async_close() - assert not zeroconf.question_history.suppresses(question, now, other_known_answers) + assert not zeroconf.question_history.suppresses( + question, now, other_known_answers + ) assert entries != original_entries assert entries_with_cache != original_entries assert record_with_10s_ttl in entries @@ -71,10 +85,14 @@ async def test_reaper(): async def test_reaper_aborts_when_done(): """Ensure cache cleanup stops when zeroconf is done.""" with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 
0.01): - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf = aiozc.zeroconf - record_with_10s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 10, b'a') - record_with_1s_ttl = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'b') + record_with_10s_ttl = r.DNSAddress( + "a", const._TYPE_SOA, const._CLASS_IN, 10, b"a" + ) + record_with_1s_ttl = r.DNSAddress( + "a", const._TYPE_SOA, const._CLASS_IN, 1, b"b" + ) zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) assert zeroconf.cache.get(record_with_10s_ttl) is not None assert zeroconf.cache.get(record_with_1s_ttl) is not None diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 6a37c6db..33eac2d4 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._exceptions """ +"""Unit tests for zeroconf._exceptions""" import logging import unittest @@ -10,7 +10,7 @@ import zeroconf as r from zeroconf import ServiceInfo, Zeroconf -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -30,7 +30,7 @@ class Exceptions(unittest.TestCase): @classmethod def setUpClass(cls): - cls.browser = Zeroconf(interfaces=['127.0.0.1']) + cls.browser = Zeroconf(interfaces=["127.0.0.1"]) @classmethod def tearDownClass(cls): @@ -38,62 +38,74 @@ def tearDownClass(cls): del cls.browser def test_bad_service_info_name(self): - self.assertRaises(r.BadTypeInNameException, self.browser.get_service_info, "type", "type_not") + self.assertRaises( + r.BadTypeInNameException, self.browser.get_service_info, "type", "type_not" + ) def test_bad_service_names(self): bad_names_to_try = ( - '', - 'local', - '_tcp.local.', - '_udp.local.', - '._udp.local.', - '_@._tcp.local.', - '_A@._tcp.local.', - '_x--x._tcp.local.', - '_-x._udp.local.', - '_x-._tcp.local.', - '_22._udp.local.', - '_2-2._tcp.local.', - 
'\x00._x._udp.local.', + "", + "local", + "_tcp.local.", + "_udp.local.", + "._udp.local.", + "_@._tcp.local.", + "_A@._tcp.local.", + "_x--x._tcp.local.", + "_-x._udp.local.", + "_x-._tcp.local.", + "_22._udp.local.", + "_2-2._tcp.local.", + "\x00._x._udp.local.", ) for name in bad_names_to_try: - self.assertRaises(r.BadTypeInNameException, self.browser.get_service_info, name, 'x.' + name) + self.assertRaises( + r.BadTypeInNameException, + self.browser.get_service_info, + name, + "x." + name, + ) def test_bad_local_names_for_get_service_info(self): bad_names_to_try = ( - 'homekitdev._nothttp._tcp.local.', - 'homekitdev._http._udp.local.', + "homekitdev._nothttp._tcp.local.", + "homekitdev._http._udp.local.", ) for name in bad_names_to_try: self.assertRaises( - r.BadTypeInNameException, self.browser.get_service_info, '_http._tcp.local.', name + r.BadTypeInNameException, + self.browser.get_service_info, + "_http._tcp.local.", + name, ) def test_good_instance_names(self): - assert r.service_type_name('.._x._tcp.local.') == '_x._tcp.local.' - assert r.service_type_name('x.y._http._tcp.local.') == '_http._tcp.local.' - assert r.service_type_name('1.2.3._mqtt._tcp.local.') == '_mqtt._tcp.local.' - assert r.service_type_name('x.sub._http._tcp.local.') == '_http._tcp.local.' + assert r.service_type_name(".._x._tcp.local.") == "_x._tcp.local." + assert r.service_type_name("x.y._http._tcp.local.") == "_http._tcp.local." + assert r.service_type_name("1.2.3._mqtt._tcp.local.") == "_mqtt._tcp.local." + assert r.service_type_name("x.sub._http._tcp.local.") == "_http._tcp.local." assert ( - r.service_type_name('6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local.') - == '_http._tcp.local.' + r.service_type_name( + "6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local." + ) + == "_http._tcp.local." 
) def test_good_instance_names_without_protocol(self): good_names_to_try = ( "Rachio-C73233.local.", - 'YeelightColorBulb-3AFD.local.', - 'YeelightTunableBulb-7220.local.', + "YeelightColorBulb-3AFD.local.", + "YeelightTunableBulb-7220.local.", "AlexanderHomeAssistant 74651D.local.", - 'iSmartGate-152.local.', - 'MyQ-FGA.local.', - 'lutron-02c4392a.local.', - 'WICED-hap-3E2734.local.', - 'MyHost.local.', - 'MyHost.sub.local.', + "iSmartGate-152.local.", + "MyQ-FGA.local.", + "lutron-02c4392a.local.", + "WICED-hap-3E2734.local.", + "MyHost.local.", + "MyHost.sub.local.", ) for name in good_names_to_try: - assert r.service_type_name(name, strict=False) == 'local.' + assert r.service_type_name(name, strict=False) == "local." for name in good_names_to_try: # Raises without strict=False @@ -101,48 +113,51 @@ def test_good_instance_names_without_protocol(self): def test_bad_types(self): bad_names_to_try = ( - '._x._tcp.local.', - 'a' * 64 + '._sub._http._tcp.local.', - 'a' * 62 + 'â._sub._http._tcp.local.', + "._x._tcp.local.", + "a" * 64 + "._sub._http._tcp.local.", + "a" * 62 + "â._sub._http._tcp.local.", ) for name in bad_names_to_try: self.assertRaises(r.BadTypeInNameException, r.service_type_name, name) def test_bad_sub_types(self): bad_names_to_try = ( - '_sub._http._tcp.local.', - '._sub._http._tcp.local.', - '\x7f._sub._http._tcp.local.', - '\x1f._sub._http._tcp.local.', + "_sub._http._tcp.local.", + "._sub._http._tcp.local.", + "\x7f._sub._http._tcp.local.", + "\x1f._sub._http._tcp.local.", ) for name in bad_names_to_try: self.assertRaises(r.BadTypeInNameException, r.service_type_name, name) def test_good_service_names(self): good_names_to_try = ( - ('_x._tcp.local.', '_x._tcp.local.'), - ('_x._udp.local.', '_x._udp.local.'), - ('_12345-67890-abc._udp.local.', '_12345-67890-abc._udp.local.'), - ('x._sub._http._tcp.local.', '_http._tcp.local.'), - ('a' * 63 + '._sub._http._tcp.local.', '_http._tcp.local.'), - ('a' * 61 + 'â._sub._http._tcp.local.', 
'_http._tcp.local.'), + ("_x._tcp.local.", "_x._tcp.local."), + ("_x._udp.local.", "_x._udp.local."), + ("_12345-67890-abc._udp.local.", "_12345-67890-abc._udp.local."), + ("x._sub._http._tcp.local.", "_http._tcp.local."), + ("a" * 63 + "._sub._http._tcp.local.", "_http._tcp.local."), + ("a" * 61 + "â._sub._http._tcp.local.", "_http._tcp.local."), ) for name, result in good_names_to_try: assert r.service_type_name(name) == result - assert r.service_type_name('_one_two._tcp.local.', strict=False) == '_one_two._tcp.local.' + assert ( + r.service_type_name("_one_two._tcp.local.", strict=False) + == "_one_two._tcp.local." + ) def test_invalid_addresses(self): type_ = "_test-srvc-type._tcp.local." name = "xxxyyy" registration_name = f"{name}.{type_}" - bad = (b'127.0.0.1', b'::1') + bad = (b"127.0.0.1", b"::1") for addr in bad: self.assertRaisesRegex( TypeError, - 'Addresses must either ', + "Addresses must either ", ServiceInfo, type_, registration_name, diff --git a/tests/test_handlers.py b/tests/test_handlers.py index a13824e0..e2e69aea 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._handlers """ +"""Unit tests for zeroconf._handlers""" import asyncio import logging @@ -26,7 +26,7 @@ from . import _clear_cache, _inject_response, has_working_ipv6 -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -44,14 +44,14 @@ def teardown_module(): class TestRegistrar(unittest.TestCase): def test_ttl(self): # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # service definition type_ = "_test-srvc-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( type_, registration_name, @@ -103,12 +103,16 @@ def _process_outgoing_packet(out): query.add_question(r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_SRV, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_TXT, const._CLASS_IN)) - query.add_question(r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN)) + query.add_question( + r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN) + ) question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) assert question_answers - _process_outgoing_packet(construct_outgoing_multicast_answers(question_answers.mcast_aggregate)) + _process_outgoing_packet( + construct_outgoing_multicast_answers(question_answers.mcast_aggregate) + ) # The additonals should all be suppresed since they are all in the answers section # There will be one NSEC additional to indicate the lack of AAAA record @@ -142,12 +146,16 @@ def _process_outgoing_packet(out): query.add_question(r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_SRV, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_TXT, const._CLASS_IN)) - query.add_question(r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN)) + query.add_question( + r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN) + ) question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) assert question_answers - _process_outgoing_packet(construct_outgoing_multicast_answers(question_answers.mcast_aggregate)) + _process_outgoing_packet( + construct_outgoing_multicast_answers(question_answers.mcast_aggregate) + ) # There will be one 
NSEC additional to indicate the lack of AAAA record assert nbr_answers == 4 and nbr_additionals == 1 and nbr_authorities == 0 @@ -164,7 +172,7 @@ def _process_outgoing_packet(out): def test_name_conflicts(self): # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_homeassistant._tcp.local." name = "Home" registration_name = f"{name}.{type_}" @@ -193,7 +201,7 @@ def test_name_conflicts(self): def test_register_and_lookup_type_by_uppercase_name(self): # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_mylowertype._tcp.local." name = "Home" registration_name = f"{name}.{type_}" @@ -225,16 +233,23 @@ def test_register_and_lookup_type_by_uppercase_name(self): def test_ptr_optimization(): # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # service definition type_ = "_test-srvc-type._tcp.local." name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) # register @@ -289,18 +304,20 @@ def test_ptr_optimization(): zc.close() -@unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') -@unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') +@unittest.skipIf(not has_working_ipv6(), "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_any_query_for_ptr(): """Test that queries for ANY will return PTR records and the response is aggregated.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_anyptr._tcp.local." 
name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." ipv6_address = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address] + ) zc.registry.async_add(info) _clear_cache(zc) @@ -308,7 +325,9 @@ def test_any_query_for_ptr(): question = r.DNSQuestion(type_, const._TYPE_ANY, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers mcast_answers = list(question_answers.mcast_aggregate) assert mcast_answers[0].name == type_ @@ -318,25 +337,29 @@ def test_any_query_for_ptr(): zc.close() -@unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') -@unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') +@unittest.skipIf(not has_working_ipv6(), "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_aaaa_query(): """Test that queries for AAAA records work and should respond right away.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_knownaaaservice._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." 
ipv6_address = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address] + ) zc.registry.async_add(info) generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(server_name, const._TYPE_AAAA, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers mcast_answers = list(question_answers.mcast_now) assert mcast_answers[0].address == ipv6_address # type: ignore[attr-defined] @@ -345,25 +368,29 @@ def test_aaaa_query(): zc.close() -@unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') -@unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') +@unittest.skipIf(not has_working_ipv6(), "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_aaaa_query_upper_case(): """Test that queries for AAAA records work and should respond right away with an upper case name.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_knownaaaservice._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." 
ipv6_address = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address]) + info = ServiceInfo( + type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address] + ) zc.registry.async_add(info) generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(server_name.upper(), const._TYPE_AAAA, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers mcast_answers = list(question_answers.mcast_now) assert mcast_answers[0].address == ipv6_address # type: ignore[attr-defined] @@ -372,20 +399,27 @@ def test_aaaa_query_upper_case(): zc.close() -@unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') -@unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') +@unittest.skipIf(not has_working_ipv6(), "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_a_and_aaaa_record_fate_sharing(): """Test that queries for AAAA always return A records in the additionals and should respond right away.""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_a-and-aaaa-service._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." 
ipv6_address = socket.inet_pton(socket.AF_INET6, "2001:db8::1") ipv4_address = socket.inet_aton("10.0.1.2") info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address, ipv4_address] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[ipv6_address, ipv4_address], ) aaaa_record = info.dns_addresses(version=r.IPVersion.V6Only)[0] a_record = info.dns_addresses(version=r.IPVersion.V4Only)[0] @@ -397,7 +431,9 @@ def test_a_and_aaaa_record_fate_sharing(): question = r.DNSQuestion(server_name, const._TYPE_AAAA, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers additionals = set().union(*question_answers.mcast_now.values()) assert aaaa_record in question_answers.mcast_now @@ -410,7 +446,9 @@ def test_a_and_aaaa_record_fate_sharing(): question = r.DNSQuestion(server_name, const._TYPE_A, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers additionals = set().union(*question_answers.mcast_now.values()) assert a_record in question_answers.mcast_now @@ -426,15 +464,22 @@ def test_a_and_aaaa_record_fate_sharing(): def test_unicast_response(): """Ensure we send a unicast response when the source port is not the MDNS port.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # service definition type_ = "_test-srvc-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) # register zc.registry.async_add(info) @@ -478,15 +523,22 @@ def test_unicast_response(): async def test_probe_answered_immediately(): """Verify probes are responded to immediately.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # service definition type_ = "_test-srvc-type._tcp.local." name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) query = r.DNSOutgoing(const._FLAGS_QR_QUERY) @@ -522,15 +574,22 @@ async def test_probe_answered_immediately(): async def test_probe_answered_immediately_with_uppercase_name(): """Verify probes are responded to immediately with an uppercase name.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # service definition type_ = "_test-srvc-type._tcp.local." 
name = "xxxyyy" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) query = r.DNSOutgoing(const._FLAGS_QR_QUERY) @@ -565,7 +624,7 @@ async def test_probe_answered_immediately_with_uppercase_name(): def test_qu_response(): """Handle multicast incoming with the QU bit set.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # service definition type_ = "_test-srvc-type._tcp.local." @@ -573,9 +632,16 @@ def test_qu_response(): name = "xxxyyy" registration_name = f"{name}.{type_}" registration_name2 = f"{name}.{other_type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = ServiceInfo( other_type_, @@ -658,7 +724,12 @@ def _validate_complete_response(answers): _validate_complete_response(question_answers.mcast_now) _inject_response( - zc, r.DNSIncoming(construct_outgoing_multicast_answers(question_answers.mcast_now).packets()[0]) + zc, + r.DNSIncoming( + construct_outgoing_multicast_answers(question_answers.mcast_now).packets()[ + 0 + ] + ), ) # With the cache repopulated; should respond to only unicast when the answer has been recently multicast query = r.DNSOutgoing(const._FLAGS_QR_QUERY) @@ -680,14 +751,21 @@ def _validate_complete_response(answers): def test_known_answer_supression(): - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_knownanswersv8._tcp.local." 
name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) @@ -698,7 +776,9 @@ def test_known_answer_supression(): question = r.DNSQuestion(type_, const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -710,7 +790,9 @@ def test_known_answer_supression(): generated.add_question(question) generated.add_answer_at_time(info.dns_pointer(), now) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -722,7 +804,9 @@ def test_known_answer_supression(): question = r.DNSQuestion(server_name, const._TYPE_A, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert question_answers.mcast_now @@ -735,7 +819,9 @@ def test_known_answer_supression(): for dns_address in info.dns_addresses(): 
generated.add_answer_at_time(dns_address, now) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -749,7 +835,9 @@ def test_known_answer_supression(): for dns_address in info.dns_addresses(): generated.add_answer_at_time(dns_address, now) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast expected_nsec_record = cast(r.DNSNsec, list(question_answers.mcast_now)[0]) @@ -763,7 +851,9 @@ def test_known_answer_supression(): question = r.DNSQuestion(registration_name, const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert question_answers.mcast_now @@ -775,7 +865,9 @@ def test_known_answer_supression(): generated.add_question(question) generated.add_answer_at_time(info.dns_service(), now) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -787,7 +879,9 @@ def test_known_answer_supression(): question = 
r.DNSQuestion(registration_name, const._TYPE_TXT, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -799,7 +893,9 @@ def test_known_answer_supression(): generated.add_question(question) generated.add_answer_at_time(info.dns_text(), now) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -812,7 +908,7 @@ def test_known_answer_supression(): def test_multi_packet_known_answer_supression(): - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_handlermultis._tcp.local." name = "knownname" name2 = "knownname2" @@ -822,19 +918,40 @@ def test_multi_packet_known_answer_supression(): registration2_name = f"{name2}.{type_}" registration3_name = f"{name3}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." server_name2 = "ash-3.local." server_name3 = "ash-4.local." 
info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = ServiceInfo( - type_, registration2_name, 80, 0, 0, desc, server_name2, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration2_name, + 80, + 0, + 0, + desc, + server_name2, + addresses=[socket.inet_aton("10.0.1.2")], ) info3 = ServiceInfo( - type_, registration3_name, 80, 0, 0, desc, server_name3, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration3_name, + 80, + 0, + 0, + desc, + server_name3, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) zc.registry.async_add(info2) @@ -853,7 +970,9 @@ def test_multi_packet_known_answer_supression(): generated.add_answer_at_time(info3.dns_pointer(), now) packets = generated.packets() assert len(packets) > 1 - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -867,24 +986,38 @@ def test_multi_packet_known_answer_supression(): def test_known_answer_supression_service_type_enumeration_query(): - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_otherknown._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) type_2 = "_otherknown2._tcp.local." 
name = "knownname" registration_name2 = f"{name}.{type_2}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name2 = "ash-3.local." info2 = ServiceInfo( - type_2, registration_name2, 80, 0, 0, desc, server_name2, addresses=[socket.inet_aton("10.0.1.2")] + type_2, + registration_name2, + 80, + 0, + 0, + desc, + server_name2, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info2) now = current_time_millis() @@ -892,10 +1025,14 @@ def test_known_answer_supression_service_type_enumeration_query(): # Test PTR supression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN) + question = r.DNSQuestion( + const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN + ) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -903,7 +1040,9 @@ def test_known_answer_supression_service_type_enumeration_query(): assert not question_answers.mcast_aggregate_last_second generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN) + question = r.DNSQuestion( + const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN + ) generated.add_question(question) generated.add_answer_at_time( r.DNSPointer( @@ -926,7 +1065,9 @@ def test_known_answer_supression_service_type_enumeration_query(): now, ) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in 
packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -940,34 +1081,52 @@ def test_known_answer_supression_service_type_enumeration_query(): def test_upper_case_enumeration_query(): - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) type_ = "_otherknown._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) type_2 = "_otherknown2._tcp.local." name = "knownname" registration_name2 = f"{name}.{type_2}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name2 = "ash-3.local." info2 = ServiceInfo( - type_2, registration_name2, 80, 0, 0, desc, server_name2, addresses=[socket.inet_aton("10.0.1.2")] + type_2, + registration_name2, + 80, + 0, + 0, + desc, + server_name2, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info2) _clear_cache(zc) # Test PTR supression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN) + question = r.DNSQuestion( + const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN + ) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -980,13 +1139,17 @@ def test_upper_case_enumeration_query(): def 
test_enumeration_query_with_no_registered_services(): - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) _clear_cache(zc) generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN) + question = r.DNSQuestion( + const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN + ) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert not question_answers # unregister zc.close() @@ -998,26 +1161,40 @@ def test_enumeration_query_with_no_registered_services(): async def test_qu_response_only_sends_additionals_if_sends_answer(): """Test that a QU response does not send additionals unless it sends the answer as well.""" # instantiate a zeroconf instance - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf type_ = "_addtest1._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "ash-2.local." info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info) type_2 = "_addtest2._tcp.local." name = "knownname" registration_name2 = f"{name}.{type_2}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name2 = "ash-3.local." 
info2 = ServiceInfo( - type_2, registration_name2, 80, 0, 0, desc, server_name2, addresses=[socket.inet_aton("10.0.1.2")] + type_2, + registration_name2, + 80, + 0, + 0, + desc, + server_name2, + addresses=[socket.inet_aton("10.0.1.2")], ) zc.registry.async_add(info2) @@ -1028,7 +1205,9 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): # Add the A record to the cache with 50% ttl remaining a_record = info.dns_addresses()[0] - a_record.set_created_ttl(current_time_millis() - (a_record.ttl * 1000 / 2), a_record.ttl) + a_record.set_created_ttl( + current_time_millis() - (a_record.ttl * 1000 / 2), a_record.ttl + ) assert not a_record.is_recent(current_time_millis()) info._dns_address_cache = None # we are mutating the record so clear the cache zc.cache.async_add_records([a_record]) @@ -1079,7 +1258,9 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): # Remove the 100% PTR record and add a 50% PTR record zc.cache.async_remove_records([ptr_record]) - ptr_record.set_created_ttl(current_time_millis() - (ptr_record.ttl * 1000 / 2), ptr_record.ttl) + ptr_record.set_created_ttl( + current_time_millis() - (ptr_record.ttl * 1000 / 2), ptr_record.ttl + ) assert not ptr_record.is_recent(current_time_millis()) zc.cache.async_add_records([ptr_record]) # With QU should respond to only multicast since the has less @@ -1117,7 +1298,9 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): question.unicast = True # Set the QU bit assert question.unicast is True query.add_question(question) - zc.cache.async_add_records([info2.dns_pointer()]) # Add 100% TTL for info2 to the cache + zc.cache.async_add_records( + [info2.dns_pointer()] + ) # Add 100% TTL for info2 to the cache question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False @@ -1149,19 +1332,28 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): async def test_cache_flush_bit(): """Test that the 
cache flush bit sets the TTL to one for matching records.""" # instantiate a zeroconf instance - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf type_ = "_cacheflush._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "server-uu1.local." info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) a_record = info.dns_addresses()[0] - zc.cache.async_add_records([info.dns_pointer(), a_record, info.dns_text(), info.dns_service()]) + zc.cache.async_add_records( + [info.dns_pointer(), a_record, info.dns_text(), info.dns_service()] + ) info.addresses = [socket.inet_aton("10.0.1.5"), socket.inet_aton("10.0.1.6")] new_records = info.dns_addresses() @@ -1210,7 +1402,9 @@ async def test_cache_flush_bit(): assert cached_record is not None assert cached_record.ttl == 1 - for entry in zc.cache.async_all_by_details(server_name, const._TYPE_A, const._CLASS_IN): + for entry in zc.cache.async_all_by_details( + server_name, const._TYPE_A, const._CLASS_IN + ): assert isinstance(entry, r.DNSAddress) if entry.address == fresh_address: assert entry.ttl > 1 @@ -1233,28 +1427,39 @@ async def test_cache_flush_bit(): async def test_record_update_manager_add_listener_callsback_existing_records(): """Test that the RecordUpdateManager will callback existing records.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc: Zeroconf = aiozc.zeroconf updated = [] class MyListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records( + self, zc: 
"Zeroconf", now: float, records: List[r.RecordUpdate] + ) -> None: """Update multiple records in one shot.""" updated.extend(records) type_ = "_cacheflush._tcp.local." name = "knownname" registration_name = f"{name}.{type_}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} server_name = "server-uu1.local." info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + server_name, + addresses=[socket.inet_aton("10.0.1.2")], ) a_record = info.dns_addresses()[0] ptr_record = info.dns_pointer() - zc.cache.async_add_records([ptr_record, a_record, info.dns_text(), info.dns_service()]) + zc.cache.async_add_records( + [ptr_record, a_record, info.dns_text(), info.dns_service()] + ) listener = MyListener() @@ -1278,7 +1483,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.Recor @pytest.mark.asyncio async def test_questions_query_handler_populates_the_question_history_from_qm_questions(): - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf now = current_time_millis() _clear_cache(zc) @@ -1301,13 +1506,19 @@ async def test_questions_query_handler_populates_the_question_history_from_qm_qu question = r.DNSQuestion("_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN) question.unicast = False known_answer = r.DNSPointer( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'known-to-other._hap._tcp.local.' 
+ "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + 10000, + "known-to-other._hap._tcp.local.", ) generated.add_question(question) generated.add_answer_at_time(known_answer, 0) now = r.current_time_millis() packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -1320,7 +1531,7 @@ async def test_questions_query_handler_populates_the_question_history_from_qm_qu @pytest.mark.asyncio async def test_questions_query_handler_does_not_put_qu_questions_in_history(): - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf now = current_time_millis() _clear_cache(zc) @@ -1339,13 +1550,19 @@ async def test_questions_query_handler_does_not_put_qu_questions_in_history(): question = r.DNSQuestion("_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN) question.unicast = True known_answer = r.DNSPointer( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'notqu._hap._tcp.local.' + "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + 10000, + "notqu._hap._tcp.local.", ) generated.add_question(question) generated.add_answer_at_time(known_answer, 0) now = r.current_time_millis() packets = generated.packets() - question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) + question_answers = zc.query_handler.async_response( + [r.DNSIncoming(packet) for packet in packets], False + ) assert question_answers assert "qu._hap._tcp.local." 
in str(question_answers) assert not question_answers.ucast # has not multicast recently @@ -1365,7 +1582,7 @@ async def test_guard_against_low_ptr_ttl(): TTLs would will cause ServiceBrowsers to flood the network with excessive refresh queries. """ - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf # Apple uses a 15s minimum TTL, however we do not have the same # level of rate limit and safe guards so we use 1/4 of the recommended value @@ -1374,21 +1591,21 @@ async def test_guard_against_low_ptr_ttl(): const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, 2, - 'low.local.', + "low.local.", ) answer_with_normal_ttl = r.DNSPointer( "myservicelow_tcp._tcp.local.", const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'normal.local.', + "normal.local.", ) good_bye_answer = r.DNSPointer( "myservicelow_tcp._tcp.local.", const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, 0, - 'goodbye.local.', + "goodbye.local.", ) # TTL should be adjusted to a safe value response = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -1411,21 +1628,21 @@ async def test_guard_against_low_ptr_ttl(): @pytest.mark.asyncio async def test_duplicate_goodbye_answers_in_packet(): """Ensure we do not throw an exception when there are duplicate goodbye records in a packet.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf answer_with_normal_ttl = r.DNSPointer( "myservicelow_tcp._tcp.local.", const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'host.local.', + "host.local.", ) good_bye_answer = r.DNSPointer( "myservicelow_tcp._tcp.local.", const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, 0, - 'host.local.', + "host.local.", ) response = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) response.add_answer_at_time(answer_with_normal_ttl, 0) @@ -1447,7 +1664,7 @@ async def 
test_response_aggregation_timings(run_isolated): type_2 = "_mservice2._tcp.local." type_3 = "_mservice3._tcp.local." - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() name = "xxxyyy" @@ -1455,15 +1672,36 @@ async def test_response_aggregation_timings(run_isolated): registration_name2 = f"{name}.{type_2}" registration_name3 = f"{name}.{type_3}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = ServiceInfo( - type_2, registration_name2, 80, 0, 0, desc, "ash-4.local.", addresses=[socket.inet_aton("10.0.1.3")] + type_2, + registration_name2, + 80, + 0, + 0, + desc, + "ash-4.local.", + addresses=[socket.inet_aton("10.0.1.3")], ) info3 = ServiceInfo( - type_3, registration_name3, 80, 0, 0, desc, "ash-4.local.", addresses=[socket.inet_aton("10.0.1.3")] + type_3, + registration_name3, + 80, + 0, + 0, + desc, + "ash-4.local.", + addresses=[socket.inet_aton("10.0.1.3")], ) aiozc.zeroconf.registry.async_add(info) aiozc.zeroconf.registry.async_add(info2) @@ -1489,9 +1727,9 @@ async def test_response_aggregation_timings(run_isolated): protocol = zc.engine.protocols[0] with patch.object(aiozc.zeroconf, "async_send") as send_mock: - protocol.datagram_received(query.packets()[0], ('127.0.0.1', const._MDNS_PORT)) - protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) - protocol.datagram_received(query.packets()[0], ('127.0.0.1', const._MDNS_PORT)) + protocol.datagram_received(query.packets()[0], ("127.0.0.1", const._MDNS_PORT)) + protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) + protocol.datagram_received(query.packets()[0], ("127.0.0.1", const._MDNS_PORT)) await 
asyncio.sleep(0.7) # Should aggregate into a single answer with up to a 500ms + 120ms delay @@ -1504,7 +1742,7 @@ async def test_response_aggregation_timings(run_isolated): assert info2.dns_pointer() in incoming.answers() send_mock.reset_mock() - protocol.datagram_received(query3.packets()[0], ('127.0.0.1', const._MDNS_PORT)) + protocol.datagram_received(query3.packets()[0], ("127.0.0.1", const._MDNS_PORT)) await asyncio.sleep(0.3) # Should send within 120ms since there are no other @@ -1520,7 +1758,7 @@ async def test_response_aggregation_timings(run_isolated): # Because the response was sent in the last second we need to make # sure the next answer is delayed at least a second aiozc.zeroconf.engine.protocols[0].datagram_received( - query4.packets()[0], ('127.0.0.1', const._MDNS_PORT) + query4.packets()[0], ("127.0.0.1", const._MDNS_PORT) ) await asyncio.sleep(0.5) @@ -1542,21 +1780,30 @@ async def test_response_aggregation_timings(run_isolated): @pytest.mark.asyncio -async def test_response_aggregation_timings_multiple(run_isolated, disable_duplicate_packet_suppression): +async def test_response_aggregation_timings_multiple( + run_isolated, disable_duplicate_packet_suppression +): """Verify multicast responses that are aggregated do not take longer than 620ms to send. 620ms is the maximum random delay of 120ms and 500ms additional for aggregation.""" type_2 = "_mservice2._tcp.local." 
- aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() name = "xxxyyy" registration_name2 = f"{name}.{type_2}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info2 = ServiceInfo( - type_2, registration_name2, 80, 0, 0, desc, "ash-4.local.", addresses=[socket.inet_aton("10.0.1.3")] + type_2, + registration_name2, + 80, + 0, + 0, + desc, + "ash-4.local.", + addresses=[socket.inet_aton("10.0.1.3")], ) aiozc.zeroconf.registry.async_add(info2) @@ -1569,8 +1816,10 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli with patch.object(aiozc.zeroconf, "async_send") as send_mock: send_mock.reset_mock() - protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) - protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression + protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) + protocol.last_time = ( + 0 # manually reset the last time to avoid duplicate packet suppression + ) await asyncio.sleep(0.2) calls = send_mock.mock_calls assert len(calls) == 1 @@ -1580,8 +1829,10 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli assert info2.dns_pointer() in incoming.answers() send_mock.reset_mock() - protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) - protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression + protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) + protocol.last_time = ( + 0 # manually reset the last time to avoid duplicate packet suppression + ) await asyncio.sleep(1.2) calls = send_mock.mock_calls assert len(calls) == 1 @@ -1591,10 +1842,14 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli assert info2.dns_pointer() in incoming.answers() send_mock.reset_mock() - 
protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) - protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression - protocol.datagram_received(query2.packets()[0], ('127.0.0.1', const._MDNS_PORT)) - protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression + protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) + protocol.last_time = ( + 0 # manually reset the last time to avoid duplicate packet suppression + ) + protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) + protocol.last_time = ( + 0 # manually reset the last time to avoid duplicate packet suppression + ) # The delay should increase with two packets and # 900ms is beyond the maximum aggregation delay # when there is no network protection delay @@ -1636,21 +1891,56 @@ async def test_response_aggregation_random_delay(): registration_name4 = f"{name}.{type_4}" registration_name5 = f"{name}.{type_5}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-1.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-1.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = ServiceInfo( - type_2, registration_name2, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.3")] + type_2, + registration_name2, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.3")], ) info3 = ServiceInfo( - type_3, registration_name3, 80, 0, 0, desc, "ash-3.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_3, + registration_name3, + 80, + 0, + 0, + desc, + "ash-3.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) info4 = ServiceInfo( - type_4, registration_name4, 80, 0, 0, desc, "ash-4.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_4, + registration_name4, + 80, + 0, + 0, + desc, + 
"ash-4.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) info5 = ServiceInfo( - type_5, registration_name5, 80, 0, 0, desc, "ash-5.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_5, + registration_name5, + 80, + 0, + 0, + desc, + "ash-5.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) mocked_zc = unittest.mock.MagicMock() outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 500) @@ -1668,7 +1958,9 @@ async def test_response_aggregation_random_delay(): # The third group should always be coalesced into first group since it will always come before outgoing_queue._multicast_delay_random_min = 100 outgoing_queue._multicast_delay_random_max = 200 - outgoing_queue.async_add(now, {info3.dns_pointer(): set(), info4.dns_pointer(): set()}) + outgoing_queue.async_add( + now, {info3.dns_pointer(): set(), info4.dns_pointer(): set()} + ) assert len(outgoing_queue.queue) == 1 assert info.dns_pointer() in outgoing_queue.queue[0].answers @@ -1698,12 +1990,26 @@ async def test_future_answers_are_removed_on_send(): registration_name = f"{name}.{type_}" registration_name2 = f"{name}.{type_2}" - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-1.local.", addresses=[socket.inet_aton("10.0.1.2")] + type_, + registration_name, + 80, + 0, + 0, + desc, + "ash-1.local.", + addresses=[socket.inet_aton("10.0.1.2")], ) info2 = ServiceInfo( - type_2, registration_name2, 80, 0, 0, desc, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.3")] + type_2, + registration_name2, + 80, + 0, + 0, + desc, + "ash-2.local.", + addresses=[socket.inet_aton("10.0.1.3")], ) mocked_zc = unittest.mock.MagicMock() outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 0) @@ -1743,22 +2049,25 @@ async def test_future_answers_are_removed_on_send(): async def test_add_listener_warns_when_not_using_record_update_listener(caplog): """Log when a listener is added that is not using RecordUpdateListener as a base class.""" 
- aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc: Zeroconf = aiozc.zeroconf updated = [] class MyListener: """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records( + self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate] + ) -> None: """Update multiple records in one shot.""" updated.extend(records) zc.add_listener(MyListener(), None) # type: ignore[arg-type] await asyncio.sleep(0) # flush out any call soons assert ( - "listeners passed to async_add_listener must inherit from RecordUpdateListener" in caplog.text - or "TypeError: Argument \'listener\' has incorrect type" in caplog.text + "listeners passed to async_add_listener must inherit from RecordUpdateListener" + in caplog.text + or "TypeError: Argument 'listener' has incorrect type" in caplog.text ) await aiozc.async_close() @@ -1768,7 +2077,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.Recor async def test_async_updates_iteration_safe(): """Ensure we can safely iterate over the async_updates.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc: Zeroconf = aiozc.zeroconf updated = [] good_bye_answer = r.DNSPointer( @@ -1776,13 +2085,15 @@ async def test_async_updates_iteration_safe(): const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, 0, - 'goodbye.local.', + "goodbye.local.", ) class OtherListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records( + self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate] + ) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -1791,7 +2102,9 @@ def async_update_records(self, zc: 
'Zeroconf', now: float, records: List[r.Recor class ListenerThatAddsListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records( + self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate] + ) -> None: """Update multiple records in one shot.""" updated.extend(records) zc.async_add_listener(other, None) @@ -1812,7 +2125,7 @@ def async_update_records(self, zc: 'Zeroconf', now: float, records: List[r.Recor async def test_async_updates_complete_iteration_safe(): """Ensure we can safely iterate over the async_updates_complete.""" - aiozc = AsyncZeroconf(interfaces=['127.0.0.1']) + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc: Zeroconf = aiozc.zeroconf class OtherListener(r.RecordUpdateListener): diff --git a/tests/test_history.py b/tests/test_history.py index fca57be2..659e67f8 100644 --- a/tests/test_history.py +++ b/tests/test_history.py @@ -17,12 +17,20 @@ def test_question_suppression(): now = r.current_time_millis() other_known_answers: Set[r.DNSRecord] = { r.DNSPointer( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'known-to-other._hap._tcp.local.' + "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + 10000, + "known-to-other._hap._tcp.local.", ) } our_known_answers: Set[r.DNSRecord] = { r.DNSPointer( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN, 10000, 'known-to-us._hap._tcp.local.' 
+ "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN, + 10000, + "known-to-us._hap._tcp.local.", ) } @@ -55,7 +63,7 @@ def test_question_expire(): const._TYPE_PTR, const._CLASS_IN, 10000, - 'known-to-other._hap._tcp.local.', + "known-to-other._hap._tcp.local.", created=now, ) } diff --git a/tests/test_init.py b/tests/test_init.py index 1d1f7086..3ba285d5 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf.py """ +"""Unit tests for zeroconf.py""" import logging import socket @@ -15,7 +15,7 @@ from . import _inject_responses -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -34,7 +34,9 @@ class Names(unittest.TestCase): def test_long_name(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) question = r.DNSQuestion( - "this.is.a.very.long.name.with.lots.of.parts.in.it.local.", const._TYPE_SRV, const._CLASS_IN + "this.is.a.very.long.name.with.lots.of.parts.in.it.local.", + const._TYPE_SRV, + const._CLASS_IN, ) generated.add_question(question) r.DNSIncoming(generated.packets()[0]) @@ -70,11 +72,11 @@ def test_same_name(self): def test_verify_name_change_with_lots_of_names(self): # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) # create a bunch of servers type_ = "_my-service._tcp.local." 
- name = 'a wonderful service' + name = "a wonderful service" server_count = 300 self.generate_many_hosts(zc, type_, name, server_count) @@ -87,15 +89,15 @@ def test_large_packet_exception_log_handling(self): """Verify we downgrade debug after warning.""" # instantiate a zeroconf instance - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) - with patch('zeroconf._logger.log.warning') as mocked_log_warn, patch( - 'zeroconf._logger.log.debug' + with patch("zeroconf._logger.log.warning") as mocked_log_warn, patch( + "zeroconf._logger.log.debug" ) as mocked_log_debug: # now that we have a long packet in our possession, let's verify the # exception handling. out = r.DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) - out.data.append(b'\0' * 10000) + out.data.append(b"\0" * 10000) # mock the zeroconf logger and check for the correct logging backoff call_counts = mocked_log_warn.call_count, mocked_log_debug.call_count @@ -112,7 +114,7 @@ def test_large_packet_exception_log_handling(self): zc.send(out, const._MDNS_ADDR, const._MDNS_PORT) time.sleep(0.3) r.log.debug( - 'warn %d debug %d was %s', + "warn %d debug %d was %s", mocked_log_warn.call_count, mocked_log_debug.call_count, call_counts, @@ -123,10 +125,10 @@ def test_large_packet_exception_log_handling(self): zc.close() def verify_name_change(self, zc, type_, name, number_hosts): - desc = {'path': '/~paulsm/'} + desc = {"path": "/~paulsm/"} info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, @@ -146,7 +148,7 @@ def verify_name_change(self, zc, type_, name, number_hosts): # in the registry info_service2 = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, @@ -155,23 +157,26 @@ def verify_name_change(self, zc, type_, name, number_hosts): addresses=[socket.inet_aton("10.0.1.2")], ) zc.register_service(info_service2, allow_name_change=True) - assert info_service2.name.split('.')[0] == '%s-%d' % (name, number_hosts + 1) + assert 
info_service2.name.split(".")[0] == "%s-%d" % (name, number_hosts + 1) def generate_many_hosts(self, zc, type_, name, number_hosts): block_size = 25 number_hosts = int((number_hosts - 1) / block_size + 1) * block_size out = r.DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) for i in range(1, number_hosts + 1): - next_name = name if i == 1 else '%s-%d' % (name, i) + next_name = name if i == 1 else "%s-%d" % (name, i) self.generate_host(out, next_name, type_) _inject_responses(zc, [r.DNSIncoming(packet) for packet in out.packets()]) @staticmethod def generate_host(out, host_name, type_): - name = '.'.join((host_name, type_)) + name = ".".join((host_name, type_)) out.add_answer_at_time( - r.DNSPointer(type_, const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, name), 0 + r.DNSPointer( + type_, const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, name + ), + 0, ) out.add_answer_at_time( r.DNSService( diff --git a/tests/test_listener.py b/tests/test_listener.py index bd802273..6faab4e8 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._listener """ +"""Unit tests for zeroconf._listener""" import logging import unittest @@ -23,7 +23,7 @@ from . import QuestionHistoryWithoutSuppression -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -43,7 +43,7 @@ def test_guard_against_oversized_packets(): These packets can quickly overwhelm the system. 
""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) @@ -54,7 +54,7 @@ def test_guard_against_oversized_packets(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 500, - b'path=/~paulsm/', + b"path=/~paulsm/", ), 0, ) @@ -77,7 +77,7 @@ def test_guard_against_oversized_packets(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 500, - b'path=/~paulsm/', + b"path=/~paulsm/", ) generated.add_answer_at_time( @@ -91,10 +91,10 @@ def test_guard_against_oversized_packets(): listener = _listener.AsyncListener(zc) listener.transport = unittest.mock.MagicMock() - listener.datagram_received(ok_packet, ('127.0.0.1', const._MDNS_PORT)) + listener.datagram_received(ok_packet, ("127.0.0.1", const._MDNS_PORT)) assert zc.cache.async_get_unique(okpacket_record) is not None - listener.datagram_received(over_sized_packet, ('127.0.0.1', const._MDNS_PORT)) + listener.datagram_received(over_sized_packet, ("127.0.0.1", const._MDNS_PORT)) assert ( zc.cache.async_get_unique( r.DNSText( @@ -102,15 +102,15 @@ def test_guard_against_oversized_packets(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 500, - b'path=/~paulsm/', + b"path=/~paulsm/", ) ) is None ) - logging.getLogger('zeroconf').setLevel(logging.INFO) + logging.getLogger("zeroconf").setLevel(logging.INFO) - listener.datagram_received(over_sized_packet, ('::1', const._MDNS_PORT, 1, 1)) + listener.datagram_received(over_sized_packet, ("::1", const._MDNS_PORT, 1, 1)) assert ( zc.cache.async_get_unique( r.DNSText( @@ -118,7 +118,7 @@ def test_guard_against_oversized_packets(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 500, - b'path=/~paulsm/', + b"path=/~paulsm/", ) ) is None @@ -131,9 +131,14 @@ def test_guard_against_duplicate_packets(): """Ensure we do not process duplicate packets. These packets can quickly overwhelm the system. 
""" - zc = Zeroconf(interfaces=['127.0.0.1']) + zc = Zeroconf(interfaces=["127.0.0.1"]) zc.registry.async_add( - ServiceInfo("_http._tcp.local.", "Test._http._tcp.local.", server="Test._http._tcp.local.", port=4) + ServiceInfo( + "_http._tcp.local.", + "Test._http._tcp.local.", + server="Test._http._tcp.local.", + port=4, + ) ) zc.question_history = QuestionHistoryWithoutSuppression() @@ -174,14 +179,22 @@ def handle_query_or_defer( start_time = current_time_millis() listener._process_datagram_at_time( - False, len(packet_with_qm_question), start_time, packet_with_qm_question, addrs + False, + len(packet_with_qm_question), + start_time, + packet_with_qm_question, + addrs, ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the same packet again and handle_query_or_defer should not fire listener._process_datagram_at_time( - False, len(packet_with_qm_question), start_time, packet_with_qm_question, addrs + False, + len(packet_with_qm_question), + start_time, + packet_with_qm_question, + addrs, ) _handle_query_or_defer.assert_not_called() _handle_query_or_defer.reset_mock() @@ -190,35 +203,55 @@ def handle_query_or_defer( new_time = start_time + 1100 # Now call with the same packet again and handle_query_or_defer should fire listener._process_datagram_at_time( - False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + False, + len(packet_with_qm_question), + new_time, + packet_with_qm_question, + addrs, ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the different packet and handle_query_or_defer should fire listener._process_datagram_at_time( - False, len(packet_with_qm_question2), new_time, packet_with_qm_question2, addrs + False, + len(packet_with_qm_question2), + new_time, + packet_with_qm_question2, + addrs, ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the different packet and 
handle_query_or_defer should fire listener._process_datagram_at_time( - False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + False, + len(packet_with_qm_question), + new_time, + packet_with_qm_question, + addrs, ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the different packet with qu question and handle_query_or_defer should fire listener._process_datagram_at_time( - False, len(packet_with_qu_question), new_time, packet_with_qu_question, addrs + False, + len(packet_with_qu_question), + new_time, + packet_with_qu_question, + addrs, ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call again with the same packet that has a qu question and handle_query_or_defer should fire listener._process_datagram_at_time( - False, len(packet_with_qu_question), new_time, packet_with_qu_question, addrs + False, + len(packet_with_qu_question), + new_time, + packet_with_qu_question, + addrs, ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() @@ -227,20 +260,30 @@ def handle_query_or_defer( # Call with the QM packet again listener._process_datagram_at_time( - False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + False, + len(packet_with_qm_question), + new_time, + packet_with_qm_question, + addrs, ) _handle_query_or_defer.assert_called_once() _handle_query_or_defer.reset_mock() # Now call with the same packet again and handle_query_or_defer should not fire listener._process_datagram_at_time( - False, len(packet_with_qm_question), new_time, packet_with_qm_question, addrs + False, + len(packet_with_qm_question), + new_time, + packet_with_qm_question, + addrs, ) _handle_query_or_defer.assert_not_called() _handle_query_or_defer.reset_mock() # Now call with garbage - listener._process_datagram_at_time(False, len(b'garbage'), new_time, b'garbage', addrs) + listener._process_datagram_at_time( + False, len(b"garbage"), 
new_time, b"garbage", addrs + ) _handle_query_or_defer.assert_not_called() _handle_query_or_defer.reset_mock() diff --git a/tests/test_logger.py b/tests/test_logger.py index 84a46f89..7a9b4867 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -11,16 +11,16 @@ def test_loading_logger(): """Test loading logger does not change level unless it is unset.""" - log = logging.getLogger('zeroconf') + log = logging.getLogger("zeroconf") log.setLevel(logging.CRITICAL) set_logger_level_if_unset() - log = logging.getLogger('zeroconf') + log = logging.getLogger("zeroconf") assert log.level == logging.CRITICAL - log = logging.getLogger('zeroconf') + log = logging.getLogger("zeroconf") log.setLevel(logging.NOTSET) set_logger_level_if_unset() - log = logging.getLogger('zeroconf') + log = logging.getLogger("zeroconf") assert log.level == logging.WARNING @@ -73,12 +73,12 @@ def test_llog_exception_debug(): with patch("zeroconf._logger.log.debug") as mock_log_debug: quiet_logger.log_exception_debug("the exception") - assert mock_log_debug.mock_calls == [call('the exception', exc_info=True)] + assert mock_log_debug.mock_calls == [call("the exception", exc_info=True)] with patch("zeroconf._logger.log.debug") as mock_log_debug: quiet_logger.log_exception_debug("the exception") - assert mock_log_debug.mock_calls == [call('the exception', exc_info=False)] + assert mock_log_debug.mock_calls == [call("the exception", exc_info=False)] def test_log_exception_once(): diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 6990917a..e682a34c 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -""" Unit tests for zeroconf._protocol """ +"""Unit tests for zeroconf._protocol""" import copy import logging @@ -19,7 +19,7 @@ from . 
import has_working_ipv6 -log = logging.getLogger('zeroconf') +log = logging.getLogger("zeroconf") original_logging_level = logging.NOTSET @@ -49,16 +49,18 @@ def test_parse_own_packet_flags(self): def test_parse_own_packet_question(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - generated.add_question(r.DNSQuestion("testname.local.", const._TYPE_SRV, const._CLASS_IN)) + generated.add_question( + r.DNSQuestion("testname.local.", const._TYPE_SRV, const._CLASS_IN) + ) r.DNSIncoming(generated.packets()[0]) def test_parse_own_packet_nsec(self): answer = r.DNSNsec( - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", const._TYPE_NSEC, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", [const._TYPE_TXT, const._TYPE_SRV], ) @@ -69,11 +71,11 @@ def test_parse_own_packet_nsec(self): # Now with the higher RD type first answer = r.DNSNsec( - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", const._TYPE_NSEC, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", [const._TYPE_SRV, const._TYPE_TXT], ) @@ -84,30 +86,30 @@ def test_parse_own_packet_nsec(self): # Types > 255 should raise an exception answer_invalid_types = r.DNSNsec( - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", const._TYPE_NSEC, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", [const._TYPE_TXT, const._TYPE_SRV, 1000], ) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time(answer_invalid_types, 0) - with pytest.raises(ValueError, match='rdtype 1000 is too large for NSEC'): + with pytest.raises(ValueError, match="rdtype 1000 is too large for NSEC"): generated.packets() # Empty rdtypes are not allowed 
answer_invalid_types = r.DNSNsec( - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", const._TYPE_NSEC, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", [], ) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time(answer_invalid_types, 0) - with pytest.raises(ValueError, match='NSEC must have at least one rdtype'): + with pytest.raises(ValueError, match="NSEC must have at least one rdtype"): generated.packets() def test_parse_own_packet_response(self): @@ -250,14 +252,18 @@ def test_suppress_answer(self): def test_dns_hinfo(self): generated = r.DNSOutgoing(0) - generated.add_additional_answer(DNSHinfo('irrelevant', const._TYPE_HINFO, 0, 0, 'cpu', 'os')) + generated.add_additional_answer( + DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu", "os") + ) parsed = r.DNSIncoming(generated.packets()[0]) answer = cast(r.DNSHinfo, parsed.answers()[0]) - assert answer.cpu == 'cpu' - assert answer.os == 'os' + assert answer.cpu == "cpu" + assert answer.os == "os" generated = r.DNSOutgoing(0) - generated.add_additional_answer(DNSHinfo('irrelevant', const._TYPE_HINFO, 0, 0, 'cpu', 'x' * 257)) + generated.add_additional_answer( + DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu", "x" * 257) + ) self.assertRaises(r.NamePartTooLongException, generated.packets) def test_many_questions(self): @@ -265,7 +271,9 @@ def test_many_questions(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) questions = [] for i in range(100): - question = r.DNSQuestion(f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN) + question = r.DNSQuestion( + f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN + ) generated.add_question(question) questions.append(question) assert len(generated.questions) == 100 @@ -285,7 +293,9 @@ def test_many_questions_with_many_known_answers(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) questions 
= [] for _ in range(30): - question = r.DNSQuestion("_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN) + question = r.DNSQuestion( + "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN + ) generated.add_question(question) questions.append(question) assert len(generated.questions) == 30 @@ -296,7 +306,7 @@ def test_many_questions_with_many_known_answers(self): const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - '123.local.', + "123.local.", ) generated.add_answer_at_time(known_answer, now) packets = generated.packets() @@ -324,7 +334,9 @@ def test_massive_probe_packet_split(self): questions = [] for _ in range(30): question = r.DNSQuestion( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE + "_hap._tcp.local.", + const._TYPE_PTR, + const._CLASS_IN | const._CLASS_UNIQUE, ) generated.add_question(question) questions.append(question) @@ -335,7 +347,7 @@ def test_massive_probe_packet_split(self): const._TYPE_PTR, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - '123.local.', + "123.local.", ) generated.add_authorative_answer(authorative_answer) packets = generated.packets() @@ -374,7 +386,7 @@ def test_only_one_answer_can_by_large(self): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, 1200, - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==' * 100, + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==" * 100, ), ) generated.add_answer( @@ -421,7 +433,9 @@ def test_questions_do_not_end_up_every_packet(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) for i in range(35): - question = r.DNSQuestion(f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN) + question = r.DNSQuestion( + f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN + ) generated.add_question(question) answer = r.DNSService( f"testname{i}.local.", @@ -480,7 +494,9 @@ def test_response_header_bits(self): def test_numbers(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) bytes = generated.packets()[0] - (num_questions, 
num_answers, num_authorities, num_additionals) = struct.unpack('!4H', bytes[4:12]) + (num_questions, num_answers, num_authorities, num_additionals) = struct.unpack( + "!4H", bytes[4:12] + ) assert num_questions == 0 assert num_answers == 0 assert num_authorities == 0 @@ -492,7 +508,9 @@ def test_numbers_questions(self): for i in range(10): generated.add_question(question) bytes = generated.packets()[0] - (num_questions, num_answers, num_authorities, num_additionals) = struct.unpack('!4H', bytes[4:12]) + (num_questions, num_answers, num_authorities, num_additionals) = struct.unpack( + "!4H", bytes[4:12] + ) assert num_questions == 10 assert num_answers == 0 assert num_authorities == 0 @@ -503,14 +521,14 @@ class TestDnsIncoming(unittest.TestCase): def test_incoming_exception_handling(self): generated = r.DNSOutgoing(0) packet = generated.packets()[0] - packet = packet[:8] + b'deadbeef' + packet[8:] + packet = packet[:8] + b"deadbeef" + packet[8:] parsed = r.DNSIncoming(packet) parsed = r.DNSIncoming(packet) assert parsed.valid is False def test_incoming_unknown_type(self): generated = r.DNSOutgoing(0) - answer = r.DNSAddress('a', const._TYPE_SOA, const._CLASS_IN, 1, b'a') + answer = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"a") generated.add_additional_answer(answer) packet = generated.packets()[0] parsed = r.DNSIncoming(packet) @@ -520,20 +538,22 @@ def test_incoming_unknown_type(self): def test_incoming_circular_reference(self): assert not r.DNSIncoming( bytes.fromhex( - '01005e0000fb542a1bf0577608004500006897934000ff11d81bc0a86a31e00000fb' - '14e914e90054f9b2000084000000000100000000095f7365727669636573075f646e' - '732d7364045f756470056c6f63616c00000c0001000011940018105f73706f746966' - '792d636f6e6e656374045f746370c023' + "01005e0000fb542a1bf0577608004500006897934000ff11d81bc0a86a31e00000fb" + "14e914e90054f9b2000084000000000100000000095f7365727669636573075f646e" + "732d7364045f756470056c6f63616c00000c0001000011940018105f73706f746966" + 
"792d636f6e6e656374045f746370c023" ) ).valid - @unittest.skipIf(not has_working_ipv6(), 'Requires IPv6') - @unittest.skipIf(os.environ.get('SKIP_IPV6'), 'IPv6 tests disabled') + @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") + @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_incoming_ipv6(self): addr = "2606:2800:220:1:248:1893:25c8:1946" # example.com packed = socket.inet_pton(socket.AF_INET6, addr) generated = r.DNSOutgoing(0) - answer = r.DNSAddress('domain', const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed) + answer = r.DNSAddress( + "domain", const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed + ) generated.add_additional_answer(answer) packet = generated.packets()[0] parsed = r.DNSIncoming(packet) @@ -650,8 +670,8 @@ def test_dns_compression_rollback_for_corruption(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - b'\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1' - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) @@ -695,7 +715,9 @@ def test_dns_compression_rollback_for_corruption(): assert incoming.valid is True assert ( len(incoming.answers()) - == incoming.num_answers + incoming.num_authorities + incoming.num_additionals + == incoming.num_answers + + incoming.num_authorities + + incoming.num_additionals ) @@ -712,8 +734,8 @@ def test_tc_bit_in_query_packet(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - b'\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1' - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) @@ -744,8 +766,8 @@ def test_tc_bit_not_set_in_answer_packet(): const._TYPE_TXT, const._CLASS_IN | 
const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - b'\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1' - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) @@ -769,9 +791,7 @@ def test_tc_bit_not_set_in_answer_packet(): # 4003 15.973052 192.168.107.68 224.0.0.251 MDNS 76 Standard query 0xffc4 PTR _raop._tcp.local, "QM" question def test_qm_packet_parser(): """Test we can parse a query packet with the QM bit.""" - qm_packet = ( - b'\xff\xc4\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x05_raop\x04_tcp\x05local\x00\x00\x0c\x00\x01' - ) + qm_packet = b"\xff\xc4\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x05_raop\x04_tcp\x05local\x00\x00\x0c\x00\x01" parsed = DNSIncoming(qm_packet) assert parsed.questions[0].unicast is False assert ",QM," in str(parsed.questions[0]) @@ -781,8 +801,8 @@ def test_qm_packet_parser(): def test_qu_packet_parser(): """Test we can parse a query packet with the QU bit.""" qu_packet = ( - b'\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x0f_companion-link\x04_tcp\x05local' - b'\x00\x00\x0c\x80\x01\x00\x00)\x05\xa0\x00\x00\x11\x94\x00\x12\x00\x04\x00\x0e\x00dz{\x8a6\x9czF\x84,\xcaQ\xff' + b"\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x0f_companion-link\x04_tcp\x05local" + b"\x00\x00\x0c\x80\x01\x00\x00)\x05\xa0\x00\x00\x11\x94\x00\x12\x00\x04\x00\x0e\x00dz{\x8a6\x9czF\x84,\xcaQ\xff" ) parsed = DNSIncoming(qu_packet) assert parsed.questions[0].unicast is True @@ -818,8 +838,8 @@ def test_records_same_packet_share_fate(): const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - b'\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1' - b'\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==', + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", ), 0, ) @@ -834,19 +854,19 @@ def 
test_records_same_packet_share_fate(): def test_dns_compression_invalid_skips_bad_name_compress_in_question(): """Test our wire parser can skip bad compression in questions.""" packet = ( - b'\x00\x00\x00\x00\x00\x04\x00\x00\x00\x07\x00\x00\x11homeassistant1128\x05l' - b'ocal\x00\x00\xff\x00\x014homeassistant1128 [534a4794e5ed41879ecf012252d3e02' - b'a]\x0c_workstation\x04_tcp\xc0\x1e\x00\xff\x00\x014homeassistant1127 [534a47' - b'94e5ed41879ecf012252d3e02a]\xc0^\x00\xff\x00\x014homeassistant1123 [534a479' - b'4e5ed41879ecf012252d3e02a]\xc0^\x00\xff\x00\x014homeassistant1118 [534a4794' - b'e5ed41879ecf012252d3e02a]\xc0^\x00\xff\x00\x01\xc0\x0c\x00\x01\x80' - b'\x01\x00\x00\x00x\x00\x04\xc0\xa8<\xc3\xc0v\x00\x10\x80\x01\x00\x00\x00' - b'x\x00\x01\x00\xc0v\x00!\x80\x01\x00\x00\x00x\x00\x1f\x00\x00\x00\x00' - b'\x00\x00\x11homeassistant1127\x05local\x00\xc0\xb1\x00\x10\x80' - b'\x01\x00\x00\x00x\x00\x01\x00\xc0\xb1\x00!\x80\x01\x00\x00\x00x\x00\x1f' - b'\x00\x00\x00\x00\x00\x00\x11homeassistant1123\x05local\x00\xc0)\x00\x10\x80' - b'\x01\x00\x00\x00x\x00\x01\x00\xc0)\x00!\x80\x01\x00\x00\x00x\x00\x1f' - b'\x00\x00\x00\x00\x00\x00\x11homeassistant1128\x05local\x00' + b"\x00\x00\x00\x00\x00\x04\x00\x00\x00\x07\x00\x00\x11homeassistant1128\x05l" + b"ocal\x00\x00\xff\x00\x014homeassistant1128 [534a4794e5ed41879ecf012252d3e02" + b"a]\x0c_workstation\x04_tcp\xc0\x1e\x00\xff\x00\x014homeassistant1127 [534a47" + b"94e5ed41879ecf012252d3e02a]\xc0^\x00\xff\x00\x014homeassistant1123 [534a479" + b"4e5ed41879ecf012252d3e02a]\xc0^\x00\xff\x00\x014homeassistant1118 [534a4794" + b"e5ed41879ecf012252d3e02a]\xc0^\x00\xff\x00\x01\xc0\x0c\x00\x01\x80" + b"\x01\x00\x00\x00x\x00\x04\xc0\xa8<\xc3\xc0v\x00\x10\x80\x01\x00\x00\x00" + b"x\x00\x01\x00\xc0v\x00!\x80\x01\x00\x00\x00x\x00\x1f\x00\x00\x00\x00" + b"\x00\x00\x11homeassistant1127\x05local\x00\xc0\xb1\x00\x10\x80" + b"\x01\x00\x00\x00x\x00\x01\x00\xc0\xb1\x00!\x80\x01\x00\x00\x00x\x00\x1f" + 
b"\x00\x00\x00\x00\x00\x00\x11homeassistant1123\x05local\x00\xc0)\x00\x10\x80" + b"\x01\x00\x00\x00x\x00\x01\x00\xc0)\x00!\x80\x01\x00\x00\x00x\x00\x1f" + b"\x00\x00\x00\x00\x00\x00\x11homeassistant1128\x05local\x00" ) parsed = r.DNSIncoming(packet) assert len(parsed.questions) == 4 @@ -855,8 +875,8 @@ def test_dns_compression_invalid_skips_bad_name_compress_in_question(): def test_dns_compression_all_invalid(caplog): """Test our wire parser can skip all invalid data.""" packet = ( - b'\x00\x00\x84\x00\x00\x00\x00\x01\x00\x00\x00\x00!roborock-vacuum-s5e_miio416' - b'112328\x00\x00/\x80\x01\x00\x00\x00x\x00\t\xc0P\x00\x05@\x00\x00\x00\x00' + b"\x00\x00\x84\x00\x00\x00\x00\x01\x00\x00\x00\x00!roborock-vacuum-s5e_miio416" + b"112328\x00\x00/\x80\x01\x00\x00\x00x\x00\t\xc0P\x00\x05@\x00\x00\x00\x00" ) parsed = r.DNSIncoming(packet, ("2.4.5.4", 5353)) assert len(parsed.questions) == 0 @@ -871,9 +891,9 @@ def test_invalid_next_name_ignored(): The RFC states it should be ignored when used with mDNS. 
""" packet = ( - b'\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x07Android\x05local\x00\x00' - b'\xff\x00\x01\xc0\x0c\x00/\x00\x01\x00\x00\x00x\x00\x08\xc02\x00\x04@' - b'\x00\x00\x08\xc0\x0c\x00\x01\x00\x01\x00\x00\x00x\x00\x04\xc0\xa8X<' + b"\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x07Android\x05local\x00\x00" + b"\xff\x00\x01\xc0\x0c\x00/\x00\x01\x00\x00\x00x\x00\x08\xc02\x00\x04@" + b"\x00\x00\x08\xc0\x0c\x00\x01\x00\x01\x00\x00\x00x\x00\x04\xc0\xa8X<" ) parsed = r.DNSIncoming(packet) assert len(parsed.questions) == 1 @@ -893,11 +913,11 @@ def test_dns_compression_invalid_skips_record(): ) parsed = r.DNSIncoming(packet) answer = r.DNSNsec( - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", const._TYPE_NSEC, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'eufy HomeBase2-2464._hap._tcp.local.', + "eufy HomeBase2-2464._hap._tcp.local.", [const._TYPE_TXT, const._TYPE_SRV], ) assert answer in parsed.answers() @@ -918,11 +938,11 @@ def test_dns_compression_points_forward(): ) parsed = r.DNSIncoming(packet) answer = r.DNSNsec( - 'TV Beneden (2)._androidtvremote._tcp.local.', + "TV Beneden (2)._androidtvremote._tcp.local.", const._TYPE_NSEC, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, - 'TV Beneden (2)._androidtvremote._tcp.local.', + "TV Beneden (2)._androidtvremote._tcp.local.", [const._TYPE_TXT, const._TYPE_SRV], ) assert answer in parsed.answers() @@ -942,9 +962,9 @@ def test_dns_compression_points_to_itself(): def test_dns_compression_points_beyond_packet(): """Test our wire parser does not fail when the compression pointer points beyond the packet.""" packet = ( - b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x06domain\x05local\x00\x00\x01' - b'\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\xe7\x0f\x00\x01\x80\x01\x00\x00' - b'\x00\x01\x00\x04\xc0\xa8\xd0\x06' + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x06domain\x05local\x00\x00\x01" + 
b"\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\xe7\x0f\x00\x01\x80\x01\x00\x00" + b"\x00\x01\x00\x04\xc0\xa8\xd0\x06" ) parsed = r.DNSIncoming(packet) assert len(parsed.answers()) == 1 @@ -953,9 +973,9 @@ def test_dns_compression_points_beyond_packet(): def test_dns_compression_generic_failure(caplog): """Test our wire parser does not loop forever when dns compression is corrupt.""" packet = ( - b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x06domain\x05local\x00\x00\x01' - b'\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05-\x0c\x00\x01\x80\x01\x00\x00' - b'\x00\x01\x00\x04\xc0\xa8\xd0\x06' + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x06domain\x05local\x00\x00\x01" + b"\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05-\x0c\x00\x01\x80\x01\x00\x00" + b"\x00\x01\x00\x04\xc0\xa8\xd0\x06" ) parsed = r.DNSIncoming(packet, ("1.2.3.4", 5353)) assert len(parsed.answers()) == 1 @@ -965,17 +985,17 @@ def test_dns_compression_generic_failure(caplog): def test_label_length_attack(): """Test our wire parser does not loop forever when the name exceeds 253 chars.""" packet = ( - b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d' - b'\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x00\x00\x01\x80' - b'\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\xc0\x0c\x00\x01\x80\x01\x00\x00\x00' - b'\x01\x00\x04\xc0\xa8\xd0\x06' + 
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d" + b"\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x01d\x00\x00\x01\x80" + b"\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\xc0\x0c\x00\x01\x80\x01\x00\x00\x00" + b"\x01\x00\x04\xc0\xa8\xd0\x06" ) parsed = r.DNSIncoming(packet) assert len(parsed.answers()) == 0 @@ -984,28 +1004,28 @@ def test_label_length_attack(): def test_label_compression_attack(): """Test our wire parser does not loop forever when exceeding the maximum number of labels.""" packet = ( - b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x03atk\x00\x00\x01\x80' - b'\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - 
b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03' - b'atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\xc0' - b'\x0c\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x06' + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x03atk\x00\x00\x01\x80" + b"\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + 
b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03" + b"atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\x03atk\xc0" + b"\x0c\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x06" ) parsed = r.DNSIncoming(packet) assert len(parsed.answers()) == 1 @@ -1014,15 +1034,15 @@ def test_label_compression_attack(): def test_dns_compression_loop_attack(): """Test our wire parser does not loop forever when dns compression is in a loop.""" packet = ( - b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x03atk\x03dns\x05loc' - b'al\xc0\x10\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\x04a' - b'tk2\x04dns2\xc0\x14\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05' - b'\x04atk3\xc0\x10\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0' - b'\x05\x04atk4\x04dns5\xc0\x14\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0' - b'\xa8\xd0\x05\x04atk5\x04dns2\xc0^\x00\x01\x80\x01\x00\x00\x00\x01\x00' - b'\x04\xc0\xa8\xd0\x05\xc0s\x00\x01\x80\x01\x00\x00\x00\x01\x00' - b'\x04\xc0\xa8\xd0\x05\xc0s\x00\x01\x80\x01\x00\x00\x00\x01\x00' - b'\x04\xc0\xa8\xd0\x05' + b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x03atk\x03dns\x05loc" + b"al\xc0\x10\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05\x04a" + b"tk2\x04dns2\xc0\x14\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05" + b"\x04atk3\xc0\x10\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0" + b"\x05\x04atk4\x04dns5\xc0\x14\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0" + b"\xa8\xd0\x05\x04atk5\x04dns2\xc0^\x00\x01\x80\x01\x00\x00\x00\x01\x00" + b"\x04\xc0\xa8\xd0\x05\xc0s\x00\x01\x80\x01\x00\x00\x00\x01\x00" + b"\x04\xc0\xa8\xd0\x05\xc0s\x00\x01\x80\x01\x00\x00\x00\x01\x00" + 
b"\x04\xc0\xa8\xd0\x05" ) parsed = r.DNSIncoming(packet) assert len(parsed.answers()) == 0 @@ -1031,28 +1051,28 @@ def test_dns_compression_loop_attack(): def test_txt_after_invalid_nsec_name_still_usable(): """Test that we can see the txt record after the invalid nsec record.""" packet = ( - b'\x00\x00\x84\x00\x00\x00\x00\x06\x00\x00\x00\x00\x06_sonos\x04_tcp\x05loc' - b'al\x00\x00\x0c\x00\x01\x00\x00\x11\x94\x00\x15\x12Sonos-542A1BC9220E' - b'\xc0\x0c\x12Sonos-542A1BC9220E\xc0\x18\x00/\x80\x01\x00\x00\x00x\x00' - b'\x08\xc1t\x00\x04@\x00\x00\x08\xc0)\x00/\x80\x01\x00\x00\x11\x94\x00' - b'\t\xc0)\x00\x05\x00\x00\x80\x00@\xc0)\x00!\x80\x01\x00\x00\x00x' - b'\x00\x08\x00\x00\x00\x00\x05\xa3\xc0>\xc0>\x00\x01\x80\x01\x00\x00\x00x' - b'\x00\x04\xc0\xa8\x02:\xc0)\x00\x10\x80\x01\x00\x00\x11\x94\x01*2info=/api' - b'/v1/players/RINCON_542A1BC9220E01400/info\x06vers=3\x10protovers=1.24.1\nbo' - b'otseq=11%hhid=Sonos_rYn9K9DLXJe0f3LP9747lbvFvh;mhhid=Sonos_rYn9K9DLXJe0f3LP9' - b'747lbvFvh.Q45RuMaeC07rfXh7OJGm\xc0>\x00\x01\x80\x01\x00\x00\x00x" + b"\x00\x04\xc0\xa8\x02:\xc0)\x00\x10\x80\x01\x00\x00\x11\x94\x01*2info=/api" + b"/v1/players/RINCON_542A1BC9220E01400/info\x06vers=3\x10protovers=1.24.1\nbo" + b"otseq=11%hhid=Sonos_rYn9K9DLXJe0f3LP9747lbvFvh;mhhid=Sonos_rYn9K9DLXJe0f3LP9" + b"747lbvFvh.Q45RuMaeC07rfXh7OJGm None: + def update_record( + self, zc: "Zeroconf", now: float, record: r.DNSRecord + ) -> None: nonlocal updates updates.append(record) @@ -65,11 +71,11 @@ def on_service_state_change(zeroconf, service_type, state_change, name): info_service = ServiceInfo( type_, - f'{name}.{type_}', + f"{name}.{type_}", 80, 0, 0, - {'path': '/~paulsm/'}, + {"path": "/~paulsm/"}, "ash-2.local.", addresses=[socket.inet_aton("10.0.1.2")], ) @@ -81,7 +87,15 @@ def on_service_state_change(zeroconf, service_type, state_change, name): browser.cancel() assert len(updates) - assert len([isinstance(update, r.DNSPointer) and update.name == type_ for update in updates]) >= 1 + assert ( + 
len( + [ + isinstance(update, r.DNSPointer) and update.name == type_ + for update in updates + ] + ) + >= 1 + ) zc.remove_listener(listener) # Removing a second time should not throw @@ -92,8 +106,12 @@ def on_service_state_change(zeroconf, service_type, state_change, name): def test_record_update_compat(): """Test a RecordUpdate can fetch by index.""" - new = r.DNSPointer('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 'new') - old = r.DNSPointer('irrelevant', const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, 'old') + new = r.DNSPointer( + "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, "new" + ) + old = r.DNSPointer( + "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, "old" + ) update = RecordUpdate(new, old) assert update[0] == new assert update[1] == old diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 2ef4b15b..30920c6a 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,21 +1,21 @@ -""" Multicast DNS Service Discovery for Python, v0.14-wmcbrine - Copyright 2003 Paul Scott-Murphy, 2014 William McBrine +"""Multicast DNS Service Discovery for Python, v0.14-wmcbrine +Copyright 2003 Paul Scott-Murphy, 2014 William McBrine - This module provides a framework for the use of DNS Service Discovery - using IP multicast. +This module provides a framework for the use of DNS Service Discovery +using IP multicast. - This library is free software; you can redistribute it and/or - modify it under the terms of the GNU Lesser General Public - License as published by the Free Software Foundation; either - version 2.1 of the License, or (at your option) any later version. +This library is free software; you can redistribute it and/or +modify it under the terms of the GNU Lesser General Public +License as published by the Free Software Foundation; either +version 2.1 of the License, or (at your option) any later version. 
- This library is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - Lesser General Public License for more details. +This library is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +Lesser General Public License for more details. - You should have received a copy of the GNU Lesser General Public - License along with this library; if not, write to the Free Software - Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 - USA +You should have received a copy of the GNU Lesser General Public +License along with this library; if not, write to the Free Software +Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 +USA """ diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index a0385515..cf4b4e8e 100644 --- a/tests/utils/test_asyncio.py +++ b/tests/utils/test_asyncio.py @@ -29,7 +29,7 @@ async def test_async_get_all_tasks() -> None: loop = aioutils.get_running_loop() assert loop is not None await aioutils._async_get_all_tasks(loop) - if not hasattr(asyncio, 'all_tasks'): + if not hasattr(asyncio, "all_tasks"): return with patch("zeroconf._utils.asyncio.asyncio.all_tasks", side_effect=RuntimeError): await aioutils._async_get_all_tasks(loop) @@ -115,7 +115,9 @@ def test_cumulative_timeouts_less_than_close_plus_buffer(): raised if something goes wrong. 
""" assert ( - aioutils._TASK_AWAIT_TIMEOUT + aioutils._GET_ALL_TASKS_TIMEOUT + aioutils._WAIT_FOR_LOOP_TASKS_TIMEOUT + aioutils._TASK_AWAIT_TIMEOUT + + aioutils._GET_ALL_TASKS_TIMEOUT + + aioutils._WAIT_FOR_LOOP_TASKS_TIMEOUT ) < 1 + _CLOSE_TIMEOUT + _LOADED_SYSTEM_TIMEOUT @@ -134,7 +136,9 @@ async def _saved_sleep_task(): def _run_in_loop(): aioutils.run_coro_with_timeout(_saved_sleep_task(), loop, 0.1) - with pytest.raises(EventLoopBlocked), patch.object(aioutils, "_LOADED_SYSTEM_TIMEOUT", 0.0): + with pytest.raises(EventLoopBlocked), patch.object( + aioutils, "_LOADED_SYSTEM_TIMEOUT", 0.0 + ): await loop.run_in_executor(None, _run_in_loop) assert task is not None diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index 73c5ab7e..4066eba4 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ -13,61 +13,92 @@ def test_cached_ip_addresses_wrapper(): """Test the cached_ip_addresses_wrapper.""" - assert ipaddress.cached_ip_addresses('') is None - assert ipaddress.cached_ip_addresses('foo') is None + assert ipaddress.cached_ip_addresses("") is None + assert ipaddress.cached_ip_addresses("foo") is None assert ( - str(ipaddress.cached_ip_addresses(b'&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F')) - == '2606:2800:220:1:248:1893:25c8:1946' + str( + ipaddress.cached_ip_addresses( + b"&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F" + ) + ) + == "2606:2800:220:1:248:1893:25c8:1946" ) - assert ipaddress.cached_ip_addresses('::1') == ipaddress.IPv6Address('::1') + assert ipaddress.cached_ip_addresses("::1") == ipaddress.IPv6Address("::1") - ipv4 = ipaddress.cached_ip_addresses('169.254.0.0') + ipv4 = ipaddress.cached_ip_addresses("169.254.0.0") assert ipv4 is not None assert ipv4.is_link_local is True assert ipv4.is_unspecified is False - ipv4 = ipaddress.cached_ip_addresses('0.0.0.0') + ipv4 = ipaddress.cached_ip_addresses("0.0.0.0") assert ipv4 is not None assert ipv4.is_link_local is False assert ipv4.is_unspecified is 
True - ipv6 = ipaddress.cached_ip_addresses('fe80::1') + ipv6 = ipaddress.cached_ip_addresses("fe80::1") assert ipv6 is not None assert ipv6.is_link_local is True assert ipv6.is_unspecified is False - ipv6 = ipaddress.cached_ip_addresses('0:0:0:0:0:0:0:0') + ipv6 = ipaddress.cached_ip_addresses("0:0:0:0:0:0:0:0") assert ipv6 is not None assert ipv6.is_link_local is False assert ipv6.is_unspecified is True -@pytest.mark.skipif(sys.version_info < (3, 9, 0), reason='scope_id is not supported') +@pytest.mark.skipif(sys.version_info < (3, 9, 0), reason="scope_id is not supported") def test_get_ip_address_object_from_record(): """Test the get_ip_address_object_from_record.""" # not link local - packed = b'&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F' + packed = b"&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F" record = DNSAddress( - 'domain.local', const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed, scope_id=3 + "domain.local", + const._TYPE_AAAA, + const._CLASS_IN | const._CLASS_UNIQUE, + 1, + packed, + scope_id=3, ) assert record.scope_id == 3 assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( - '2606:2800:220:1:248:1893:25c8:1946' + "2606:2800:220:1:248:1893:25c8:1946" ) # link local - packed = b'\xfe\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01' + packed = b"\xfe\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01" record = DNSAddress( - 'domain.local', const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed, scope_id=3 + "domain.local", + const._TYPE_AAAA, + const._CLASS_IN | const._CLASS_UNIQUE, + 1, + packed, + scope_id=3, ) assert record.scope_id == 3 - assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address('fe80::1%3') - record = DNSAddress('domain.local', const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed) + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( + "fe80::1%3" + ) + record = DNSAddress( 
+ "domain.local", + const._TYPE_AAAA, + const._CLASS_IN | const._CLASS_UNIQUE, + 1, + packed, + ) assert record.scope_id is None - assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address('fe80::1') + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( + "fe80::1" + ) record = DNSAddress( - 'domain.local', const._TYPE_A, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed, scope_id=0 + "domain.local", + const._TYPE_A, + const._CLASS_IN | const._CLASS_UNIQUE, + 1, + packed, + scope_id=0, ) assert record.scope_id == 0 # Ensure scope_id of 0 is not appended to the address - assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address('fe80::1') + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( + "fe80::1" + ) diff --git a/tests/utils/test_name.py b/tests/utils/test_name.py index 9604b775..d4c57c40 100644 --- a/tests/utils/test_name.py +++ b/tests/utils/test_name.py @@ -2,6 +2,7 @@ """Unit tests for zeroconf._utils.name.""" + import socket import pytest @@ -24,7 +25,9 @@ def test_service_type_name_overlong_full_name(): with pytest.raises(BadTypeInNameException): nameutils.service_type_name(f"{long_name}._tivo-videostream._tcp.local.") with pytest.raises(BadTypeInNameException): - nameutils.service_type_name(f"{long_name}._tivo-videostream._tcp.local.", strict=False) + nameutils.service_type_name( + f"{long_name}._tivo-videostream._tcp.local.", strict=False + ) @pytest.mark.parametrize( @@ -36,12 +39,19 @@ def test_service_type_name_overlong_full_name(): ) def test_service_type_name_non_strict_compliant_names(instance_name, service_type): """Test service_type_name for valid names, but not strict-compliant.""" - desc = {'path': '/~paulsm/'} - service_name = f'{instance_name}.{service_type}' - service_server = 'ash-1.local.' + desc = {"path": "/~paulsm/"} + service_name = f"{instance_name}.{service_type}" + service_server = "ash-1.local." 
service_address = socket.inet_aton("10.0.1.2") info = ServiceInfo( - service_type, service_name, 22, 0, 0, desc, service_server, addresses=[service_address] + service_type, + service_name, + 22, + 0, + 0, + desc, + service_server, + addresses=[service_address], ) assert info.get_name() == instance_name @@ -56,21 +66,25 @@ def test_service_type_name_non_strict_compliant_names(instance_name, service_typ def test_possible_types(): """Test possible types from name.""" - assert nameutils.possible_types('.') == set() - assert nameutils.possible_types('local.') == set() - assert nameutils.possible_types('_tcp.local.') == set() - assert nameutils.possible_types('_test-srvc-type._tcp.local.') == {'_test-srvc-type._tcp.local.'} - assert nameutils.possible_types('_any._tcp.local.') == {'_any._tcp.local.'} - assert nameutils.possible_types('.._x._tcp.local.') == {'_x._tcp.local.'} - assert nameutils.possible_types('x.y._http._tcp.local.') == {'_http._tcp.local.'} - assert nameutils.possible_types('1.2.3._mqtt._tcp.local.') == {'_mqtt._tcp.local.'} - assert nameutils.possible_types('x.sub._http._tcp.local.') == {'_http._tcp.local.'} - assert nameutils.possible_types('6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local.') == { - '_http._tcp.local.', - '_zget._http._tcp.local.', + assert nameutils.possible_types(".") == set() + assert nameutils.possible_types("local.") == set() + assert nameutils.possible_types("_tcp.local.") == set() + assert nameutils.possible_types("_test-srvc-type._tcp.local.") == { + "_test-srvc-type._tcp.local." 
+ } + assert nameutils.possible_types("_any._tcp.local.") == {"_any._tcp.local."} + assert nameutils.possible_types(".._x._tcp.local.") == {"_x._tcp.local."} + assert nameutils.possible_types("x.y._http._tcp.local.") == {"_http._tcp.local."} + assert nameutils.possible_types("1.2.3._mqtt._tcp.local.") == {"_mqtt._tcp.local."} + assert nameutils.possible_types("x.sub._http._tcp.local.") == {"_http._tcp.local."} + assert nameutils.possible_types( + "6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local." + ) == { + "_http._tcp.local.", + "_zget._http._tcp.local.", } - assert nameutils.possible_types('my._printer._sub._http._tcp.local.') == { - '_http._tcp.local.', - '_sub._http._tcp.local.', - '_printer._sub._http._tcp.local.', + assert nameutils.possible_types("my._printer._sub._http._tcp.local.") == { + "_http._tcp.local.", + "_sub._http._tcp.local.", + "_printer._sub._http._tcp.local.", } diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 29844d57..5a229b0d 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -2,6 +2,7 @@ """Unit tests for zeroconf._utils.net.""" + import errno import socket import unittest @@ -37,8 +38,14 @@ def _generate_mock_adapters(): def test_ip6_to_address_and_index(): """Test we can extract from mocked adapters.""" adapters = _generate_mock_adapters() - assert netutils.ip6_to_address_and_index(adapters, "2001:db8::") == (('2001:db8::', 1, 1), 1) - assert netutils.ip6_to_address_and_index(adapters, "2001:db8::%1") == (('2001:db8::', 1, 1), 1) + assert netutils.ip6_to_address_and_index(adapters, "2001:db8::") == ( + ("2001:db8::", 1, 1), + 1, + ) + assert netutils.ip6_to_address_and_index(adapters, "2001:db8::%1") == ( + ("2001:db8::", 1, 1), + 1, + ) with pytest.raises(RuntimeError): assert netutils.ip6_to_address_and_index(adapters, "2005:db8::") @@ -46,7 +53,7 @@ def test_ip6_to_address_and_index(): def test_interface_index_to_ip6_address(): """Test we can extract from mocked adapters.""" 
adapters = _generate_mock_adapters() - assert netutils.interface_index_to_ip6_address(adapters, 1) == ('2001:db8::', 1, 1) + assert netutils.interface_index_to_ip6_address(adapters, 1) == ("2001:db8::", 1, 1) # call with invalid adapter with pytest.raises(RuntimeError): @@ -60,12 +67,22 @@ def test_interface_index_to_ip6_address(): def test_ip6_addresses_to_indexes(): """Test we can extract from mocked adapters.""" interfaces = [1] - with patch("zeroconf._utils.net.ifaddr.get_adapters", return_value=_generate_mock_adapters()): - assert netutils.ip6_addresses_to_indexes(interfaces) == [(('2001:db8::', 1, 1), 1)] - - interfaces_2 = ['2001:db8::'] - with patch("zeroconf._utils.net.ifaddr.get_adapters", return_value=_generate_mock_adapters()): - assert netutils.ip6_addresses_to_indexes(interfaces_2) == [(('2001:db8::', 1, 1), 1)] + with patch( + "zeroconf._utils.net.ifaddr.get_adapters", + return_value=_generate_mock_adapters(), + ): + assert netutils.ip6_addresses_to_indexes(interfaces) == [ + (("2001:db8::", 1, 1), 1) + ] + + interfaces_2 = ["2001:db8::"] + with patch( + "zeroconf._utils.net.ifaddr.get_adapters", + return_value=_generate_mock_adapters(), + ): + assert netutils.ip6_addresses_to_indexes(interfaces_2) == [ + (("2001:db8::", 1, 1), 1) + ] def test_normalize_interface_choice_errors(): @@ -81,12 +98,19 @@ def test_normalize_interface_choice_errors(): @pytest.mark.parametrize( "errno,expected_result", - [(errno.EADDRINUSE, False), (errno.EADDRNOTAVAIL, False), (errno.EINVAL, False), (0, True)], + [ + (errno.EADDRINUSE, False), + (errno.EADDRNOTAVAIL, False), + (errno.EINVAL, False), + (0, True), + ], ) def test_add_multicast_member_socket_errors(errno, expected_result): """Test we handle socket errors when adding multicast members.""" if errno: - setsockopt_mock = unittest.mock.Mock(side_effect=OSError(errno, f"Error: {errno}")) + setsockopt_mock = unittest.mock.Mock( + side_effect=OSError(errno, f"Error: {errno}") + ) else: setsockopt_mock = 
unittest.mock.Mock() fileno_mock = unittest.mock.PropertyMock(return_value=10) @@ -118,22 +142,26 @@ def _log_error(*args): assert ( errors_logged[0][0] - == 'Support for dual V4-V6 sockets is not present, use IPVersion.V4 or IPVersion.V6' + == "Support for dual V4-V6 sockets is not present, use IPVersion.V4 or IPVersion.V6" ) -@pytest.mark.skipif(not hasattr(socket, 'SO_REUSEPORT'), reason="System does not have SO_REUSEPORT") +@pytest.mark.skipif( + not hasattr(socket, "SO_REUSEPORT"), reason="System does not have SO_REUSEPORT" +) def test_set_so_reuseport_if_available_is_present(): """Test that setting socket.SO_REUSEPORT only OSError errno.ENOPROTOOPT is trapped.""" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError): netutils.set_so_reuseport_if_available(sock) - with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): + with patch( + "socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None) + ): netutils.set_so_reuseport_if_available(sock) -@pytest.mark.skipif(hasattr(socket, 'SO_REUSEPORT'), reason="System has SO_REUSEPORT") +@pytest.mark.skipif(hasattr(socket, "SO_REUSEPORT"), reason="System has SO_REUSEPORT") def test_set_so_reuseport_if_available_not_present(): """Test that we do not try to set SO_REUSEPORT if it is not present.""" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) @@ -146,24 +174,36 @@ def test_set_mdns_port_socket_options_for_ip_version(): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # Should raise on EPERM always - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ('',), r.IPVersion.V4Only) + with pytest.raises(OSError), patch( + "socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None) + ): + netutils.set_mdns_port_socket_options_for_ip_version( + sock, ("",), 
r.IPVersion.V4Only + ) # Should raise on EINVAL always when bind address is not '' - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ('127.0.0.1',), r.IPVersion.V4Only) + with pytest.raises(OSError), patch( + "socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None) + ): + netutils.set_mdns_port_socket_options_for_ip_version( + sock, ("127.0.0.1",), r.IPVersion.V4Only + ) # Should not raise on EINVAL when bind address is '' with patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ('',), r.IPVersion.V4Only) + netutils.set_mdns_port_socket_options_for_ip_version( + sock, ("",), r.IPVersion.V4Only + ) def test_add_multicast_member(): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - interface = '127.0.0.1' + interface = "127.0.0.1" # EPERM should always raise - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)): + with pytest.raises(OSError), patch( + "socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None) + ): netutils.add_multicast_member(sock, interface) # EADDRINUSE should return False @@ -171,7 +211,9 @@ def test_add_multicast_member(): assert netutils.add_multicast_member(sock, interface) is False # EADDRNOTAVAIL should return False - with patch("socket.socket.setsockopt", side_effect=OSError(errno.EADDRNOTAVAIL, None)): + with patch( + "socket.socket.setsockopt", side_effect=OSError(errno.EADDRNOTAVAIL, None) + ): assert netutils.add_multicast_member(sock, interface) is False # EINVAL should return False @@ -179,20 +221,24 @@ def test_add_multicast_member(): assert netutils.add_multicast_member(sock, interface) is False # ENOPROTOOPT should return False - with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): + with patch( + "socket.socket.setsockopt", 
side_effect=OSError(errno.ENOPROTOOPT, None) + ): assert netutils.add_multicast_member(sock, interface) is False # ENODEV should raise for ipv4 - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None)): + with pytest.raises(OSError), patch( + "socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None) + ): netutils.add_multicast_member(sock, interface) is False # ENODEV should return False for ipv6 with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None)): - assert netutils.add_multicast_member(sock, ('2001:db8::', 1, 1)) is False # type: ignore[arg-type] + assert netutils.add_multicast_member(sock, ("2001:db8::", 1, 1)) is False # type: ignore[arg-type] # No IPv6 support should return False for IPv6 with patch("socket.inet_pton", side_effect=OSError()): - assert netutils.add_multicast_member(sock, ('2001:db8::', 1, 1)) is False # type: ignore[arg-type] + assert netutils.add_multicast_member(sock, ("2001:db8::", 1, 1)) is False # type: ignore[arg-type] # No error should return True with patch("socket.socket.setsockopt"): From 596edb2432b15ffbb5b90b724b6699c400a2a7d3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 5 Jul 2024 17:37:16 -0500 Subject: [PATCH 230/434] chore(pre-commit.ci): pre-commit autoupdate (#1381) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: J. 
Nick Koston --- .pre-commit-config.yaml | 10 +++++----- src/zeroconf/__init__.py | 11 ----------- src/zeroconf/_engine.py | 2 +- 3 files changed, 6 insertions(+), 17 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e4a88203..8fa230de 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,12 +9,12 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v2.32.4 + rev: v3.27.0 hooks: - id: commitizen stages: [commit-msg] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.6.0 hooks: - id: debug-statements - id: check-builtin-literals @@ -29,12 +29,12 @@ repos: - id: trailing-whitespace - id: debug-statements - repo: https://github.com/pre-commit/mirrors-prettier - rev: v2.7.1 + rev: v4.0.0-alpha.8 hooks: - id: prettier args: ["--tab-width", "2"] - repo: https://github.com/asottile/pyupgrade - rev: v2.37.3 + rev: v3.16.0 hooks: - id: pyupgrade args: [--py37-plus] @@ -53,7 +53,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v0.931 + rev: v1.10.1 hooks: - id: mypy additional_dependencies: [] diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 0c89a881..f3130307 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -20,8 +20,6 @@ USA """ -import sys - from ._cache import DNSCache # noqa # import needed for backwards compat from ._core import Zeroconf from ._dns import ( # noqa # import needed for backwards compat @@ -114,12 +112,3 @@ "NotRunningException", "ServiceNameAlreadyRegistered", ] - -if sys.version_info <= (3, 6): # pragma: no cover - raise ImportError( # pragma: no cover - """ -Python version > 3.6 required for python-zeroconf. 
-If you need support for Python 2 or Python 3.3-3.4 please use version 19.1 -If you need support for Python 3.5 please use version 0.28.0 - """ - ) diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 6083c19a..afe22f59 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -105,7 +105,7 @@ async def _async_create_endpoints(self) -> None: sender_sockets.append(s) for s in reader_sockets: - transport, protocol = await loop.create_datagram_endpoint( + transport, protocol = await loop.create_datagram_endpoint( # type: ignore[type-var] lambda: AsyncListener(self.zc), # type: ignore[arg-type, return-value] sock=s, ) From 144449223cc8b68a388376a2386b6bad02c647a7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 14:24:07 -1000 Subject: [PATCH 231/434] chore(pre-commit.ci): pre-commit autoupdate (#1383) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8fa230de..1e37e5a0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v3.27.0 + rev: v3.29.0 hooks: - id: commitizen stages: [commit-msg] @@ -34,12 +34,12 @@ repos: - id: prettier args: ["--tab-width", "2"] - repo: https://github.com/asottile/pyupgrade - rev: v3.16.0 + rev: v3.17.0 hooks: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.0 + rev: v0.6.2 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] @@ -49,11 +49,11 @@ repos: # hooks: # - id: codespell - repo: https://github.com/PyCQA/flake8 - rev: 7.1.0 + rev: 7.1.1 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.10.1 + rev: v1.11.2 hooks: - id: mypy 
additional_dependencies: [] From bddbe9e594483c23c2e6277c36f3156b54a9fa94 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 14:37:08 -1000 Subject: [PATCH 232/434] chore: create dependabot.yml (#1391) --- .github/dependabot.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..9d866e39 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + - package-ecosystem: "pip" # See documentation for possible values + directory: "/" # Location of package manifests + schedule: + interval: "weekly" From 98cfa83710e43880698353821bae61108b08cb2f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 26 Aug 2024 14:40:27 -1000 Subject: [PATCH 233/434] feat: python 3.13 support (#1390) --- .github/workflows/ci.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3ad892f2..56010b57 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v5 with: python-version: "3.9" - uses: pre-commit/action@v2.0.3 @@ -41,6 +41,7 @@ jobs: - "3.10" - "3.11" - "3.12" + - "3.13" - "pypy-3.8" - "pypy-3.9" os: @@ -69,10 +70,11 @@ jobs: - name: Install poetry run: pipx install poetry - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: "poetry" + allow-prereleases: true - name: Install Dependencies no cython if: ${{ matrix.extension == 'skip_cython' }} env: @@ -136,7 +138,7 @@ jobs: # Used to host cibuildwheel - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 - name: Install python-semantic-release run: pipx install python-semantic-release==7.34.6 @@ -159,7 +161,7 @@ jobs: platforms: arm64 - name: Build wheels - uses: pypa/cibuildwheel@v2.17.0 + uses: pypa/cibuildwheel@v2.20.0 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 From 7fb2bb21421c70db0eb288fa7e73d955f58b0f5d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 26 Aug 2024 14:46:34 -1000 Subject: [PATCH 234/434] feat: add classifier for python 3.13 (#1393) --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 1d88efbd..7518f3a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ classifiers=[ 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ] From 8d8b9ca395fed1deae5f115442a41fa1454ad3e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:10:45 -1000 Subject: [PATCH 235/434] chore(deps-dev): bump pytest from 7.4.4 to 8.3.2 (#1394) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 24 ++++++++++++------------ pyproject.toml | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index a9a7c6c2..f3dd4dfa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "async-timeout" @@ -205,13 +205,13 @@ files = [ [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -220,13 +220,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pytest" -version = "7.4.4" +version = "8.3.2" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, + {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, ] [package.dependencies] @@ -234,11 +234,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", 
"mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -320,4 +320,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "26c7f2ec91a34a0661a5511d2ade43511d80dd4f89e1aefbb59c9fafc2c92df2" +content-hash = "80115c5f3c7fd52ab1466c37903845b099ffc803b6ddc6329b612af96ee1d421" diff --git a/pyproject.toml b/pyproject.toml index 7518f3a3..a8949be8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,7 +56,7 @@ async-timeout = {version = ">=3.0.0", python = "<3.11"} ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] -pytest = "^7.2.0" +pytest = ">=7.2,<9.0" pytest-cov = "^4.0.0" pytest-asyncio = "^0.20.3" cython = "^3.0.5" From 764bdabe76099a7dbb206433310f04c55234f299 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:11:34 -1000 Subject: [PATCH 236/434] chore(deps-dev): bump coverage from 7.4.1 to 7.6.1 (#1396) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 126 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 73 insertions(+), 53 deletions(-) diff --git a/poetry.lock b/poetry.lock index f3dd4dfa..b1fe410e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,63 +24,83 @@ files = [ [[package]] name = "coverage" -version = "7.4.1" +version = "7.6.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7"}, - {file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61"}, - {file = 
"coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25"}, - {file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c"}, - {file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b"}, - {file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016"}, - {file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295"}, - {file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676"}, - {file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd"}, - {file = 
"coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1"}, - {file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6"}, - {file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5"}, - {file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581"}, - {file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156"}, - {file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", 
hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1"}, - {file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc"}, - {file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74"}, - {file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218"}, - {file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06"}, - {file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60"}, - {file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad"}, - {file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042"}, - {file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54"}, - {file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950"}, - {file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756"}, - {file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35"}, - {file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c"}, - {file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a"}, - {file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166"}, - {file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, + {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, + {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, + {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, + {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, + {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, + {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, + {file = 
"coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, + {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, + {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, + {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, + {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, + {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, + {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, + {file = 
"coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, + {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, + {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, + {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, + {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, + {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, + {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, + {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, + {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, + {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, + {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, + {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, + {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, + {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, + {file = 
"coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, + {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, + {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, + {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, + {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, + {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, + {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, + {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, + {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, + {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, + {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, + {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, + {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, ] [package.dependencies] From 0df2ce0e6f7313831da6a63d477019982d5df55c Mon Sep 17 00:00:00 2001 From: Alexandru Ciobanu <93059748+devbanu@users.noreply.github.com> Date: Mon, 26 Aug 2024 21:16:18 -0400 Subject: [PATCH 237/434] feat: enable building of arm64 macOS builds (#1384) Co-authored-by: Alex Ciobanu Co-authored-by: J. 
Nick Koston --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 56010b57..f9669424 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -128,7 +128,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-20.04, windows-2019, macOS-11] + os: [ubuntu-20.04, windows-2019, macos-12, macos-latest] steps: - uses: actions/checkout@v3 @@ -164,8 +164,8 @@ jobs: uses: pypa/cibuildwheel@v2.20.0 # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 - CIBW_BEFORE_ALL_LINUX: apt-get install -y gcc || yum install -y gcc || apk add gcc + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 cp38-*_arm64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 + CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 From 5145617db95e214d33d5c5e68d27ff011df3d38e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:29:02 -1000 Subject: [PATCH 238/434] chore(deps-dev): bump pytest-asyncio from 0.20.3 to 0.24.0 (#1400) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++------- pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index b1fe410e..af80c226 100644 --- a/poetry.lock +++ b/poetry.lock @@ -262,21 +262,21 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.20.3" +version = "0.24.0" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-asyncio-0.20.3.tar.gz", hash = 
"sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36"}, - {file = "pytest_asyncio-0.20.3-py3-none-any.whl", hash = "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442"}, + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [package.dependencies] -pytest = ">=6.1.0" +pytest = ">=8.2,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" @@ -340,4 +340,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "80115c5f3c7fd52ab1466c37903845b099ffc803b6ddc6329b612af96ee1d421" +content-hash = "259e5ec479b559f3c02fdb7224f17b4979b66419c1f82b273d837ecd75b743ac" diff --git a/pyproject.toml b/pyproject.toml index a8949be8..a5e84a15 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] pytest = ">=7.2,<9.0" pytest-cov = "^4.0.0" -pytest-asyncio = "^0.20.3" +pytest-asyncio = ">=0.20.3,<0.25.0" cython = "^3.0.5" setuptools = "^65.6.3" pytest-timeout = "^2.1.0" From d399a4ede0fd68214dc05eccae392e06ef49bd2c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 26 Aug 2024 15:33:23 -1000 Subject: [PATCH 239/434] chore: fix wheel builds with newer python (#1401) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f9669424..217cc11c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -128,7 +128,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-20.04, windows-2019, macos-12, macos-latest] + os: [ubuntu-latest, windows-2019, macos-12, macos-latest] steps: - uses: actions/checkout@v3 From a43753f9249c78564ea23e77103cd74e9522e305 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 15:36:34 -1000 Subject: [PATCH 240/434] chore: enable and fix additional ruff rules (#1399) --- bench/create_destory.py | 4 +- bench/incoming.py | 2 +- bench/txt_properties.py | 2 +- build_ext.py | 17 +- examples/async_apple_scanner.py | 20 +- examples/async_browser.py | 23 +- examples/async_service_info_request.py | 5 +- examples/browser.py | 9 +- examples/self_test.py | 9 +- pyproject.toml | 24 +- src/zeroconf/_cache.py | 29 +-- src/zeroconf/_core.py | 58 ++--- src/zeroconf/_dns.py | 34 +-- src/zeroconf/_engine.py | 17 +- src/zeroconf/_handlers/answers.py | 4 +- .../_handlers/multicast_outgoing_queue.py | 12 +- src/zeroconf/_handlers/query_handler.py | 56 ++--- src/zeroconf/_handlers/record_manager.py | 10 +- src/zeroconf/_history.py | 8 +- src/zeroconf/_listener.py | 21 +- src/zeroconf/_logger.py | 4 +- src/zeroconf/_protocol/incoming.py | 31 +-- src/zeroconf/_protocol/outgoing.py | 28 +-- src/zeroconf/_services/__init__.py | 8 +- src/zeroconf/_services/browser.py | 116 +++------ src/zeroconf/_services/info.py | 88 ++----- src/zeroconf/_services/registry.py | 4 +- src/zeroconf/_services/types.py | 4 +- src/zeroconf/_updates.py | 8 +- src/zeroconf/_utils/asyncio.py | 18 +- src/zeroconf/_utils/ipaddress.py | 8 +- src/zeroconf/_utils/name.py | 44 ++-- 
src/zeroconf/_utils/net.py | 89 ++----- src/zeroconf/asyncio.py | 23 +- src/zeroconf/const.py | 4 +- tests/__init__.py | 4 +- tests/services/test_browser.py | 199 ++++------------ tests/services/test_info.py | 220 ++++-------------- tests/services/test_types.py | 4 +- tests/test_asyncio.py | 50 +--- tests/test_cache.py | 5 +- tests/test_core.py | 71 ++---- tests/test_dns.py | 56 ++--- tests/test_engine.py | 36 +-- tests/test_exceptions.py | 13 +- tests/test_handlers.py | 213 +++++------------ tests/test_init.py | 4 +- tests/test_listener.py | 6 +- tests/test_protocol.py | 52 ++--- tests/test_services.py | 31 ++- tests/test_updates.py | 26 +-- tests/utils/test_asyncio.py | 8 +- tests/utils/test_ipaddress.py | 18 +- tests/utils/test_name.py | 12 +- tests/utils/test_net.py | 62 ++--- 55 files changed, 520 insertions(+), 1411 deletions(-) diff --git a/bench/create_destory.py b/bench/create_destory.py index 77d8af6f..6fde9ebe 100644 --- a/bench/create_destory.py +++ b/bench/create_destory.py @@ -18,9 +18,7 @@ async def _run() -> None: start = time.perf_counter() await _create_destroy(iterations) duration = time.perf_counter() - start - print( - f"Creating and destroying {iterations} Zeroconf instances took {duration} seconds" - ) + print(f"Creating and destroying {iterations} Zeroconf instances took {duration} seconds") asyncio.run(_run()) diff --git a/bench/incoming.py b/bench/incoming.py index d0cc3588..3edcfec2 100644 --- a/bench/incoming.py +++ b/bench/incoming.py @@ -178,7 +178,7 @@ def generate_packets() -> List[bytes]: def parse_incoming_message() -> None: for packet in packets: - DNSIncoming(packet).answers + DNSIncoming(packet).answers # noqa: B018 break diff --git a/bench/txt_properties.py b/bench/txt_properties.py index 792d5312..f9adeccf 100644 --- a/bench/txt_properties.py +++ b/bench/txt_properties.py @@ -14,7 +14,7 @@ def process_properties() -> None: info._properties = None - info.properties + info.properties # noqa: B018 count = 100000 diff --git 
a/build_ext.py b/build_ext.py index 4fecbdf1..26b4eb96 100644 --- a/build_ext.py +++ b/build_ext.py @@ -1,16 +1,19 @@ """Build optional cython modules.""" +import logging import os from distutils.command.build_ext import build_ext from typing import Any +_LOGGER = logging.getLogger(__name__) + class BuildExt(build_ext): def build_extensions(self) -> None: try: super().build_extensions() except Exception: - pass + _LOGGER.info("Failed to build cython extensions") def build(setup_kwargs: Any) -> None: @@ -20,8 +23,8 @@ def build(setup_kwargs: Any) -> None: from Cython.Build import cythonize setup_kwargs.update( - dict( - ext_modules=cythonize( + { + "ext_modules": cythonize( [ "src/zeroconf/_dns.py", "src/zeroconf/_cache.py", @@ -44,12 +47,10 @@ def build(setup_kwargs: Any) -> None: ], compiler_directives={"language_level": "3"}, # Python 3 ), - cmdclass=dict(build_ext=BuildExt), - ) + "cmdclass": {"build_ext": BuildExt}, + } ) - setup_kwargs["exclude_package_data"] = { - pkg: ["*.c"] for pkg in setup_kwargs["packages"] - } + setup_kwargs["exclude_package_data"] = {pkg: ["*.c"] for pkg in setup_kwargs["packages"]} except Exception: if os.environ.get("REQUIRE_CYTHON"): raise diff --git a/examples/async_apple_scanner.py b/examples/async_apple_scanner.py index ed549e01..29eb5f70 100644 --- a/examples/async_apple_scanner.py +++ b/examples/async_apple_scanner.py @@ -32,6 +32,8 @@ log = logging.getLogger(__name__) +_PENDING_TASKS: set[asyncio.Task] = set() + def async_on_service_state_change( zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange @@ -41,23 +43,21 @@ def async_on_service_state_change( return base_name = name[: -len(service_type) - 1] device_name = f"{base_name}.{DEVICE_INFO_SERVICE}" - asyncio.ensure_future(_async_show_service_info(zeroconf, service_type, name)) + task = asyncio.ensure_future(_async_show_service_info(zeroconf, service_type, name)) + _PENDING_TASKS.add(task) + task.add_done_callback(_PENDING_TASKS.discard) # Also 
probe for device info - asyncio.ensure_future( - _async_show_service_info(zeroconf, DEVICE_INFO_SERVICE, device_name) - ) + task = asyncio.ensure_future(_async_show_service_info(zeroconf, DEVICE_INFO_SERVICE, device_name)) + _PENDING_TASKS.add(task) + task.add_done_callback(_PENDING_TASKS.discard) -async def _async_show_service_info( - zeroconf: Zeroconf, service_type: str, name: str -) -> None: +async def _async_show_service_info(zeroconf: Zeroconf, service_type: str, name: str) -> None: info = AsyncServiceInfo(service_type, name) await info.async_request(zeroconf, 3000, question_type=DNSQuestionType.QU) print("Info from zeroconf.get_service_info: %r" % (info)) if info: - addresses = [ - "%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses() - ] + addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses()] print(" Name: %s" % name) print(" Addresses: %s" % ", ".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) diff --git a/examples/async_browser.py b/examples/async_browser.py index cd4c7786..bc5f252e 100644 --- a/examples/async_browser.py +++ b/examples/async_browser.py @@ -18,6 +18,8 @@ AsyncZeroconfServiceTypes, ) +_PENDING_TASKS: set[asyncio.Task] = set() + def async_on_service_state_change( zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange @@ -25,20 +27,17 @@ def async_on_service_state_change( print(f"Service {name} of type {service_type} state changed: {state_change}") if state_change is not ServiceStateChange.Added: return - asyncio.ensure_future(async_display_service_info(zeroconf, service_type, name)) + task = asyncio.ensure_future(async_display_service_info(zeroconf, service_type, name)) + _PENDING_TASKS.add(task) + task.add_done_callback(_PENDING_TASKS.discard) -async def async_display_service_info( - zeroconf: Zeroconf, service_type: str, name: str -) -> None: +async def async_display_service_info(zeroconf: Zeroconf, service_type: str, 
name: str) -> None: info = AsyncServiceInfo(service_type, name) await info.async_request(zeroconf, 3000) print("Info from zeroconf.get_service_info: %r" % (info)) if info: - addresses = [ - "%s:%d" % (addr, cast(int, info.port)) - for addr in info.parsed_scoped_addresses() - ] + addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_scoped_addresses()] print(" Name: %s" % name) print(" Addresses: %s" % ", ".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) @@ -66,9 +65,7 @@ async def async_run(self) -> None: services = ["_http._tcp.local.", "_hap._tcp.local."] if self.args.find: services = list( - await AsyncZeroconfServiceTypes.async_find( - aiozc=self.aiozc, ip_version=ip_version - ) + await AsyncZeroconfServiceTypes.async_find(aiozc=self.aiozc, ip_version=ip_version) ) print("\nBrowsing %s service(s), press Ctrl-C to exit...\n" % services) @@ -90,9 +87,7 @@ async def async_close(self) -> None: parser = argparse.ArgumentParser() parser.add_argument("--debug", action="store_true") - parser.add_argument( - "--find", action="store_true", help="Browse all available services" - ) + parser.add_argument("--find", action="store_true", help="Browse all available services") version_group = parser.add_mutually_exclusive_group() version_group.add_argument("--v6", action="store_true") version_group.add_argument("--v6-only", action="store_true") diff --git a/examples/async_service_info_request.py b/examples/async_service_info_request.py index fca58745..31864756 100644 --- a/examples/async_service_info_request.py +++ b/examples/async_service_info_request.py @@ -31,10 +31,7 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: for info in infos: print("Info for %s" % (info.name)) if info: - addresses = [ - "%s:%d" % (addr, cast(int, info.port)) - for addr in info.parsed_addresses() - ] + addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses()] print(" Addresses: %s" % ", 
".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) print(f" Server: {info.server}") diff --git a/examples/browser.py b/examples/browser.py index 1a801a44..aebf3f5d 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -29,10 +29,7 @@ def on_service_state_change( print("Info from zeroconf.get_service_info: %r" % (info)) if info: - addresses = [ - "%s:%d" % (addr, cast(int, info.port)) - for addr in info.parsed_scoped_addresses() - ] + addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_scoped_addresses()] print(" Addresses: %s" % ", ".join(addresses)) print(" Weight: %d, priority: %d" % (info.weight, info.priority)) print(f" Server: {info.server}") @@ -52,9 +49,7 @@ def on_service_state_change( parser = argparse.ArgumentParser() parser.add_argument("--debug", action="store_true") - parser.add_argument( - "--find", action="store_true", help="Browse all available services" - ) + parser.add_argument("--find", action="store_true", help="Browse all available services") version_group = parser.add_mutually_exclusive_group() version_group.add_argument("--v6-only", action="store_true") version_group.add_argument("--v4-only", action="store_true") diff --git a/examples/self_test.py b/examples/self_test.py index 63aca4f3..35f83b06 100755 --- a/examples/self_test.py +++ b/examples/self_test.py @@ -34,15 +34,10 @@ r.register_service(info) print(" Registration done.") print("2. Testing query of service information...") - print( - " Getting ZOE service: %s" - % (r.get_service_info("_http._tcp.local.", "ZOE._http._tcp.local.")) - ) + print(" Getting ZOE service: %s" % (r.get_service_info("_http._tcp.local.", "ZOE._http._tcp.local."))) print(" Query done.") print("3. Testing query of own service...") - queried_info = r.get_service_info( - "_http._tcp.local.", "My Service Name._http._tcp.local." 
- ) + queried_info = r.get_service_info("_http._tcp.local.", "My Service Name._http._tcp.local.") assert queried_info assert set(queried_info.parsed_addresses()) == expected print(f" Getting self: {queried_info}") diff --git a/pyproject.toml b/pyproject.toml index a5e84a15..bb53a1d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,10 +63,28 @@ cython = "^3.0.5" setuptools = "^65.6.3" pytest-timeout = "^2.1.0" -[tool.black] +[tool.ruff] +target-version = "py38" line-length = 110 -target_version = ['py37', 'py38', 'py39', 'py310', 'py311'] -skip_string_normalization = true + +[tool.ruff.lint] +ignore = [ + "S101", # use of assert + "S104", # S104 Possible binding to all interfaces + "UP031", # UP031 use f-strings -- too many to fix right now +] +select = [ + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "S", # flake8-bandit + "F", # pyflake + "E", # pycodestyle + "W", # pycodestyle + "UP", # pyupgrade + "I", # isort + "RUF", # ruff specific +] + [tool.pylint.BASIC] class-const-naming-style = "any" diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index 809be9c1..7db15117 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -119,12 +119,7 @@ def async_expire(self, now: _float) -> List[DNSRecord]: This function must be run in from event loop. """ - expired = [ - record - for records in self.cache.values() - for record in records - if record.is_expired(now) - ] + expired = [record for records in self.cache.values() for record in records if record.is_expired(now)] self.async_remove_records(expired) return expired @@ -140,9 +135,7 @@ def async_get_unique(self, entry: _UniqueRecordsType) -> Optional[DNSRecord]: return None return store.get(entry) - def async_all_by_details( - self, name: _str, type_: _int, class_: _int - ) -> List[DNSRecord]: + def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DNSRecord]: """Gets all matching entries by details. 
This function is not thread-safe and must be called from @@ -188,9 +181,7 @@ def get(self, entry: DNSEntry) -> Optional[DNSRecord]: return cached_entry return None - def get_by_details( - self, name: str, type_: _int, class_: _int - ) -> Optional[DNSRecord]: + def get_by_details(self, name: str, type_: _int, class_: _int) -> Optional[DNSRecord]: """Gets the first matching entry by details. Returns None if no entries match. Calling this function is not recommended as it will only @@ -211,19 +202,13 @@ def get_by_details( return cached_entry return None - def get_all_by_details( - self, name: str, type_: _int, class_: _int - ) -> List[DNSRecord]: + def get_all_by_details(self, name: str, type_: _int, class_: _int) -> List[DNSRecord]: """Gets all matching entries by details.""" key = name.lower() records = self.cache.get(key) if records is None: return [] - return [ - entry - for entry in list(records) - if type_ == entry.type and class_ == entry.class_ - ] + return [entry for entry in list(records) if type_ == entry.type and class_ == entry.class_] def entries_with_server(self, server: str) -> List[DNSRecord]: """Returns a list of entries whose server matches the name.""" @@ -233,9 +218,7 @@ def entries_with_name(self, name: str) -> List[DNSRecord]: """Returns a list of entries whose key matches the name.""" return list(self.cache.get(name.lower(), [])) - def current_entry_with_name_and_alias( - self, name: str, alias: str - ) -> Optional[DNSRecord]: + def current_entry_with_name_and_alias(self, name: str, alias: str) -> Optional[DNSRecord]: now = current_time_millis() for record in reversed(self.entries_with_name(name)): if ( diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 5386df63..b3ecd851 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -173,17 +173,11 @@ def __init__( self.done = False if apple_p2p and sys.platform != "darwin": - raise RuntimeError( - "Option `apple_p2p` is not supported on non-Apple platforms." 
- ) + raise RuntimeError("Option `apple_p2p` is not supported on non-Apple platforms.") self.unicast = unicast - listen_socket, respond_sockets = create_sockets( - interfaces, unicast, ip_version, apple_p2p=apple_p2p - ) - log.debug( - "Listen socket %s, respond sockets %s", listen_socket, respond_sockets - ) + listen_socket, respond_sockets = create_sockets(interfaces, unicast, ip_version, apple_p2p=apple_p2p) + log.debug("Listen socket %s, respond sockets %s", listen_socket, respond_sockets) self.engine = AsyncEngine(self, listen_socket, respond_sockets) @@ -193,9 +187,7 @@ def __init__( self.question_history = QuestionHistory() self.out_queue = MulticastOutgoingQueue(self, 0, _AGGREGATION_DELAY) - self.out_delay_queue = MulticastOutgoingQueue( - self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY - ) + self.out_delay_queue = MulticastOutgoingQueue(self, _ONE_SECOND, _PROTECTED_AGGREGATION_DELAY) self.query_handler = QueryHandler(self) self.record_manager = RecordManager(self) @@ -209,11 +201,7 @@ def __init__( @property def started(self) -> bool: """Check if the instance has started.""" - return bool( - not self.done - and self.engine.running_event - and self.engine.running_event.is_set() - ) + return bool(not self.done and self.engine.running_event and self.engine.running_event.is_set()) def start(self) -> None: """Start Zeroconf.""" @@ -332,9 +320,7 @@ def register_service( assert self.loop is not None run_coro_with_timeout( await_awaitable( - self.async_register_service( - info, ttl, allow_name_change, cooperating_responders, strict - ) + self.async_register_service(info, ttl, allow_name_change, cooperating_responders, strict) ), self.loop, _REGISTER_TIME * _REGISTER_BROADCASTS, @@ -362,13 +348,9 @@ async def async_register_service( info.set_server_if_missing() await self.async_wait_for_start() - await self.async_check_service( - info, allow_name_change, cooperating_responders, strict - ) + await self.async_check_service(info, allow_name_change, 
cooperating_responders, strict) self.registry.async_add(info) - return asyncio.ensure_future( - self._async_broadcast_service(info, _REGISTER_TIME, None) - ) + return asyncio.ensure_future(self._async_broadcast_service(info, _REGISTER_TIME, None)) def update_service(self, info: ServiceInfo) -> None: """Registers service information to the network with a default TTL. @@ -391,9 +373,7 @@ async def async_update_service(self, info: ServiceInfo) -> Awaitable: Zeroconf will then respond to requests for information for that service.""" self.registry.async_update(info) - return asyncio.ensure_future( - self._async_broadcast_service(info, _REGISTER_TIME, None) - ) + return asyncio.ensure_future(self._async_broadcast_service(info, _REGISTER_TIME, None)) async def async_get_service_info( self, @@ -427,9 +407,7 @@ async def _async_broadcast_service( for i in range(_REGISTER_BROADCASTS): if i != 0: await asyncio.sleep(millis_to_seconds(interval)) - self.async_send( - self.generate_service_broadcast(info, ttl, broadcast_addresses) - ) + self.async_send(self.generate_service_broadcast(info, ttl, broadcast_addresses)) def generate_service_broadcast( self, @@ -500,9 +478,7 @@ async def async_unregister_service(self, info: ServiceInfo) -> Awaitable: entries = self.registry.async_get_infos_server(info.server_key) broadcast_addresses = not bool(entries) return asyncio.ensure_future( - self._async_broadcast_service( - info, _UNREGISTER_TIME, 0, broadcast_addresses - ) + self._async_broadcast_service(info, _UNREGISTER_TIME, 0, broadcast_addresses) ) def generate_unregister_all_services(self) -> Optional[DNSOutgoing]: @@ -595,9 +571,7 @@ def add_listener( This function is threadsafe """ assert self.loop is not None - self.loop.call_soon_threadsafe( - self.record_manager.async_add_listener, listener, question - ) + self.loop.call_soon_threadsafe(self.record_manager.async_add_listener, listener, question) def remove_listener(self, listener: RecordUpdateListener) -> None: """Removes a 
listener. @@ -605,9 +579,7 @@ def remove_listener(self, listener: RecordUpdateListener) -> None: This function is threadsafe """ assert self.loop is not None - self.loop.call_soon_threadsafe( - self.record_manager.async_remove_listener, listener - ) + self.loop.call_soon_threadsafe(self.record_manager.async_remove_listener, listener) def async_add_listener( self, @@ -639,9 +611,7 @@ def send( ) -> None: """Sends an outgoing packet threadsafe.""" assert self.loop is not None - self.loop.call_soon_threadsafe( - self.async_send, out, addr, port, v6_flow_scope, transport - ) + self.loop.call_soon_threadsafe(self.async_send, out, addr, port, v6_flow_scope, transport) def async_send( self, diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index f85969a9..15daa709 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -33,9 +33,7 @@ _LEN_SHORT = 2 _LEN_INT = 4 -_BASE_MAX_SIZE = ( - _LEN_SHORT + _LEN_SHORT + _LEN_INT + _LEN_SHORT -) # type # class # ttl # length +_BASE_MAX_SIZE = _LEN_SHORT + _LEN_SHORT + _LEN_INT + _LEN_SHORT # type # class # ttl # length _NAME_COMPRESSION_MIN_SIZE = _LEN_BYTE * 2 _EXPIRE_FULL_TIME_MS = 1000 @@ -79,11 +77,7 @@ def _set_class(self, class_: _int) -> None: self.unique = (class_ & _CLASS_UNIQUE) != 0 def _dns_entry_matches(self, other) -> bool: # type: ignore[no-untyped-def] - return ( - self.key == other.key - and self.type == other.type - and self.class_ == other.class_ - ) + return self.key == other.key and self.type == other.type and self.class_ == other.class_ def __eq__(self, other: Any) -> bool: """Equality test on key (lowercase name), type, and class""" @@ -122,11 +116,7 @@ def __init__(self, name: str, type_: int, class_: int) -> None: def answered_by(self, rec: "DNSRecord") -> bool: """Returns true if the question is answered by the record""" - return ( - self.class_ == rec.class_ - and self.type in (rec.type, _TYPE_ANY) - and self.name == rec.name - ) + return self.class_ == rec.class_ and self.type in (rec.type, 
_TYPE_ANY) and self.name == rec.name def __hash__(self) -> int: return self._hash @@ -138,9 +128,7 @@ def __eq__(self, other: Any) -> bool: @property def max_size(self) -> int: """Maximum size of the question in the packet.""" - return ( - len(self.name.encode("utf-8")) + _LEN_BYTE + _LEN_SHORT + _LEN_SHORT - ) # type # class + return len(self.name.encode("utf-8")) + _LEN_BYTE + _LEN_SHORT + _LEN_SHORT # type # class @property def unicast(self) -> bool: @@ -328,11 +316,7 @@ def __eq__(self, other: Any) -> bool: def _eq(self, other) -> bool: # type: ignore[no-untyped-def] """Tests equality on cpu and os.""" - return ( - self.cpu == other.cpu - and self.os == other.os - and self._dns_entry_matches(other) - ) + return self.cpu == other.cpu and self.os == other.os and self._dns_entry_matches(other) def __hash__(self) -> int: """Hash to compare like DNSHinfo.""" @@ -457,9 +441,7 @@ def __init__( self.port = port self.server = server self.server_key = server.lower() - self._hash = hash( - (self.key, type_, self.class_, priority, weight, port, self.server_key) - ) + self._hash = hash((self.key, type_, self.class_, priority, weight, port, self.server_key)) def write(self, out: "DNSOutgoing") -> None: """Used in constructing an outgoing packet""" @@ -550,9 +532,7 @@ def __hash__(self) -> int: def __repr__(self) -> str: """String representation""" return self.to_string( - self.next_name - + "," - + "|".join([self.get_type(type_) for type_ in self.rdtypes]) + self.next_name + "," + "|".join([self.get_type(type_) for type_ in self.rdtypes]) ) diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index afe22f59..e807d9ef 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -110,13 +110,9 @@ async def _async_create_endpoints(self) -> None: sock=s, ) self.protocols.append(cast(AsyncListener, protocol)) - self.readers.append( - make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) - ) + 
self.readers.append(make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) if s in sender_sockets: - self.senders.append( - make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) - ) + self.senders.append(make_wrapped_transport(cast(asyncio.DatagramTransport, transport))) def _async_cache_cleanup(self) -> None: """Periodic cache cleanup.""" @@ -124,10 +120,7 @@ def _async_cache_cleanup(self) -> None: self.zc.question_history.async_expire(now) self.zc.record_manager.async_updates( now, - [ - RecordUpdate(record, record) - for record in self.zc.cache.async_expire(now) - ], + [RecordUpdate(record, record) for record in self.zc.cache.async_expire(now)], ) self.zc.record_manager.async_updates_complete(False) self._async_schedule_next_cache_cleanup() @@ -136,9 +129,7 @@ def _async_schedule_next_cache_cleanup(self) -> None: """Schedule the next cache cleanup.""" loop = self.loop assert loop is not None - self._cleanup_timer = loop.call_at( - loop.time() + _CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup - ) + self._cleanup_timer = loop.call_at(loop.time() + _CACHE_CLEANUP_INTERVAL, self._async_cache_cleanup) async def _async_close(self) -> None: """Cancel and wait for the cleanup task to finish.""" diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index 74efee2c..bab2d749 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -109,9 +109,7 @@ def construct_outgoing_unicast_answers( return out -def _add_answers_additionals( - out: DNSOutgoing, answers: _AnswerWithAdditionalsType -) -> None: +def _add_answers_additionals(out: DNSOutgoing, answers: _AnswerWithAdditionalsType) -> None: # Find additionals and suppress any additionals that are already in answers sending: Set[DNSRecord] = set(answers) # Answers are sorted to group names together to increase the chance diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py b/src/zeroconf/_handlers/multicast_outgoing_queue.py 
index 49242540..afcefc01 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.py +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -53,9 +53,7 @@ class MulticastOutgoingQueue: "_aggregation_delay", ) - def __init__( - self, zeroconf: "Zeroconf", additional_delay: _int, max_aggregation_delay: _int - ) -> None: + def __init__(self, zeroconf: "Zeroconf", additional_delay: _int, max_aggregation_delay: _int) -> None: self.zc = zeroconf self.queue: deque[AnswerGroup] = deque() # Additional delay is used to implement @@ -71,9 +69,7 @@ def async_add(self, now: _float, answers: _AnswerWithAdditionalsType) -> None: loop = self.zc.loop if TYPE_CHECKING: assert loop is not None - random_int = RAND_INT( - self._multicast_delay_random_min, self._multicast_delay_random_max - ) + random_int = RAND_INT(self._multicast_delay_random_min, self._multicast_delay_random_max) random_delay = random_int + self._additional_delay send_after = now + random_delay send_before = now + self._aggregation_delay + self._additional_delay @@ -87,9 +83,7 @@ def async_add(self, now: _float, answers: _AnswerWithAdditionalsType) -> None: last_group.answers.update(answers) return else: - loop.call_at( - loop.time() + millis_to_seconds(random_delay), self.async_ready - ) + loop.call_at(loop.time() + millis_to_seconds(random_delay), self.async_ready) self.queue.append(AnswerGroup(send_after, send_before, answers)) def _remove_answers_from_queue(self, answers: _AnswerWithAdditionalsType) -> None: diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index a2f5e9f5..f2e11236 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -102,9 +102,7 @@ class _QueryResponse: "_mcast_aggregate_last_second", ) - def __init__( - self, cache: DNSCache, questions: List[DNSQuestion], is_probe: bool, now: float - ) -> None: + def __init__(self, cache: DNSCache, questions: List[DNSQuestion], is_probe: bool, now: float) -> 
None: """Build a query response.""" self._is_probe = is_probe self._questions = questions @@ -159,12 +157,8 @@ def answers( ucast = {r: self._additionals[r] for r in self._ucast} mcast_now = {r: self._additionals[r] for r in self._mcast_now} mcast_aggregate = {r: self._additionals[r] for r in self._mcast_aggregate} - mcast_aggregate_last_second = { - r: self._additionals[r] for r in self._mcast_aggregate_last_second - } - return QuestionAnswers( - ucast, mcast_now, mcast_aggregate, mcast_aggregate_last_second - ) + mcast_aggregate_last_second = {r: self._additionals[r] for r in self._mcast_aggregate_last_second} + return QuestionAnswers(ucast, mcast_now, mcast_aggregate, mcast_aggregate_last_second) def _has_mcast_within_one_quarter_ttl(self, record: DNSRecord) -> bool: """Check to see if a record has been mcasted recently. @@ -190,9 +184,7 @@ def _has_mcast_record_in_last_second(self, record: DNSRecord) -> bool: if TYPE_CHECKING: record = cast(_UniqueRecordsType, record) maybe_entry = self._cache.async_get_unique(record) - return bool( - maybe_entry is not None and self._now - maybe_entry.created < _ONE_SECOND - ) + return bool(maybe_entry is not None and self._now - maybe_entry.created < _ONE_SECOND) class QueryHandler: @@ -278,16 +270,12 @@ def _add_address_answers( missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types if answers: if missing_types: - assert ( - service.server is not None - ), "Service server must be set for NSEC record." + assert service.server is not None, "Service server must be set for NSEC record." additionals.add(service._dns_nsec(list(missing_types), None)) for answer in answers: answer_set[answer] = additionals elif type_ in missing_types: - assert ( - service.server is not None - ), "Service server must be set for NSEC record." + assert service.server is not None, "Service server must be set for NSEC record." 
answer_set[service._dns_nsec(list(missing_types), None)] = set() def _answer_question( @@ -302,15 +290,11 @@ def _answer_question( answer_set: _AnswerWithAdditionalsType = {} if strategy_type == _ANSWER_STRATEGY_SERVICE_TYPE_ENUMERATION: - self._add_service_type_enumeration_query_answers( - types, answer_set, known_answers - ) + self._add_service_type_enumeration_query_answers(types, answer_set, known_answers) elif strategy_type == _ANSWER_STRATEGY_POINTER: self._add_pointer_answers(services, answer_set, known_answers) elif strategy_type == _ANSWER_STRATEGY_ADDRESS: - self._add_address_answers( - services, answer_set, known_answers, question.type - ) + self._add_address_answers(services, answer_set, known_answers, question.type) elif strategy_type == _ANSWER_STRATEGY_SERVICE: # Add recommended additional answers according to # https://tools.ietf.org/html/rfc6763#section-12.2. @@ -367,9 +351,7 @@ def async_response( # pylint: disable=unused-argument if not is_unicast: if known_answers_set is None: # pragma: no branch known_answers_set = known_answers.lookup_set() - self.question_history.add_question_at_time( - question, now, known_answers_set - ) + self.question_history.add_question_at_time(question, now, known_answers_set) answer_set = self._answer_question( question, strategy.strategy_type, @@ -415,18 +397,14 @@ def _get_answer_strategies( services = self.registry.async_get_infos_type(question_lower_name) if services: strategies.append( - _AnswerStrategy( - question, _ANSWER_STRATEGY_POINTER, _EMPTY_TYPES_LIST, services - ) + _AnswerStrategy(question, _ANSWER_STRATEGY_POINTER, _EMPTY_TYPES_LIST, services) ) if type_ in (_TYPE_A, _TYPE_AAAA, _TYPE_ANY): services = self.registry.async_get_infos_server(question_lower_name) if services: strategies.append( - _AnswerStrategy( - question, _ANSWER_STRATEGY_ADDRESS, _EMPTY_TYPES_LIST, services - ) + _AnswerStrategy(question, _ANSWER_STRATEGY_ADDRESS, _EMPTY_TYPES_LIST, services) ) if type_ in (_TYPE_SRV, _TYPE_TXT, 
_TYPE_ANY): @@ -477,23 +455,17 @@ def handle_assembled_query( if question_answers.ucast: questions = first_packet._questions id_ = first_packet.id - out = construct_outgoing_unicast_answers( - question_answers.ucast, ucast_source, questions, id_ - ) + out = construct_outgoing_unicast_answers(question_answers.ucast, ucast_source, questions, id_) # When sending unicast, only send back the reply # via the same socket that it was recieved from # as we know its reachable from that socket self.zc.async_send(out, addr, port, v6_flow_scope, transport) if question_answers.mcast_now: - self.zc.async_send( - construct_outgoing_multicast_answers(question_answers.mcast_now) - ) + self.zc.async_send(construct_outgoing_multicast_answers(question_answers.mcast_now)) if question_answers.mcast_aggregate: self.out_queue.async_add(first_packet.now, question_answers.mcast_aggregate) if question_answers.mcast_aggregate_last_second: # https://datatracker.ietf.org/doc/html/rfc6762#section-14 # If we broadcast it in the last second, we have to delay # at least a second before we send it again - self.out_delay_queue.async_add( - first_packet.now, question_answers.mcast_aggregate_last_second - ) + self.out_delay_queue.async_add(first_packet.now, question_answers.mcast_aggregate_last_second) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 86286dec..8ae82ba5 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -97,11 +97,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: # level of rate limit and safe guards so we use 1/4 of the recommended value. 
record_type = record.type record_ttl = record.ttl - if ( - record_ttl - and record_type == _TYPE_PTR - and record_ttl < _DNS_PTR_MIN_TTL - ): + if record_ttl and record_type == _TYPE_PTR and record_ttl < _DNS_PTR_MIN_TTL: log.debug( "Increasing effective ttl of %s to minimum of %s to protect against excessive refreshes.", record, @@ -132,9 +128,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: removes.add(record) if unique_types: - cache.async_mark_unique_records_older_than_1s_to_expire( - unique_types, answers, now - ) + cache.async_mark_unique_records_older_than_1s_to_expire(unique_types, answers, now) if updates: self.async_updates(now, updates) diff --git a/src/zeroconf/_history.py b/src/zeroconf/_history.py index 2e58b14e..aa28519c 100644 --- a/src/zeroconf/_history.py +++ b/src/zeroconf/_history.py @@ -38,15 +38,11 @@ def __init__(self) -> None: """Init a new QuestionHistory.""" self._history: Dict[DNSQuestion, Tuple[float, Set[DNSRecord]]] = {} - def add_question_at_time( - self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord] - ) -> None: + def add_question_at_time(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> None: """Remember a question with known answers.""" self._history[question] = (now, known_answers) - def suppresses( - self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord] - ) -> bool: + def suppresses(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> bool: """Check to see if a question should be suppressed. 
https://datatracker.ietf.org/doc/html/rfc6762#section-7.3 diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 2956ad52..19cca8df 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -119,7 +119,8 @@ def _process_datagram_at_time( # Guard against duplicate packets if debug: log.debug( - "Ignoring duplicate message with no unicast questions received from %s [socket %s] (%d bytes) as [%r]", + "Ignoring duplicate message with no unicast questions" + " received from %s [socket %s] (%d bytes) as [%r]", addrs, self.sock_description, data_len, @@ -139,9 +140,7 @@ def _process_datagram_at_time( # https://github.com/python/mypy/issues/1178 addr, port, flow, scope = addrs # type: ignore if debug: # pragma: no branch - log.debug( - "IPv6 scope_id %d associated to the receiving interface", scope - ) + log.debug("IPv6 scope_id %d associated to the receiving interface", scope) v6_flow_scope = (flow, scope) addr_port = (addr, port) @@ -204,7 +203,7 @@ def handle_query_or_defer( if incoming.data == msg.data: return deferred.append(msg) - delay = millis_to_seconds(random.randint(*_TC_DELAY_RANDOM_INTERVAL)) + delay = millis_to_seconds(random.randint(*_TC_DELAY_RANDOM_INTERVAL)) # noqa: S311 loop = self.zc.loop assert loop is not None self._cancel_any_timers_for_addr(addr) @@ -237,9 +236,7 @@ def _respond_query( if msg: packets.append(msg) - self._query_handler.handle_assembled_query( - packets, addr, port, transport, v6_flow_scope - ) + self._query_handler.handle_assembled_query(packets, addr, port, transport, v6_flow_scope) def error_received(self, exc: Exception) -> None: """Likely socket closed or IPv6.""" @@ -251,13 +248,9 @@ def error_received(self, exc: Exception) -> None: QuietLogger.log_exception_once(exc, msg_str, exc) def connection_made(self, transport: asyncio.BaseTransport) -> None: - wrapped_transport = make_wrapped_transport( - cast(asyncio.DatagramTransport, transport) - ) + wrapped_transport = 
make_wrapped_transport(cast(asyncio.DatagramTransport, transport)) self.transport = wrapped_transport - self.sock_description = ( - f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" - ) + self.sock_description = f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" def connection_lost(self, exc: Optional[Exception]) -> None: """Handle connection lost.""" diff --git a/src/zeroconf/_logger.py b/src/zeroconf/_logger.py index 9e726107..1556522e 100644 --- a/src/zeroconf/_logger.py +++ b/src/zeroconf/_logger.py @@ -23,7 +23,7 @@ import logging import sys -from typing import Any, Dict, Union, cast +from typing import Any, ClassVar, Dict, Union, cast log = logging.getLogger(__name__.split(".", maxsplit=1)[0]) log.addHandler(logging.NullHandler()) @@ -38,7 +38,7 @@ def set_logger_level_if_unset() -> None: class QuietLogger: - _seen_logs: Dict[str, Union[int, tuple]] = {} + _seen_logs: ClassVar[Dict[str, Union[int, tuple]]] = {} @classmethod def log_exception_warning(cls, *logger_data: Any) -> None: diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 0ad6efce..8670b0df 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -279,12 +279,7 @@ def _read_others(self) -> None: # ttl is an unsigned long in network order https://www.rfc-editor.org/errata/eid2130 type_ = view[offset] << 8 | view[offset + 1] class_ = view[offset + 2] << 8 | view[offset + 3] - ttl = ( - view[offset + 4] << 24 - | view[offset + 5] << 16 - | view[offset + 6] << 8 - | view[offset + 7] - ) + ttl = view[offset + 4] << 24 | view[offset + 5] << 16 | view[offset + 6] << 8 | view[offset + 7] length = view[offset + 8] << 8 | view[offset + 9] end = self.offset + length rec = None @@ -311,15 +306,11 @@ def _read_record( ) -> Optional[DNSRecord]: """Read known records types and skip unknown ones.""" if type_ == _TYPE_A: - return DNSAddress( - domain, type_, class_, ttl, self._read_string(4), None, self.now - ) + return 
DNSAddress(domain, type_, class_, ttl, self._read_string(4), None, self.now) if type_ in (_TYPE_CNAME, _TYPE_PTR): return DNSPointer(domain, type_, class_, ttl, self._read_name(), self.now) if type_ == _TYPE_TXT: - return DNSText( - domain, type_, class_, ttl, self._read_string(length), self.now - ) + return DNSText(domain, type_, class_, ttl, self._read_string(length), self.now) if type_ == _TYPE_SRV: view = self.view offset = self.offset @@ -399,9 +390,7 @@ def _read_name(self) -> str: labels: List[str] = [] seen_pointers: Set[int] = set() original_offset = self.offset - self.offset = self._decode_labels_at_offset( - original_offset, labels, seen_pointers - ) + self.offset = self._decode_labels_at_offset(original_offset, labels, seen_pointers) self._name_cache[original_offset] = labels name = ".".join(labels) + "." if len(name) > MAX_NAME_LENGTH: @@ -410,9 +399,7 @@ def _read_name(self) -> str: ) return name - def _decode_labels_at_offset( - self, off: _int, labels: List[str], seen_pointers: Set[int] - ) -> int: + def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: Set[int]) -> int: # This is a tight loop that is called frequently, small optimizations can make a difference. 
view = self.view while off < self._data_len: @@ -422,9 +409,7 @@ def _decode_labels_at_offset( if length < 0x40: label_idx = off + DNS_COMPRESSION_HEADER_LEN - labels.append( - self.data[label_idx : label_idx + length].decode("utf-8", "replace") - ) + labels.append(self.data[label_idx : label_idx + length].decode("utf-8", "replace")) off += DNS_COMPRESSION_HEADER_LEN + length continue @@ -462,6 +447,4 @@ def _decode_labels_at_offset( ) return off + DNS_COMPRESSION_POINTER_LEN - raise IncomingDecodeError( - f"Corrupt packet received while decoding name from {self.source}" - ) + raise IncomingDecodeError(f"Corrupt packet received while decoding name from {self.source}") diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index 66b526cc..9e9a5c87 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -151,9 +151,7 @@ def add_answer(self, inp: DNSIncoming, record: DNSRecord) -> None: def add_answer_at_time(self, record: Optional[DNSRecord], now: float_) -> None: """Adds an answer if it does not expire by a certain time""" now_double = now - if record is not None and ( - now_double == 0 or not record.is_expired(now_double) - ): + if record is not None and (now_double == 0 or not record.is_expired(now_double)): self.answers.append((record, now)) def add_authorative_answer(self, record: DNSPointer) -> None: @@ -292,9 +290,7 @@ def write_name(self, name: str_) -> None: return if name_length == 0: name_length = len(name.encode("utf-8")) - self.names[partial_name] = ( - start_size + name_length - len(partial_name.encode("utf-8")) - ) + self.names[partial_name] = start_size + name_length - len(partial_name.encode("utf-8")) self._write_utf(labels[count]) # this is the end of a name @@ -349,9 +345,7 @@ def _write_record(self, record: DNSRecord_, now: float_) -> bool: self._replace_short(index, length) return self._check_data_limit_or_rollback(start_data_length, start_size) - def _check_data_limit_or_rollback( 
- self, start_data_length: int_, start_size: int_ - ) -> bool: + def _check_data_limit_or_rollback(self, start_data_length: int_, start_size: int_) -> bool: """Check data limit, if we go over, then rollback and return False.""" len_limit = _MAX_MSG_ABSOLUTE if self.allow_long else _MAX_MSG_TYPICAL self.allow_long = False @@ -369,9 +363,7 @@ def _check_data_limit_or_rollback( self.size = start_size start_size_int = start_size - rollback_names = [ - name for name, idx in self.names.items() if idx >= start_size_int - ] + rollback_names = [name for name, idx in self.names.items() if idx >= start_size_int] for name in rollback_names: del self.names[name] return False @@ -392,9 +384,7 @@ def _write_answers_from_offset(self, answer_offset: int_) -> int: answers_written += 1 return answers_written - def _write_records_from_offset( - self, records: Sequence[DNSRecord], offset: int_ - ) -> int: + def _write_records_from_offset(self, records: Sequence[DNSRecord], offset: int_) -> int: records_written = 0 for record in records[offset:]: if not self._write_record(record, 0): @@ -458,12 +448,8 @@ def packets(self) -> List[bytes]: questions_written = self._write_questions_from_offset(questions_offset) answers_written = self._write_answers_from_offset(answer_offset) - authorities_written = self._write_records_from_offset( - self.authorities, authority_offset - ) - additionals_written = self._write_records_from_offset( - self.additionals, additional_offset - ) + authorities_written = self._write_records_from_offset(self.authorities, authority_offset) + additionals_written = self._write_records_from_offset(self.additionals, additional_offset) made_progress = bool(self.data) diff --git a/src/zeroconf/_services/__init__.py b/src/zeroconf/_services/__init__.py index 9812c6f3..7a6bddeb 100644 --- a/src/zeroconf/_services/__init__.py +++ b/src/zeroconf/_services/__init__.py @@ -66,14 +66,10 @@ class SignalRegistrationInterface: def __init__(self, handlers: List[Callable[..., None]]) -> 
None: self._handlers = handlers - def register_handler( - self, handler: Callable[..., None] - ) -> "SignalRegistrationInterface": + def register_handler(self, handler: Callable[..., None]) -> "SignalRegistrationInterface": self._handlers.append(handler) return self - def unregister_handler( - self, handler: Callable[..., None] - ) -> "SignalRegistrationInterface": + def unregister_handler(self, handler: Callable[..., None]) -> "SignalRegistrationInterface": self._handlers.remove(handler) return self diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 1f0524f3..30361528 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -28,7 +28,7 @@ import time import warnings from functools import partial -from types import TracebackType # noqa # used in type hints +from types import TracebackType # used in type hints from typing import ( TYPE_CHECKING, Any, @@ -197,9 +197,7 @@ def __init__(self, now_millis: float, multicast: bool) -> None: self.out = DNSOutgoing(_FLAGS_QR_QUERY, multicast) self.bytes = 0 - def add( - self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNSPointer] - ) -> None: + def add(self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNSPointer]) -> None: """Add a new set of questions and known answers to the outgoing.""" self.out.add_question(question) for answer in answers: @@ -220,9 +218,7 @@ def group_ptr_queries_with_known_answers( so we try to keep all the known answers in the same packet as the questions. """ - return _group_ptr_queries_with_known_answers( - now, multicast, question_with_known_answers - ) + return _group_ptr_queries_with_known_answers(now, multicast, question_with_known_answers) def _group_ptr_queries_with_known_answers( @@ -237,10 +233,7 @@ def _group_ptr_queries_with_known_answers( # goal of this algorithm is to quickly bucket the query + known answers without # the overhead of actually constructing the packets. 
query_by_size: Dict[DNSQuestion, int] = { - question: ( - question.max_size - + sum(answer.max_size_compressed for answer in known_answers) - ) + question: (question.max_size + sum(answer.max_size_compressed for answer in known_answers)) for question, known_answers in question_with_known_answers.items() } max_bucket_size = _MAX_MSG_TYPICAL - _DNS_PACKET_HEADER_LEN @@ -276,9 +269,7 @@ def generate_service_query( ) -> List[DNSOutgoing]: """Generate a service query for sending with zeroconf.send.""" questions_with_known_answers: _QuestionWithKnownAnswers = {} - qu_question = ( - not multicast if question_type is None else question_type is QU_QUESTION - ) + qu_question = not multicast if question_type is None else question_type is QU_QUESTION question_history = zc.question_history cache = zc.cache for type_ in types_: @@ -289,9 +280,7 @@ def generate_service_query( for record in cache.get_all_by_details(type_, _TYPE_PTR, _CLASS_IN) if not record.is_stale(now_millis) } - if not qu_question and question_history.suppresses( - question, now_millis, known_answers - ): + if not qu_question and question_history.suppresses(question, now_millis, known_answers): log.debug("Asking %s was suppressed by the question history", question) continue if TYPE_CHECKING: @@ -302,9 +291,7 @@ def generate_service_query( if not qu_question: question_history.add_question_at_time(question, now_millis, known_answers) - return _group_ptr_queries_with_known_answers( - now_millis, multicast, questions_with_known_answers - ) + return _group_ptr_queries_with_known_answers(now_millis, multicast, questions_with_known_answers) def _on_change_dispatcher( @@ -325,9 +312,10 @@ def _service_state_changed_from_listener( assert listener is not None if not hasattr(listener, "update_service"): warnings.warn( - "%r has no update_service method. Provide one (it can be empty if you " - "don't care about the updates), it'll become mandatory." % (listener,), + f"{listener!r} has no update_service method. 
Provide one (it can be empty if you " + "don't care about the updates), it'll become mandatory.", FutureWarning, + stacklevel=1, ) return partial(_on_change_dispatcher, listener) @@ -379,9 +367,7 @@ def __init__( self._next_scheduled_for_alias: Dict[str, _ScheduledPTRQuery] = {} self._query_heap: list[_ScheduledPTRQuery] = [] self._next_run: Optional[asyncio.TimerHandle] = None - self._clock_resolution_millis = ( - time.get_clock_info("monotonic").resolution * 1000 - ) + self._clock_resolution_millis = time.get_clock_info("monotonic").resolution * 1000 self._question_type = question_type def start(self, loop: asyncio.AbstractEventLoop) -> None: @@ -394,9 +380,7 @@ def start(self, loop: asyncio.AbstractEventLoop) -> None: also delay the first query of the series by a randomly chosen amount in the range 20-120 ms. """ - start_delay = millis_to_seconds( - random.randint(*self._first_random_delay_interval) - ) + start_delay = millis_to_seconds(random.randint(*self._first_random_delay_interval)) # noqa: S311 self._loop = loop self._next_run = loop.call_later(start_delay, self._process_startup_queries) @@ -485,9 +469,7 @@ def _process_startup_queries(self) -> None: now_millis = current_time_millis() # At first we will send STARTUP_QUERIES queries to get the cache populated - self.async_send_ready_queries( - self._startup_queries_sent == 0, now_millis, self._types - ) + self.async_send_ready_queries(self._startup_queries_sent == 0, now_millis, self._types) self._startup_queries_sent += 1 # Once we finish sending the initial queries we will @@ -500,9 +482,7 @@ def _process_startup_queries(self) -> None: ) return - self._next_run = self._loop.call_later( - self._startup_queries_sent**2, self._process_startup_queries - ) + self._next_run = self._loop.call_later(self._startup_queries_sent**2, self._process_startup_queries) def _process_ready_types(self) -> None: """Generate a list of ready types that is due and schedule the next time.""" @@ -543,9 +523,7 @@ def 
_process_ready_types(self) -> None: schedule_rescue.append(query) for query in schedule_rescue: - self.schedule_rescue_query( - query, now_millis, RESCUE_RECORD_RETRY_TTL_PERCENTAGE - ) + self.schedule_rescue_query(query, now_millis, RESCUE_RECORD_RETRY_TTL_PERCENTAGE) if ready_types: self.async_send_ready_queries(False, now_millis, ready_types) @@ -557,9 +535,7 @@ def _process_ready_types(self) -> None: else: next_when_millis = next_time_millis - self._next_run = self._loop.call_at( - millis_to_seconds(next_when_millis), self._process_ready_types - ) + self._next_run = self._loop.call_at(millis_to_seconds(next_when_millis), self._process_ready_types) def async_send_ready_queries( self, first_request: bool, now_millis: float_, ready_types: Set[str] @@ -569,14 +545,8 @@ def async_send_ready_queries( # https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 since we are # just starting up and we know our cache is likely empty. This ensures # the next outgoing will be sent with the known answers list. - question_type = ( - QU_QUESTION - if self._question_type is None and first_request - else self._question_type - ) - outs = generate_service_query( - self._zc, now_millis, ready_types, self._multicast, question_type - ) + question_type = QU_QUESTION if self._question_type is None and first_request else self._question_type + outs = generate_service_query(self._zc, now_millis, ready_types, self._multicast, question_type) if outs: for out in outs: self._zc.async_send(out, self._addr, self._port) @@ -667,13 +637,9 @@ def _async_start(self) -> None: Must be called by uses of this base class after they have finished setting their properties. 
""" - self.zc.async_add_listener( - self, [DNSQuestion(type_, _TYPE_PTR, _CLASS_IN) for type_ in self.types] - ) + self.zc.async_add_listener(self, [DNSQuestion(type_, _TYPE_PTR, _CLASS_IN) for type_ in self.types]) # Only start queries after the listener is installed - self._query_sender_task = asyncio.ensure_future( - self._async_start_query_sender() - ) + self._query_sender_task = asyncio.ensure_future(self._async_start_query_sender()) @property def service_state_changed(self) -> SignalRegistrationInterface: @@ -682,9 +648,7 @@ def service_state_changed(self) -> SignalRegistrationInterface: def _names_matching_types(self, names: Iterable[str]) -> List[Tuple[str, str]]: """Return the type and name for records matching the types we are browsing.""" return [ - (type_, name) - for name in names - for type_ in self.types.intersection(cached_possible_types(name)) + (type_, name) for name in names for type_ in self.types.intersection(cached_possible_types(name)) ] def _enqueue_callback( @@ -702,16 +666,11 @@ def _enqueue_callback( state_change is SERVICE_STATE_CHANGE_REMOVED and self._pending_handlers.get(key) is not SERVICE_STATE_CHANGE_ADDED ) - or ( - state_change is SERVICE_STATE_CHANGE_UPDATED - and key not in self._pending_handlers - ) + or (state_change is SERVICE_STATE_CHANGE_UPDATED and key not in self._pending_handlers) ): self._pending_handlers[key] = state_change - def async_update_records( - self, zc: "Zeroconf", now: float_, records: List[RecordUpdate] - ) -> None: + def async_update_records(self, zc: "Zeroconf", now: float_, records: List[RecordUpdate]) -> None: """Callback invoked by Zeroconf when new information arrives. Updates information required by browser in the Zeroconf cache. 
@@ -729,18 +688,12 @@ def async_update_records( if TYPE_CHECKING: record = cast(DNSPointer, record) pointer = record - for type_ in self.types.intersection( - cached_possible_types(pointer.name) - ): + for type_ in self.types.intersection(cached_possible_types(pointer.name)): if old_record is None: - self._enqueue_callback( - SERVICE_STATE_CHANGE_ADDED, type_, pointer.alias - ) + self._enqueue_callback(SERVICE_STATE_CHANGE_ADDED, type_, pointer.alias) self.query_scheduler.reschedule_ptr_first_refresh(pointer) elif pointer.is_expired(now): - self._enqueue_callback( - SERVICE_STATE_CHANGE_REMOVED, type_, pointer.alias - ) + self._enqueue_callback(SERVICE_STATE_CHANGE_REMOVED, type_, pointer.alias) self.query_scheduler.cancel_ptr_refresh(pointer) else: self.query_scheduler.reschedule_ptr_first_refresh(pointer) @@ -752,10 +705,7 @@ def async_update_records( if record_type in _ADDRESS_RECORD_TYPES: cache = self._cache - names = { - service.name - for service in cache.async_entries_with_server(record.name) - } + names = {service.name for service in cache.async_entries_with_server(record.name)} # Iterate through the DNSCache and callback any services that use this address for type_, name in self._names_matching_types(names): self._enqueue_callback(SERVICE_STATE_CHANGE_UPDATED, type_, name) @@ -777,9 +727,7 @@ def async_update_records_complete(self) -> None: self._fire_service_state_changed_event(pending) self._pending_handlers.clear() - def _fire_service_state_changed_event( - self, event: Tuple[Tuple[str, str], ServiceStateChange] - ) -> None: + def _fire_service_state_changed_event(self, event: Tuple[Tuple[str, str], ServiceStateChange]) -> None: """Fire a service state changed event. 
When running with ServiceBrowser, this will happen in the dedicated @@ -801,9 +749,7 @@ def _async_cancel(self) -> None: self.done = True self.query_scheduler.stop() self.zc.async_remove_listener(self) - assert ( - self._query_sender_task is not None - ), "Attempted to cancel a browser that was not started" + assert self._query_sender_task is not None, "Attempted to cancel a browser that was not started" self._query_sender_task.cancel() self._query_sender_task = None @@ -836,9 +782,7 @@ def __init__( if not zc.loop.is_running(): raise RuntimeError("The event loop is not running") threading.Thread.__init__(self) - super().__init__( - zc, type_, handlers, listener, addr, port, delay, question_type - ) + super().__init__(zc, type_, handlers, listener, addr, port, delay, question_type) # Add the queue before the listener is installed in _setup # to ensure that events run in the dedicated thread and do # not block the event loop diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 66313afc..2fc9dfc8 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -179,9 +179,7 @@ def __init__( ) -> None: # Accept both none, or one, but not both. 
if addresses is not None and parsed_addresses is not None: - raise TypeError( - "addresses and parsed_addresses cannot be provided together" - ) + raise TypeError("addresses and parsed_addresses cannot be provided together") if not type_.endswith(service_type_name(name, strict=False)): raise BadTypeInNameException self.interface_index = interface_index @@ -251,11 +249,7 @@ def addresses(self, value: List[bytes]) -> None: self._get_address_and_nsec_records_cache = None for address in value: - if ( - IPADDRESS_SUPPORTS_SCOPE_ID - and len(address) == 16 - and self.interface_index is not None - ): + if IPADDRESS_SUPPORTS_SCOPE_ID and len(address) == 16 and self.interface_index is not None: addr = ip_bytes_and_scope_to_address(address, self.interface_index) else: addr = cached_ip_addresses(address) @@ -299,9 +293,7 @@ def async_clear_cache(self) -> None: self._dns_text_cache = None self._get_address_and_nsec_records_cache = None - async def async_wait( - self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] = None - ) -> None: + async def async_wait(self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: """Calling task waits for a given number of milliseconds or until notified.""" if not self._new_records_futures: self._new_records_futures = set() @@ -359,10 +351,7 @@ def parsed_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: This means the first address will always be the most recently added address of the given IP version. 
""" - return [ - str_without_scope_id(addr) - for addr in self._ip_addresses_by_version_value(version.value) - ] + return [str_without_scope_id(addr) for addr in self._ip_addresses_by_version_value(version.value)] def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: """Equivalent to parsed_addresses, with the exception that IPv6 Link-Local @@ -374,13 +363,9 @@ def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[st This means the first address will always be the most recently added address of the given IP version. """ - return [ - str(addr) for addr in self._ip_addresses_by_version_value(version.value) - ] + return [str(addr) for addr in self._ip_addresses_by_version_value(version.value)] - def _set_properties( - self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]] - ) -> None: + def _set_properties(self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]]) -> None: """Sets properties and text of this info from a dictionary""" list_: List[bytes] = [] properties_contain_str = False @@ -421,9 +406,7 @@ def _set_text(self, text: bytes) -> None: def _generate_decoded_properties(self) -> None: """Generates decoded properties from the properties""" self._decoded_properties = { - k.decode("ascii", "replace"): None - if v is None - else v.decode("utf-8", "replace") + k.decode("ascii", "replace"): None if v is None else v.decode("utf-8", "replace") for k, v in self.properties.items() } @@ -477,9 +460,7 @@ def _set_ipv6_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA), ) else: - self._ipv6_addresses = self._get_ip_addresses_from_cache_lifo( - zc, now, _TYPE_AAAA - ) + self._ipv6_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) def _set_ipv4_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: """Set IPv4 addresses from the cache.""" @@ -489,13 +470,9 @@ def 
_set_ipv4_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A), ) else: - self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo( - zc, now, _TYPE_A - ) + self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) - def async_update_records( - self, zc: "Zeroconf", now: float_, records: List[RecordUpdate] - ) -> None: + def async_update_records(self, zc: "Zeroconf", now: float_, records: List[RecordUpdate]) -> None: """Updates service information from a DNS record. This method will be run in the event loop. @@ -507,9 +484,7 @@ def async_update_records( if updated and new_records_futures: _resolve_all_futures_to_none(new_records_futures) - def _process_record_threadsafe( - self, zc: "Zeroconf", record: DNSRecord, now: float_ - ) -> bool: + def _process_record_threadsafe(self, zc: "Zeroconf", record: DNSRecord, now: float_) -> bool: """Thread safe record updating. Returns True if a new record was added. 
@@ -691,15 +666,11 @@ def _dns_text(self, override_ttl: Optional[int]) -> DNSText: self._dns_text_cache = record return record - def dns_nsec( - self, missing_types: List[int], override_ttl: Optional[int] = None - ) -> DNSNsec: + def dns_nsec(self, missing_types: List[int], override_ttl: Optional[int] = None) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return self._dns_nsec(missing_types, override_ttl) - def _dns_nsec( - self, missing_types: List[int], override_ttl: Optional[int] - ) -> DNSNsec: + def _dns_nsec(self, missing_types: List[int], override_ttl: Optional[int]) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return DNSNsec( self._name, @@ -711,15 +682,11 @@ def _dns_nsec( 0.0, ) - def get_address_and_nsec_records( - self, override_ttl: Optional[int] = None - ) -> Set[DNSRecord]: + def get_address_and_nsec_records(self, override_ttl: Optional[int] = None) -> Set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" return self._get_address_and_nsec_records(override_ttl) - def _get_address_and_nsec_records( - self, override_ttl: Optional[int] - ) -> Set[DNSRecord]: + def _get_address_and_nsec_records(self, override_ttl: Optional[int]) -> Set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" cacheable = override_ttl is None if self._get_address_and_nsec_records_cache is not None and cacheable: @@ -730,17 +697,13 @@ def _get_address_and_nsec_records( missing_types.discard(dns_address.type) records.add(dns_address) if missing_types: - assert ( - self.server is not None - ), "Service server must be set for NSEC record." + assert self.server is not None, "Service server must be set for NSEC record." 
records.add(self._dns_nsec(list(missing_types), override_ttl)) if cacheable: self._get_address_and_nsec_records_cache = records return records - def _get_address_records_from_cache_by_type( - self, zc: "Zeroconf", _type: int_ - ) -> List[DNSAddress]: + def _get_address_records_from_cache_by_type(self, zc: "Zeroconf", _type: int_) -> List[DNSAddress]: """Get the addresses from the cache.""" if self.server_key is None: return [] @@ -796,9 +759,7 @@ def _load_from_cache(self, zc: "Zeroconf", now: float_) -> bool: @property def _is_complete(self) -> bool: """The ServiceInfo has all expected properties.""" - return bool( - self.text is not None and (self._ipv4_addresses or self._ipv6_addresses) - ) + return bool(self.text is not None and (self._ipv4_addresses or self._ipv6_addresses)) def request( self, @@ -883,9 +844,7 @@ async def async_request( if last <= now: return False if next_ <= now: - this_question_type = ( - question_type or QU_QUESTION if first_request else QM_QUESTION - ) + this_question_type = question_type or QU_QUESTION if first_request else QM_QUESTION out = self._generate_request_query(zc, now, this_question_type) first_request = False if out.questions: @@ -897,10 +856,7 @@ async def async_request( zc.async_send(out, addr, port) next_ = now + delay next_ += self._get_random_delay() - if ( - this_question_type is QM_QUESTION - and delay < _DUPLICATE_QUESTION_INTERVAL - ): + if this_question_type is QM_QUESTION and delay < _DUPLICATE_QUESTION_INTERVAL: # If we just asked a QM question, we need to # wait at least the duplicate question interval # before asking another QM question otherwise @@ -929,9 +885,7 @@ def _add_question_with_known_answers( ) -> None: """Add a question with known answers if its not suppressed.""" known_answers = { - answer - for answer in cache.get_all_by_details(name, type_, class_) - if not answer.is_stale(now) + answer for answer in cache.get_all_by_details(name, type_, class_) if not answer.is_stale(now) } if 
skip_if_known_answers and known_answers: return diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index 2d4f3f8e..05ee14cb 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -79,9 +79,7 @@ def async_get_infos_server(self, server: str) -> List[ServiceInfo]: """Return all ServiceInfo matching server.""" return self._async_get_by_index(self.servers, server) - def _async_get_by_index( - self, records: Dict[str, List], key: _str - ) -> List[ServiceInfo]: + def _async_get_by_index(self, records: Dict[str, List], key: _str) -> List[ServiceInfo]: """Return all ServiceInfo matching the index.""" record_list = records.get(key) if record_list is None: diff --git a/src/zeroconf/_services/types.py b/src/zeroconf/_services/types.py index 9793ae48..63b6d19a 100644 --- a/src/zeroconf/_services/types.py +++ b/src/zeroconf/_services/types.py @@ -69,9 +69,7 @@ def find( """ local_zc = zc or Zeroconf(interfaces=interfaces, ip_version=ip_version) listener = cls() - browser = ServiceBrowser( - local_zc, _SERVICE_TYPE_ENUMERATION_NAME, listener=listener - ) + browser = ServiceBrowser(local_zc, _SERVICE_TYPE_ENUMERATION_NAME, listener=listener) # wait for responses time.sleep(timeout) diff --git a/src/zeroconf/_updates.py b/src/zeroconf/_updates.py index eda89df4..58be33d8 100644 --- a/src/zeroconf/_updates.py +++ b/src/zeroconf/_updates.py @@ -47,13 +47,9 @@ def update_record( # pylint: disable=no-self-use This method is deprecated and will be removed in a future version. update_records should be implemented instead. """ - raise RuntimeError( - "update_record is deprecated and will be removed in a future version." 
- ) + raise RuntimeError("update_record is deprecated and will be removed in a future version.") - def async_update_records( - self, zc: "Zeroconf", now: float_, records: List[RecordUpdate] - ) -> None: + def async_update_records(self, zc: "Zeroconf", now: float_, records: List[RecordUpdate]) -> None: """Update multiple records in one shot. All records that are received in a single packet are passed diff --git a/src/zeroconf/_utils/asyncio.py b/src/zeroconf/_utils/asyncio.py index c2e66277..6d070e30 100644 --- a/src/zeroconf/_utils/asyncio.py +++ b/src/zeroconf/_utils/asyncio.py @@ -60,9 +60,7 @@ async def wait_for_future_set_or_timeout( """Wait for a future or timeout (in milliseconds).""" future = loop.create_future() future_set.add(future) - handle = loop.call_later( - millis_to_seconds(timeout), _set_future_none_if_not_done, future - ) + handle = loop.call_later(millis_to_seconds(timeout), _set_future_none_if_not_done, future) try: await future finally: @@ -100,9 +98,7 @@ async def await_awaitable(aw: Awaitable) -> None: await task -def run_coro_with_timeout( - aw: Coroutine, loop: asyncio.AbstractEventLoop, timeout: float -) -> Any: +def run_coro_with_timeout(aw: Coroutine, loop: asyncio.AbstractEventLoop, timeout: float) -> Any: """Run a coroutine with a timeout. 
The timeout should only be used as a safeguard to prevent @@ -124,15 +120,13 @@ def run_coro_with_timeout( def shutdown_loop(loop: asyncio.AbstractEventLoop) -> None: """Wait for pending tasks and stop an event loop.""" pending_tasks = set( - asyncio.run_coroutine_threadsafe(_async_get_all_tasks(loop), loop).result( - _GET_ALL_TASKS_TIMEOUT - ) + asyncio.run_coroutine_threadsafe(_async_get_all_tasks(loop), loop).result(_GET_ALL_TASKS_TIMEOUT) ) pending_tasks -= {task for task in pending_tasks if task.done()} if pending_tasks: - asyncio.run_coroutine_threadsafe( - _wait_for_loop_tasks(pending_tasks), loop - ).result(_WAIT_FOR_LOOP_TASKS_TIMEOUT) + asyncio.run_coroutine_threadsafe(_wait_for_loop_tasks(pending_tasks), loop).result( + _WAIT_FOR_LOOP_TASKS_TIMEOUT + ) loop.call_soon_threadsafe(loop.stop) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index d4ba708e..6b4657be 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -112,16 +112,12 @@ def get_ip_address_object_from_record( return cached_ip_addresses_wrapper(record.address) -def ip_bytes_and_scope_to_address( - address: bytes_, scope: int_ -) -> Optional[Union[IPv4Address, IPv6Address]]: +def ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Union[IPv4Address, IPv6Address]]: """Convert the bytes and scope to an IP address object.""" base_address = cached_ip_addresses_wrapper(address) if base_address is not None and base_address.is_link_local: # Avoid expensive __format__ call by using PyUnicode_Join - return cached_ip_addresses_wrapper( - "".join((str(base_address), "%", str(scope))) - ) + return cached_ip_addresses_wrapper("".join((str(base_address), "%", str(scope)))) return base_address diff --git a/src/zeroconf/_utils/name.py b/src/zeroconf/_utils/name.py index 3f923cfd..cda01b28 100644 --- a/src/zeroconf/_utils/name.py +++ b/src/zeroconf/_utils/name.py @@ -80,7 +80,7 @@ def service_type_name(type_: str, *, strict: 
bool = True) -> str: # pylint: dis """ if len(type_) > 256: # https://datatracker.ietf.org/doc/html/rfc6763#section-7.2 - raise BadTypeInNameException("Full name (%s) must be > 256 bytes" % type_) + raise BadTypeInNameException(f"Full name ({type_}) must be > 256 bytes") if type_.endswith((_TCP_PROTOCOL_LOCAL_TRAILER, _NONTCP_PROTOCOL_LOCAL_TRAILER)): remaining = type_[: -len(_TCP_PROTOCOL_LOCAL_TRAILER)].split(".") @@ -88,8 +88,8 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis has_protocol = True elif strict: raise BadTypeInNameException( - "Type '%s' must end with '%s' or '%s'" - % (type_, _TCP_PROTOCOL_LOCAL_TRAILER, _NONTCP_PROTOCOL_LOCAL_TRAILER) + f"Type '{type_}' must end with " + f"'{_TCP_PROTOCOL_LOCAL_TRAILER}' or '{_NONTCP_PROTOCOL_LOCAL_TRAILER}'" ) elif type_.endswith(_LOCAL_TRAILER): remaining = type_[: -len(_LOCAL_TRAILER)].split(".") @@ -104,48 +104,39 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis raise BadTypeInNameException("No Service name found") if len(remaining) == 1 and len(remaining[0]) == 0: - raise BadTypeInNameException("Type '%s' must not start with '.'" % type_) + raise BadTypeInNameException(f"Type '{type_}' must not start with '.'") if service_name[0] != "_": - raise BadTypeInNameException( - "Service name (%s) must start with '_'" % service_name - ) + raise BadTypeInNameException(f"Service name ({service_name}) must start with '_'") test_service_name = service_name[1:] if strict and len(test_service_name) > 15: # https://datatracker.ietf.org/doc/html/rfc6763#section-7.2 - raise BadTypeInNameException( - "Service name (%s) must be <= 15 bytes" % test_service_name - ) + raise BadTypeInNameException(f"Service name ({test_service_name}) must be <= 15 bytes") if "--" in test_service_name: - raise BadTypeInNameException( - "Service name (%s) must not contain '--'" % test_service_name - ) + raise BadTypeInNameException(f"Service name ({test_service_name}) must not 
contain '--'") if "-" in (test_service_name[0], test_service_name[-1]): - raise BadTypeInNameException( - "Service name (%s) may not start or end with '-'" % test_service_name - ) + raise BadTypeInNameException(f"Service name ({test_service_name}) may not start or end with '-'") if not _HAS_A_TO_Z.search(test_service_name): raise BadTypeInNameException( - "Service name (%s) must contain at least one letter (eg: 'A-Z')" - % test_service_name + f"Service name ({test_service_name}) must contain at least one letter (eg: 'A-Z')" ) allowed_characters_re = ( - _HAS_ONLY_A_TO_Z_NUM_HYPHEN - if strict - else _HAS_ONLY_A_TO_Z_NUM_HYPHEN_UNDERSCORE + _HAS_ONLY_A_TO_Z_NUM_HYPHEN if strict else _HAS_ONLY_A_TO_Z_NUM_HYPHEN_UNDERSCORE ) if not allowed_characters_re.search(test_service_name): raise BadTypeInNameException( - "Service name (%s) must contain only these characters: " - "A-Z, a-z, 0-9, hyphen ('-')%s" - % (test_service_name, "" if strict else ", underscore ('_')") + f"Service name ({test_service_name if strict else ''}) " + "must contain only these characters: " + "A-Z, a-z, 0-9, hyphen ('-')" + ", underscore ('_')" + if strict + else "" ) else: service_name = "" @@ -161,12 +152,11 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis if remaining: length = len(remaining[0].encode("utf-8")) if length > 63: - raise BadTypeInNameException("Too long: '%s'" % remaining[0]) + raise BadTypeInNameException(f"Too long: '{remaining[0]}'") if _HAS_ASCII_CONTROL_CHARS.search(remaining[0]): raise BadTypeInNameException( - "Ascii control character 0x00-0x1F and 0x7F illegal in '%s'" - % remaining[0] + f"Ascii control character 0x00-0x1F and 0x7F illegal in '{remaining[0]}'" ) return service_name + trailer diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index fbac9fe7..4cd50926 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -40,9 +40,7 @@ class InterfaceChoice(enum.Enum): All = 2 -InterfacesType = Union[ 
- Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]], InterfaceChoice -] +InterfacesType = Union[Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]], InterfaceChoice] @enum.unique @@ -73,42 +71,25 @@ def _encode_address(address: str) -> bytes: def get_all_addresses() -> List[str]: - return list( - { - addr.ip - for iface in ifaddr.get_adapters() - for addr in iface.ips - if addr.is_IPv4 - } - ) + return list({addr.ip for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv4}) def get_all_addresses_v6() -> List[Tuple[Tuple[str, int, int], int]]: # IPv6 multicast uses positive indexes for interfaces # TODO: What about multi-address interfaces? return list( - { - (addr.ip, iface.index) - for iface in ifaddr.get_adapters() - for addr in iface.ips - if addr.is_IPv6 - } + {(addr.ip, iface.index) for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv6} ) -def ip6_to_address_and_index( - adapters: List[Any], ip: str -) -> Tuple[Tuple[str, int, int], int]: +def ip6_to_address_and_index(adapters: List[Any], ip: str) -> Tuple[Tuple[str, int, int], int]: if "%" in ip: ip = ip[: ip.index("%")] # Strip scope_id. 
ipaddr = ipaddress.ip_address(ip) for adapter in adapters: for adapter_ip in adapter.ips: # IPv6 addresses are represented as tuples - if ( - isinstance(adapter_ip.ip, tuple) - and ipaddress.ip_address(adapter_ip.ip[0]) == ipaddr - ): + if isinstance(adapter_ip.ip, tuple) and ipaddress.ip_address(adapter_ip.ip[0]) == ipaddr: return ( cast(Tuple[str, int, int], adapter_ip.ip), cast(int, adapter.index), @@ -117,9 +98,7 @@ def ip6_to_address_and_index( raise RuntimeError("No adapter found for IP address %s" % ip) -def interface_index_to_ip6_address( - adapters: List[Any], index: int -) -> Tuple[str, int, int]: +def interface_index_to_ip6_address(adapters: List[Any], index: int) -> Tuple[str, int, int]: for adapter in adapters: if adapter.index == index: for adapter_ip in adapter.ips: @@ -175,16 +154,11 @@ def normalize_interface_choice( result.extend(get_all_addresses()) if not result: raise RuntimeError( - "No interfaces to listen on, check that any interfaces have IP version %s" - % ip_version + "No interfaces to listen on, check that any interfaces have IP version %s" % ip_version ) elif isinstance(choice, list): # First, take IPv4 addresses. - result = [ - i - for i in choice - if isinstance(i, str) and ipaddress.ip_address(i).version == 4 - ] + result = [i for i in choice if isinstance(i, str) and ipaddress.ip_address(i).version == 4] # Unlike IP_ADD_MEMBERSHIP, IPV6_JOIN_GROUP requires interface indexes. 
result += ip6_addresses_to_indexes(choice) else: @@ -197,9 +171,7 @@ def disable_ipv6_only_or_raise(s: socket.socket) -> None: try: s.setsockopt(_IPPROTO_IPV6, socket.IPV6_V6ONLY, False) except OSError: - log.error( - "Support for dual V4-V6 sockets is not present, use IPVersion.V4 or IPVersion.V6" - ) + log.error("Support for dual V4-V6 sockets is not present, use IPVersion.V4 or IPVersion.V6") raise @@ -237,9 +209,7 @@ def set_mdns_port_socket_options_for_ip_version( s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, loop) except OSError as e: - if ( - bind_addr[0] != "" or get_errno(e) != errno.EINVAL - ): # Fails to set on MacOS + if bind_addr[0] != "" or get_errno(e) != errno.EINVAL: # Fails to set on MacOS raise if ip_version != IPVersion.V4Only: @@ -261,9 +231,7 @@ def new_socket( apple_p2p, bind_addr, ) - socket_family = ( - socket.AF_INET if ip_version == IPVersion.V4Only else socket.AF_INET6 - ) + socket_family = socket.AF_INET if ip_version == IPVersion.V4Only else socket.AF_INET6 s = socket.socket(socket_family, socket.SOCK_DGRAM) if ip_version == IPVersion.All: @@ -286,8 +254,7 @@ def new_socket( except OSError as ex: if ex.errno == errno.EADDRNOTAVAIL: log.warning( - "Address not available when binding to %s, " - "it is expected to happen on some systems", + "Address not available when binding to %s, " "it is expected to happen on some systems", bind_tup, ) return None @@ -306,9 +273,7 @@ def add_multicast_member( if sys.platform == "win32": # No WSAEINVAL definition in typeshed err_einval |= {cast(Any, errno).WSAEINVAL} # pylint: disable=no-member - log.debug( - "Adding %r (socket %d) to multicast group", interface, listen_socket.fileno() - ) + log.debug("Adding %r (socket %d) to multicast group", interface, listen_socket.fileno()) try: if is_v6: try: @@ -324,12 +289,8 @@ def add_multicast_member( _value = mdns_addr6_bytes + iface_bin listen_socket.setsockopt(_IPPROTO_IPV6, 
socket.IPV6_JOIN_GROUP, _value) else: - _value = socket.inet_aton(_MDNS_ADDR) + socket.inet_aton( - cast(str, interface) - ) - listen_socket.setsockopt( - socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, _value - ) + _value = socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(cast(str, interface)) + listen_socket.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, _value) except OSError as e: _errno = get_errno(e) if _errno == errno.EADDRINUSE: @@ -378,15 +339,11 @@ def new_respond_socket( respond_socket = new_socket( ip_version=(IPVersion.V6Only if is_v6 else IPVersion.V4Only), apple_p2p=apple_p2p, - bind_addr=cast(Tuple[Tuple[str, int, int], int], interface)[0] - if is_v6 - else (cast(str, interface),), + bind_addr=cast(Tuple[Tuple[str, int, int], int], interface)[0] if is_v6 else (cast(str, interface),), ) if not respond_socket: return None - log.debug( - "Configuring socket %s with multicast interface %s", respond_socket, interface - ) + log.debug("Configuring socket %s with multicast interface %s", respond_socket, interface) if is_v6: iface_bin = struct.pack("@I", cast(int, interface[1])) respond_socket.setsockopt(_IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_bin) @@ -408,9 +365,7 @@ def create_sockets( if unicast: listen_socket = None else: - listen_socket = new_socket( - ip_version=ip_version, apple_p2p=apple_p2p, bind_addr=("",) - ) + listen_socket = new_socket(ip_version=ip_version, apple_p2p=apple_p2p, bind_addr=("",)) normalized_interfaces = normalize_interface_choice(interfaces, ip_version) @@ -461,14 +416,10 @@ def autodetect_ip_version(interfaces: InterfacesType) -> IPVersion: """Auto detect the IP version when it is not provided.""" if isinstance(interfaces, list): has_v6 = any( - isinstance(i, int) - or (isinstance(i, str) and ipaddress.ip_address(i).version == 6) - for i in interfaces - ) - has_v4 = any( - isinstance(i, str) and ipaddress.ip_address(i).version == 4 + isinstance(i, int) or (isinstance(i, str) and ipaddress.ip_address(i).version == 6) 
for i in interfaces ) + has_v4 = any(isinstance(i, str) and ipaddress.ip_address(i).version == 4 for i in interfaces) if has_v4 and has_v6: return IPVersion.All if has_v6: diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index c2a51f94..134ea3e0 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -22,7 +22,7 @@ import asyncio import contextlib -from types import TracebackType # noqa # used in type hints +from types import TracebackType # used in type hints from typing import Awaitable, Callable, Dict, List, Optional, Tuple, Type, Union from ._core import Zeroconf @@ -72,9 +72,7 @@ def __init__( delay: int = _BROWSER_TIME, question_type: Optional[DNSQuestionType] = None, ) -> None: - super().__init__( - zeroconf, type_, handlers, listener, addr, port, delay, question_type - ) + super().__init__(zeroconf, type_, handlers, listener, addr, port, delay, question_type) self._async_start() async def async_cancel(self) -> None: @@ -249,20 +247,14 @@ async def async_get_service_info( :param timeout: milliseconds to wait for a response :param question_type: The type of questions to ask (DNSQuestionType.QM or DNSQuestionType.QU) """ - return await self.zeroconf.async_get_service_info( - type_, name, timeout, question_type - ) + return await self.zeroconf.async_get_service_info(type_, name, timeout, question_type) - async def async_add_service_listener( - self, type_: str, listener: ServiceListener - ) -> None: + async def async_add_service_listener(self, type_: str, listener: ServiceListener) -> None: """Adds a listener for a particular service type. 
This object will then have its add_service and remove_service methods called when services of that type become available and unavailable.""" await self.async_remove_service_listener(listener) - self.async_browsers[listener] = AsyncServiceBrowser( - self.zeroconf, type_, listener - ) + self.async_browsers[listener] = AsyncServiceBrowser(self.zeroconf, type_, listener) async def async_remove_service_listener(self, listener: ServiceListener) -> None: """Removes a listener from the set that is currently listening.""" @@ -273,10 +265,7 @@ async def async_remove_service_listener(self, listener: ServiceListener) -> None async def async_remove_all_service_listeners(self) -> None: """Removes a listener from the set that is currently listening.""" await asyncio.gather( - *( - self.async_remove_service_listener(listener) - for listener in list(self.async_browsers) - ) + *(self.async_remove_service_listener(listener) for listener in list(self.async_browsers)) ) async def __aenter__(self) -> "AsyncZeroconf": diff --git a/src/zeroconf/const.py b/src/zeroconf/const.py index 6c64e144..d84cb73b 100644 --- a/src/zeroconf/const.py +++ b/src/zeroconf/const.py @@ -31,9 +31,7 @@ _LISTENER_TIME = 200 # ms _BROWSER_TIME = 10000 # ms _DUPLICATE_PACKET_SUPPRESSION_INTERVAL = 1000 # ms -_DUPLICATE_QUESTION_INTERVAL = ( - 999 # ms # Must be 1ms less than _DUPLICATE_PACKET_SUPPRESSION_INTERVAL -) +_DUPLICATE_QUESTION_INTERVAL = 999 # ms # Must be 1ms less than _DUPLICATE_PACKET_SUPPRESSION_INTERVAL _CACHE_CLEANUP_INTERVAL = 10 # s _LOADED_SYSTEM_TIMEOUT = 10 # s _STARTUP_TIMEOUT = 9 # s must be lower than _LOADED_SYSTEM_TIMEOUT diff --git a/tests/__init__.py b/tests/__init__.py index 1feebafb..82c09be7 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -36,9 +36,7 @@ class QuestionHistoryWithoutSuppression(QuestionHistory): - def suppresses( - self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord] - ) -> bool: + def suppresses(self, question: DNSQuestion, now: float, 
known_answers: Set[DNSRecord]) -> bool: return False diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 17950683..dc9b1435 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -293,26 +293,20 @@ def mock_record_update_incoming_msg( ) generated.add_answer_at_time( - r.DNSPointer( - service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name - ), + r.DNSPointer(service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name), 0, ) return r.DNSIncoming(generated.packets()[0]) zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) - service_browser = r.ServiceBrowser( - zeroconf, service_type, listener=MyServiceListener() - ) + service_browser = r.ServiceBrowser(zeroconf, service_type, listener=MyServiceListener()) try: wait_time = 3 # service added - _inject_response( - zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Added) - ) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Added)) service_add_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 0 @@ -321,9 +315,7 @@ def mock_record_update_incoming_msg( # service SRV updated service_updated_event.clear() service_server = "ash-2.local." 
- _inject_response( - zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) - ) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 1 @@ -332,9 +324,7 @@ def mock_record_update_incoming_msg( # service TXT updated service_updated_event.clear() service_text = b"path=/~matt2/" - _inject_response( - zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) - ) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 2 @@ -343,9 +333,7 @@ def mock_record_update_incoming_msg( # service TXT updated - duplicate update should not trigger another service_updated service_updated_event.clear() service_text = b"path=/~matt2/" - _inject_response( - zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) - ) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 2 @@ -356,9 +344,7 @@ def mock_record_update_incoming_msg( service_address = "10.0.1.3" # Verify we match on uppercase service_server = service_server.upper() - _inject_response( - zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) - ) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 3 @@ -369,18 +355,14 @@ def mock_record_update_incoming_msg( service_server = "ash-3.local." 
service_text = b"path=/~matt3/" service_address = "10.0.1.3" - _inject_response( - zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated) - ) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Updated)) service_updated_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 4 assert service_removed_count == 0 # service removed - _inject_response( - zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Removed) - ) + _inject_response(zeroconf, mock_record_update_incoming_msg(r.ServiceStateChange.Removed)) service_removed_event.wait(wait_time) assert service_added_count == 1 assert service_updated_count == 4 @@ -430,17 +412,13 @@ def mock_record_update_incoming_msg( ) -> r.DNSIncoming: generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( - r.DNSPointer( - service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name - ), + r.DNSPointer(service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name), 0, ) return r.DNSIncoming(generated.packets()[0]) zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) - service_browser = r.ServiceBrowser( - zeroconf, service_types, listener=MyServiceListener() - ) + service_browser = r.ServiceBrowser(zeroconf, service_types, listener=MyServiceListener()) try: wait_time = 3 @@ -470,9 +448,7 @@ def _mock_get_expiration_time(self, percent): return self.created + (percent * self.ttl * 10) # Set an expire time that will force a refresh - with patch( - "zeroconf.DNSRecord.get_expiration_time", new=_mock_get_expiration_time - ): + with patch("zeroconf.DNSRecord.get_expiration_time", new=_mock_get_expiration_time): _inject_response( zeroconf, mock_record_update_incoming_msg( @@ -570,15 +546,10 @@ def on_service_state_change(zeroconf, service_type, state_change, name): start_time = current_time_millis() browser = ServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) - time.sleep( - millis_to_seconds( - 
_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5 - ) - ) + time.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) try: assert ( - current_time_millis() - start_time - > _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[0] + current_time_millis() - start_time > _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[0] ) finally: browser.cancel() @@ -633,9 +604,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): service_added = asyncio.Event() service_removed = asyncio.Event() - browser = AsyncServiceBrowser( - zeroconf_browser, type_, [on_service_state_change] - ) + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) info = ServiceInfo( type_, registration_name, @@ -737,9 +706,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): service_added = asyncio.Event() service_removed = asyncio.Event() - browser = AsyncServiceBrowser( - zeroconf_browser, type_, [on_service_state_change] - ) + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) info = ServiceInfo( type_, registration_name, @@ -832,11 +799,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): [on_service_state_change], question_type=r.DNSQuestionType.QM, ) - await asyncio.sleep( - millis_to_seconds( - _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5 - ) - ) + await asyncio.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) try: assert first_outgoing.questions[0].unicast is False # type: ignore[union-attr] finally: @@ -876,11 +839,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): [on_service_state_change], question_type=r.DNSQuestionType.QU, ) - await asyncio.sleep( - millis_to_seconds( - _services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5 - ) - ) + await 
asyncio.sleep(millis_to_seconds(_services_browser._FIRST_QUERY_DELAY_RANDOM_INTERVAL[1] + 5)) try: assert first_outgoing.questions[0].unicast is True # type: ignore[union-attr] finally: @@ -898,9 +857,7 @@ def test_legacy_record_update_listener(): r.RecordUpdateListener().update_record( zc, 0, - r.DNSRecord( - "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL - ), + r.DNSRecord("irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL), ) updates = [] @@ -908,9 +865,7 @@ def test_legacy_record_update_listener(): class LegacyRecordUpdateListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def update_record( - self, zc: "Zeroconf", now: float, record: r.DNSRecord - ) -> None: + def update_record(self, zc: "Zeroconf", now: float, record: r.DNSRecord) -> None: nonlocal updates updates.append(record) @@ -945,15 +900,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): browser.cancel() assert len(updates) - assert ( - len( - [ - isinstance(update, r.DNSPointer) and update.name == type_ - for update in updates - ] - ) - >= 1 - ) + assert len([isinstance(update, r.DNSPointer) and update.name == type_ for update in updates]) >= 1 zc.remove_listener(listener) # Removing a second time should not throw @@ -985,9 +932,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) _inject_response( zc, @@ -1002,9 +947,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): ) time.sleep(0.1) - assert callbacks == [ - ("_hap._tcp.local.", ServiceStateChange.Added, "xxxyyy._hap._tcp.local.") - ] + assert callbacks == [("_hap._tcp.local.", 
ServiceStateChange.Added, "xxxyyy._hap._tcp.local.")] service_info = zc.get_service_info(type_, registration_name) assert service_info is not None assert service_info.port == 80 @@ -1063,9 +1006,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) _inject_response( zc, @@ -1125,9 +1066,7 @@ def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) _inject_response( zc, @@ -1166,16 +1105,12 @@ def on_service_state_change(zeroconf, service_type, state_change, name): pass zc = r.Zeroconf(interfaces=["127.0.0.1"]) - browser = ServiceBrowser( - zc, ["_tivo-videostream._tcp.local."], [on_service_state_change] - ) + browser = ServiceBrowser(zc, ["_tivo-videostream._tcp.local."], [on_service_state_change]) browser.cancel() # Still fail on completely invalid with pytest.raises(r.BadTypeInNameException): - browser = ServiceBrowser( - zc, ["tivo-videostream._tcp.local."], [on_service_state_change] - ) + browser = ServiceBrowser(zc, ["tivo-videostream._tcp.local."], [on_service_state_change]) zc.close() @@ -1184,9 +1119,7 @@ def test_group_ptr_queries_with_known_answers(): now = current_time_millis() for i in range(120): name = f"_hap{i}._tcp._local." 
- questions_with_known_answers[ - DNSQuestion(name, const._TYPE_PTR, const._CLASS_IN) - ] = { + questions_with_known_answers[DNSQuestion(name, const._TYPE_PTR, const._CLASS_IN)] = { DNSPointer( name, const._TYPE_PTR, @@ -1196,9 +1129,7 @@ def test_group_ptr_queries_with_known_answers(): ) for counter in range(i) } - outs = _services_browser.group_ptr_queries_with_known_answers( - now, True, questions_with_known_answers - ) + outs = _services_browser.group_ptr_queries_with_known_answers(now, True, questions_with_known_answers) for out in outs: packets = out.packets() # If we generate multiple packets there must @@ -1228,18 +1159,14 @@ async def test_generate_service_query_suppress_duplicate_questions(): assert zc.question_history.suppresses(question, now, other_known_answers) # The known answer list is different, do not suppress - outs = _services_browser.generate_service_query( - zc, now, {name}, multicast=True, question_type=None - ) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert outs zc.cache.async_add_records([answer]) # The known answer list contains all the asked questions in the history # we should suppress - outs = _services_browser.generate_service_query( - zc, now, {name}, multicast=True, question_type=None - ) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert not outs # We do not suppress once the question history expires @@ -1249,23 +1176,17 @@ async def test_generate_service_query_suppress_duplicate_questions(): assert outs # We do not suppress QU queries ever - outs = _services_browser.generate_service_query( - zc, now, {name}, multicast=False, question_type=None - ) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=False, question_type=None) assert outs zc.question_history.async_expire(now + 2000) # No suppression after clearing the history - outs = _services_browser.generate_service_query( - zc, now, 
{name}, multicast=True, question_type=None - ) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert outs # The previous query we just sent is still remembered and # the next one is suppressed - outs = _services_browser.generate_service_query( - zc, now, {name}, multicast=True, question_type=None - ) + outs = _services_browser.generate_service_query(zc, now, {name}, multicast=True, question_type=None) assert not outs await aiozc.async_close() @@ -1285,9 +1206,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): pout = r.DNSIncoming(out.packets()[0]) sends.append(pout) - query_scheduler = _services_browser.QueryScheduler( - zc, types_, None, 0, True, delay, (0, 0), None - ) + query_scheduler = _services_browser.QueryScheduler(zc, types_, None, 0, True, delay, (0, 0), None) loop = asyncio.get_running_loop() # patch the zeroconf send so we can capture what is being sent @@ -1316,9 +1235,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): ) query_scheduler.reschedule_ptr_first_refresh(ptr_record) - expected_when_time = ptr_record.get_expiration_time( - const._EXPIRE_REFRESH_TIME_PERCENT - ) + expected_when_time = ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) expected_expire_time = ptr_record.get_expiration_time(100) ptr_query = _ScheduledPTRQuery( ptr_record.alias, @@ -1330,9 +1247,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): assert query_scheduler._query_heap == [ptr_query] query_scheduler.reschedule_ptr_first_refresh(ptr2_record) - expected_when_time = ptr2_record.get_expiration_time( - const._EXPIRE_REFRESH_TIME_PERCENT - ) + expected_when_time = ptr2_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) expected_expire_time = ptr2_record.get_expiration_time(100) ptr2_query = _ScheduledPTRQuery( ptr2_record.alias, @@ -1384,9 +1299,7 @@ def send(out, addr=const._MDNS_ADDR, 
port=const._MDNS_PORT, v6_flow_scope=()): pout = r.DNSIncoming(out.packets()[0]) sends.append(pout) - query_scheduler = _services_browser.QueryScheduler( - zc, types_, None, 0, True, delay, (0, 0), None - ) + query_scheduler = _services_browser.QueryScheduler(zc, types_, None, 0, True, delay, (0, 0), None) loop = asyncio.get_running_loop() # patch the zeroconf send so we can capture what is being sent @@ -1408,9 +1321,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): ) query_scheduler.reschedule_ptr_first_refresh(ptr_record) - expected_when_time = ptr_record.get_expiration_time( - const._EXPIRE_REFRESH_TIME_PERCENT - ) + expected_when_time = ptr_record.get_expiration_time(const._EXPIRE_REFRESH_TIME_PERCENT) expected_expire_time = ptr_record.get_expiration_time(100) ptr_query = _ScheduledPTRQuery( ptr_record.alias, @@ -1484,9 +1395,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) should_not_match = ServiceInfo( not_match_type_, not_match_registration_name, @@ -1642,15 +1551,9 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de def test_scheduled_ptr_query_dunder_methods(): - query75 = _ScheduledPTRQuery( - "zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 75 - ) - query80 = _ScheduledPTRQuery( - "zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 80 - ) - query75_2 = _ScheduledPTRQuery( - "zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 140, 75 - ) + query75 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 75) + query80 = _ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 120, 80) + query75_2 = 
_ScheduledPTRQuery("zoomy._hap._tcp.local.", "_hap._tcp.local.", 120, 140, 75) other = object() stringified = str(query75) assert "zoomy._hap._tcp.local." in stringified @@ -1668,13 +1571,13 @@ def test_scheduled_ptr_query_dunder_methods(): assert query75 != other with pytest.raises(TypeError): - query75 < other # type: ignore[operator] + assert query75 < other # type: ignore[operator] with pytest.raises(TypeError): - query75 <= other # type: ignore[operator] + assert query75 <= other # type: ignore[operator] with pytest.raises(TypeError): - query75 > other # type: ignore[operator] + assert query75 > other # type: ignore[operator] with pytest.raises(TypeError): - query75 >= other # type: ignore[operator] + assert query75 >= other # type: ignore[operator] @pytest.mark.asyncio @@ -1712,9 +1615,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): with patch.object(zeroconf_browser, "async_send", send): service_added = asyncio.Event() - browser = AsyncServiceBrowser( - zeroconf_browser, type_, [on_service_state_change] - ) + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) info = ServiceInfo( type_, registration_name, @@ -1782,9 +1683,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): # patch the zeroconf send so we can capture what is being sent with patch.object(zeroconf_browser, "async_send", send): service_added = asyncio.Event() - browser = AsyncServiceBrowser( - zeroconf_browser, type_, [on_service_state_change] - ) + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) expected_ttl = const._DNS_OTHER_TTL info = ServiceInfo( type_, diff --git a/tests/services/test_info.py b/tests/services/test_info.py index aefef6c8..4a9b1ee2 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -306,22 +306,10 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert 
len(last_sent.questions) == 4 - assert ( - r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) - in last_sent.questions - ) + assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions assert service_info is None # Expect query for SRV, A, AAAA @@ -344,18 +332,9 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 3 # type: ignore[unreachable] - assert ( - r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) - in last_sent.questions - ) + assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions assert service_info is None # Expect query for A, AAAA @@ -381,14 +360,8 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 2 - assert ( - r.DNSQuestion(service_server, const._TYPE_A, const._CLASS_IN) - in last_sent.questions - ) - assert ( - 
r.DNSQuestion(service_server, const._TYPE_AAAA, const._CLASS_IN) - in last_sent.questions - ) + assert r.DNSQuestion(service_server, const._TYPE_A, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_server, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions last_sent = None assert service_info is None @@ -411,9 +384,7 @@ def get_service_info_helper(zc, type, name): const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, ttl, - socket.inet_pton( - socket.AF_INET6, service_address_v6_ll - ), + socket.inet_pton(socket.AF_INET6, service_address_v6_ll), scope_id=service_scope_id, ), ] @@ -471,30 +442,16 @@ def get_service_info_helper(zc, type, name): args=(zc, service_type, service_name), ) helper_thread.start() - wait_time = ( - const._LISTENER_TIME + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5 - ) / 1000 + wait_time = (const._LISTENER_TIME + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5) / 1000 # Expect query for SRV, TXT, A, AAAA send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 4 - assert ( - r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) - in last_sent.questions - ) + assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions assert service_info is None # Expect query for SRV only as A, AAAA, and TXT are suppressed @@ -524,16 +481,11 @@ def get_service_info_helper(zc, 
type, name): send_event.wait(wait_time * 0.25) assert last_sent is not None assert len(last_sent.questions) == 1 # type: ignore[unreachable] - assert ( - r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) - in last_sent.questions - ) + assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions assert service_info is None wait_time = ( - const._DUPLICATE_QUESTION_INTERVAL - + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] - + 5 + const._DUPLICATE_QUESTION_INTERVAL + info._AVOID_SYNC_DELAY_RANDOM_INTERVAL[1] + 5 ) / 1000 # Expect no queries as all are suppressed by the question history last_sent = None @@ -624,22 +576,10 @@ def get_service_info_helper(zc, type, name): send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 4 - assert ( - r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) - in last_sent.questions - ) - assert ( - r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) - in last_sent.questions - ) + assert r.DNSQuestion(service_name, const._TYPE_SRV, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_TXT, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_A, const._CLASS_IN) in last_sent.questions + assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions assert service_info is None # Expext no further queries @@ -816,9 +756,7 @@ def test_multiple_addresses(): else ip_address(address_v6_ll), ] assert info.addresses_by_version(r.IPVersion.V4Only) == [address] - assert info.ip_addresses_by_version(r.IPVersion.V4Only) == [ - ip_address(address) - ] + assert info.ip_addresses_by_version(r.IPVersion.V4Only) == [ip_address(address)] assert 
info.addresses_by_version(r.IPVersion.V6Only) == [ address_v6, address_v6_ll, @@ -842,16 +780,12 @@ def test_multiple_addresses(): assert info.parsed_scoped_addresses() == [ address_parsed, address_v6_parsed, - address_v6_ll_scoped_parsed - if ipaddress_supports_scope_id - else address_v6_ll_parsed, + address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, ] assert info.parsed_scoped_addresses(r.IPVersion.V4Only) == [address_parsed] assert info.parsed_scoped_addresses(r.IPVersion.V6Only) == [ address_v6_parsed, - address_v6_ll_scoped_parsed - if ipaddress_supports_scope_id - else address_v6_ll_parsed, + address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, ] @@ -896,9 +830,7 @@ def test_scoped_addresses_from_cache(): info = ServiceInfo(type_, registration_name) info.load_from_cache(zeroconf) assert info.parsed_scoped_addresses() == ["fe80::52e:c2f2:bc5f:e9c6%12"] - assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ - ip_address("fe80::52e:c2f2:bc5f:e9c6%12") - ] + assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ip_address("fe80::52e:c2f2:bc5f:e9c6%12")] zeroconf.close() @@ -913,12 +845,8 @@ async def test_multiple_a_addresses_newest_address_first(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) cache = aiozc.zeroconf.cache host = "multahost.local." 
- record1 = r.DNSAddress( - host, const._TYPE_A, const._CLASS_IN, 1000, b"\x7f\x00\x00\x01" - ) - record2 = r.DNSAddress( - host, const._TYPE_A, const._CLASS_IN, 1000, b"\x7f\x00\x00\x02" - ) + record1 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b"\x7f\x00\x00\x01") + record2 = r.DNSAddress(host, const._TYPE_A, const._CLASS_IN, 1000, b"\x7f\x00\x00\x02") cache.async_add_records([record1, record2]) # New kwarg way @@ -959,9 +887,7 @@ def test_filter_address_by_type_from_service_info(): registration_name = f"{name}.{type_}" ipv4 = socket.inet_aton("10.0.1.2") ipv6 = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[ipv4, ipv6] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[ipv4, ipv6]) def dns_addresses_to_addresses(dns_address: List[DNSAddress]) -> List[bytes]: return [address.address for address in dns_address] @@ -971,12 +897,8 @@ def dns_addresses_to_addresses(dns_address: List[DNSAddress]) -> List[bytes]: ipv4, ipv6, ] - assert dns_addresses_to_addresses( - info.dns_addresses(version=r.IPVersion.V4Only) - ) == [ipv4] - assert dns_addresses_to_addresses( - info.dns_addresses(version=r.IPVersion.V6Only) - ) == [ipv6] + assert dns_addresses_to_addresses(info.dns_addresses(version=r.IPVersion.V4Only)) == [ipv4] + assert dns_addresses_to_addresses(info.dns_addresses(version=r.IPVersion.V6Only)) == [ipv6] def test_changing_name_updates_serviceinfo_key(): @@ -1102,9 +1024,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): # patch the zeroconf send with patch.object(zeroconf, "async_send", send): - zeroconf.get_service_info( - f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QU - ) + zeroconf.get_service_info(f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QU) assert first_outgoing.questions[0].unicast is True # type: ignore[union-attr] zeroconf.close() @@ -1128,9 +1048,7 @@ def 
send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): # patch the zeroconf send with patch.object(zeroconf, "async_send", send): - zeroconf.get_service_info( - f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QM - ) + zeroconf.get_service_info(f"name.{type_}", type_, 500, question_type=r.DNSQuestionType.QM) assert first_outgoing.questions[0].unicast is False # type: ignore[union-attr] zeroconf.close() @@ -1139,10 +1057,7 @@ def test_request_timeout(): """Test that the timeout does not throw an exception and finishes close to the actual timeout.""" zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) start_time = r.current_time_millis() - assert ( - zeroconf.get_service_info("_notfound.local.", "notthere._notfound.local.") - is None - ) + assert zeroconf.get_service_info("_notfound.local.", "notthere._notfound.local.") is None end_time = r.current_time_millis() zeroconf.close() # 3000ms for the default timeout @@ -1232,9 +1147,7 @@ async def test_release_wait_when_new_recorded_added(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) assert await asyncio.wait_for(task, timeout=2) assert info.addresses == [b"\x7f\x00\x00\x01"] await aiozc.async_close() @@ -1297,9 +1210,7 @@ async def test_port_changes_are_seen(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -1315,9 +1226,7 @@ async def test_port_changes_are_seen(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + 
aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name, 80, 10, 10, desc, host) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1384,9 +1293,7 @@ async def test_port_changes_are_seen_with_directed_request(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -1402,9 +1309,7 @@ async def test_port_changes_are_seen_with_directed_request(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name, 80, 10, 10, desc, host) await info.async_request(aiozc.zeroconf, timeout=200, addr="127.0.0.1", port=5353) @@ -1470,9 +1375,7 @@ async def test_ipv4_changes_are_seen(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x01"] @@ -1488,9 +1391,7 @@ async def test_ipv4_changes_are_seen(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) @@ -1562,9 +1463,7 @@ async def 
test_ipv6_changes_are_seen(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V6Only) == [ @@ -1582,9 +1481,7 @@ async def test_ipv6_changes_are_seen(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) @@ -1644,15 +1541,11 @@ async def test_bad_ip_addresses_ignored_in_cache(): 0, ) # Manually add a bad record to the cache - aiozc.zeroconf.cache.async_add_records( - [DNSAddress(host, const._TYPE_A, const._CLASS_IN, 10000, b"\x00")] - ) + aiozc.zeroconf.cache.async_add_records([DNSAddress(host, const._TYPE_A, const._CLASS_IN, 10000, b"\x00")]) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) info.load_from_cache(aiozc.zeroconf) assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x01"] @@ -1711,9 +1604,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await 
info.async_request(aiozc.zeroconf, timeout=200) @@ -1733,9 +1624,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1787,9 +1676,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): ) await aiozc.zeroconf.async_wait_for_start() await asyncio.sleep(0) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1819,9 +1706,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): ), 0, ) - aiozc.zeroconf.record_manager.async_updates_from_response( - r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) info = ServiceInfo(type_, registration_name) await info.async_request(aiozc.zeroconf, timeout=200) @@ -1843,10 +1728,7 @@ async def test_release_wait_when_new_recorded_added_concurrency(): # New kwarg way info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, host) - tasks = [ - asyncio.create_task(info.async_request(aiozc.zeroconf, timeout=200000)) - for _ in range(10) - ] + tasks = [asyncio.create_task(info.async_request(aiozc.zeroconf, timeout=200000)) for _ in range(10)] await asyncio.sleep(0.1) for task in tasks: assert not task.done() @@ -1898,9 +1780,7 @@ async def test_release_wait_when_new_recorded_added_concurrency(): await asyncio.sleep(0) for task in tasks: assert not task.done() - aiozc.zeroconf.record_manager.async_updates_from_response( - 
r.DNSIncoming(generated.packets()[0]) - ) + aiozc.zeroconf.record_manager.async_updates_from_response(r.DNSIncoming(generated.packets()[0])) _, pending = await asyncio.wait(tasks, timeout=2) assert not pending assert info.addresses == [b"\x7f\x00\x00\x01"] diff --git a/tests/services/test_types.py b/tests/services/test_types.py index d9340283..f50ea42c 100644 --- a/tests/services/test_types.py +++ b/tests/services/test_types.py @@ -112,9 +112,7 @@ def test_integration_with_listener_ipv6(disable_duplicate_packet_suppression): ) zeroconf_registrar.registry.async_add(info) try: - service_types = ZeroconfServiceTypes.find( - ip_version=r.IPVersion.V6Only, timeout=2 - ) + service_types = ZeroconfServiceTypes.find(ip_version=r.IPVersion.V6Only, timeout=2) assert type_ in service_types _clear_cache(zeroconf_registrar) service_types = ZeroconfServiceTypes.find(zc=zeroconf_registrar, timeout=2) diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 053ed26b..a765a50a 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -120,12 +120,7 @@ async def test_sync_within_event_loop_executor() -> None: def sync_code(): zc = Zeroconf(interfaces=["127.0.0.1"]) - assert ( - zc.get_service_info( - "_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10 - ) - is None - ) + assert zc.get_service_info("_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10) is None zc.close() await asyncio.get_event_loop().run_in_executor(None, sync_code) @@ -625,13 +620,9 @@ async def test_service_info_async_request() -> None: # Start a tasks BEFORE the registration that will keep trying # and see the registration a bit later - get_service_info_task1 = asyncio.ensure_future( - aiozc.async_get_service_info(type_, registration_name) - ) + get_service_info_task1 = asyncio.ensure_future(aiozc.async_get_service_info(type_, registration_name)) await asyncio.sleep(_LISTENER_TIME / 1000 / 2) - get_service_info_task2 = asyncio.ensure_future( - 
aiozc.async_get_service_info(type_, registration_name) - ) + get_service_info_task2 = asyncio.ensure_future(aiozc.async_get_service_info(type_, registration_name)) desc = {"path": "/~paulsm/"} info = ServiceInfo( @@ -916,14 +907,10 @@ async def test_async_zeroconf_service_types(): await asyncio.sleep(0.2) _clear_cache(zeroconf_registrar.zeroconf) try: - service_types = await AsyncZeroconfServiceTypes.async_find( - interfaces=["127.0.0.1"], timeout=2 - ) + service_types = await AsyncZeroconfServiceTypes.async_find(interfaces=["127.0.0.1"], timeout=2) assert type_ in service_types _clear_cache(zeroconf_registrar.zeroconf) - service_types = await AsyncZeroconfServiceTypes.async_find( - aiozc=zeroconf_registrar, timeout=2 - ) + service_types = await AsyncZeroconfServiceTypes.async_find(aiozc=zeroconf_registrar, timeout=2) assert type_ in service_types finally: @@ -935,9 +922,7 @@ async def test_guard_against_running_serviceinfo_request_event_loop() -> None: """Test that running ServiceInfo.request from the event loop throws.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) - service_info = AsyncServiceInfo( - "_hap._tcp.local.", "doesnotmatter._hap._tcp.local." 
- ) + service_info = AsyncServiceInfo("_hap._tcp.local.", "doesnotmatter._hap._tcp.local.") with pytest.raises(RuntimeError): service_info.request(aiozc.zeroconf, 3000) await aiozc.async_close() @@ -975,9 +960,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) zc.cache.async_add_records( [info.dns_pointer(), info.dns_service(), *info.dns_addresses(), info.dns_text()] ) @@ -1053,9 +1036,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): service_added = asyncio.Event() service_removed = asyncio.Event() - browser = AsyncServiceBrowser( - zeroconf_browser, type_, [on_service_state_change] - ) + browser = AsyncServiceBrowser(zeroconf_browser, type_, [on_service_state_change]) info = ServiceInfo( type_, registration_name, @@ -1230,9 +1211,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[address]) zc.cache.async_add_records( [ info.dns_pointer(), @@ -1303,12 +1282,7 @@ async def test_async_request_timeout(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() start_time = current_time_millis() - assert ( - await aiozc.async_get_service_info( - "_notfound.local.", "notthere._notfound.local." 
- ) - is None - ) + assert await aiozc.async_get_service_info("_notfound.local.", "notthere._notfound.local.") is None end_time = current_time_millis() await aiozc.async_close() # 3000ms for the default timeout @@ -1322,9 +1296,7 @@ async def test_async_request_non_running_instance(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.async_close() with pytest.raises(NotRunningException): - await aiozc.async_get_service_info( - "_notfound.local.", "notthere._notfound.local." - ) + await aiozc.async_get_service_info("_notfound.local.", "notthere._notfound.local.") @pytest.mark.asyncio diff --git a/tests/test_cache.py b/tests/test_cache.py index 4b3859bd..363fcb0e 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -256,10 +256,7 @@ def test_current_entry_with_name_and_alias(self): ) cache = r.DNSCache() cache.async_add_records([record1, record2]) - assert ( - cache.current_entry_with_name_and_alias("irrelevant", "x.irrelevant") - == record1 - ) + assert cache.current_entry_with_name_and_alias("irrelevant", "x.irrelevant") == record1 def test_name(self): record1 = r.DNSService( diff --git a/tests/test_core.py b/tests/test_core.py index 10545357..fc2685fa 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -93,9 +93,7 @@ def test_close_multiple_times(self): def test_launch_and_close_v4_v6(self): rv = r.Zeroconf(interfaces=r.InterfaceChoice.All, ip_version=r.IPVersion.All) rv.close() - rv = r.Zeroconf( - interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.All - ) + rv = r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.All) rv.close() @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") @@ -103,21 +101,15 @@ def test_launch_and_close_v4_v6(self): def test_launch_and_close_v6_only(self): rv = r.Zeroconf(interfaces=r.InterfaceChoice.All, ip_version=r.IPVersion.V6Only) rv.close() - rv = r.Zeroconf( - interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.V6Only - ) + rv = 
r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.V6Only) rv.close() - @unittest.skipIf( - sys.platform == "darwin", reason="apple_p2p failure path not testable on mac" - ) + @unittest.skipIf(sys.platform == "darwin", reason="apple_p2p failure path not testable on mac") def test_launch_and_close_apple_p2p_not_mac(self): with pytest.raises(RuntimeError): r.Zeroconf(apple_p2p=True) - @unittest.skipIf( - sys.platform != "darwin", reason="apple_p2p happy path only testable on mac" - ) + @unittest.skipIf(sys.platform != "darwin", reason="apple_p2p happy path only testable on mac") def test_launch_and_close_apple_p2p_on_mac(self): rv = r.Zeroconf(apple_p2p=True) rv.close() @@ -146,9 +138,7 @@ def mock_incoming_msg( ttl = 0 generated.add_answer_at_time( - r.DNSPointer( - service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name - ), + r.DNSPointer(service_type, const._TYPE_PTR, const._CLASS_IN, ttl, service_name), 0, ) generated.add_answer_at_time( @@ -229,16 +219,10 @@ def mock_split_incoming_msg( try: # service added _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Added)) - dns_text = zeroconf.cache.get_by_details( - service_name, const._TYPE_TXT, const._CLASS_IN - ) + dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) assert dns_text is not None - assert ( - cast(r.DNSText, dns_text).text == service_text - ) # service_text is b'path=/~paulsm/' - all_dns_text = zeroconf.cache.get_all_by_details( - service_name, const._TYPE_TXT, const._CLASS_IN - ) + assert cast(r.DNSText, dns_text).text == service_text # service_text is b'path=/~paulsm/' + all_dns_text = zeroconf.cache.get_all_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) assert [dns_text] == all_dns_text # https://tools.ietf.org/html/rfc6762#section-10.2 @@ -252,35 +236,23 @@ def mock_split_incoming_msg( # service updated. 
currently only text record can be updated service_text = b"path=/~humingchun/" _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Updated)) - dns_text = zeroconf.cache.get_by_details( - service_name, const._TYPE_TXT, const._CLASS_IN - ) + dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) assert dns_text is not None - assert ( - cast(r.DNSText, dns_text).text == service_text - ) # service_text is b'path=/~humingchun/' + assert cast(r.DNSText, dns_text).text == service_text # service_text is b'path=/~humingchun/' time.sleep(1.1) # The split message only has a SRV and A record. # This should not evict TXT records from the cache - _inject_response( - zeroconf, mock_split_incoming_msg(r.ServiceStateChange.Updated) - ) + _inject_response(zeroconf, mock_split_incoming_msg(r.ServiceStateChange.Updated)) time.sleep(1.1) - dns_text = zeroconf.cache.get_by_details( - service_name, const._TYPE_TXT, const._CLASS_IN - ) + dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) assert dns_text is not None - assert ( - cast(r.DNSText, dns_text).text == service_text - ) # service_text is b'path=/~humingchun/' + assert cast(r.DNSText, dns_text).text == service_text # service_text is b'path=/~humingchun/' # service removed _inject_response(zeroconf, mock_incoming_msg(r.ServiceStateChange.Removed)) - dns_text = zeroconf.cache.get_by_details( - service_name, const._TYPE_TXT, const._CLASS_IN - ) + dns_text = zeroconf.cache.get_by_details(service_name, const._TYPE_TXT, const._CLASS_IN) assert dns_text is not None assert dns_text.is_expired(current_time_millis() + 1000) @@ -450,12 +422,7 @@ def test_logging_packets(caplog): def test_get_service_info_failure_path(): """Verify get_service_info return None when the underlying call returns False.""" zc = Zeroconf(interfaces=["127.0.0.1"]) - assert ( - zc.get_service_info( - "_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10 - ) - is None - ) 
+ assert zc.get_service_info("_neverused._tcp.local.", "xneverused._neverused._tcp.local.", 10) is None zc.close() @@ -471,9 +438,7 @@ def test_sending_unicast(): b"path=/~paulsm/", ) generated.add_answer_at_time(entry, 0) - zc.send( - generated, "2001:db8::1", const._MDNS_PORT - ) # https://www.iana.org/go/rfc3849 + zc.send(generated, "2001:db8::1", const._MDNS_PORT) # https://www.iana.org/go/rfc3849 time.sleep(0.2) assert zc.cache.get(entry) is None @@ -783,9 +748,7 @@ def _background_register(): @pytest.mark.asyncio -@unittest.skipIf( - sys.version_info[:3][1] < 8, "Requires Python 3.8 or later to patch _async_setup" -) +@unittest.skipIf(sys.version_info[:3][1] < 8, "Requires Python 3.8 or later to patch _async_setup") @patch("zeroconf._core._STARTUP_TIMEOUT", 0) @patch("zeroconf._core.AsyncEngine._async_setup", new_callable=AsyncMock) async def test_event_loop_blocked(mock_start): diff --git a/tests/test_dns.py b/tests/test_dns.py index b4ac6f88..95d4b553 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -48,9 +48,7 @@ def test_dns_hinfo_repr_eq(self): repr(hinfo) def test_dns_pointer_repr(self): - pointer = r.DNSPointer( - "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123" - ) + pointer = r.DNSPointer("irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123") repr(pointer) @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") @@ -78,9 +76,7 @@ def test_dns_address_repr(self): assert repr(address_ipv6).endswith("::1") def test_dns_question_repr(self): - question = r.DNSQuestion( - "irrelevant", const._TYPE_SRV, const._CLASS_IN | const._CLASS_UNIQUE - ) + question = r.DNSQuestion("irrelevant", const._TYPE_SRV, const._CLASS_IN | const._CLASS_UNIQUE) repr(question) assert not question != question @@ -98,9 +94,7 @@ def test_dns_service_repr(self): repr(service) def test_dns_record_abc(self): - record = r.DNSRecord( - "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL - ) + record = 
r.DNSRecord("irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL) self.assertRaises(r.AbstractMethodException, record.__eq__, record) with pytest.raises((r.AbstractMethodException, TypeError)): record.write(None) # type: ignore[arg-type] @@ -225,12 +219,8 @@ def test_dns_record_hashablity_does_not_consider_ttl(): """Test DNSRecord are hashable.""" # Verify the TTL is not considered in the hash - record1 = r.DNSAddress( - "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b"same" - ) - record2 = r.DNSAddress( - "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same" - ) + record1 = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b"same") + record2 = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same") record_set = {record1, record2} assert len(record_set) == 1 @@ -238,9 +228,7 @@ def test_dns_record_hashablity_does_not_consider_ttl(): record_set.add(record1) assert len(record_set) == 1 - record3_dupe = r.DNSAddress( - "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same" - ) + record3_dupe = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same") assert record2 == record3_dupe assert record2.__hash__() == record3_dupe.__hash__() @@ -259,9 +247,7 @@ def test_dns_record_hashablity_does_not_consider_unique(): const._DNS_OTHER_TTL, b"same", ) - record2 = r.DNSAddress( - "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b"same" - ) + record2 = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_OTHER_TTL, b"same") assert record1.class_ == record2.class_ assert record1.__hash__() == record2.__hash__() @@ -314,12 +300,8 @@ def test_dns_hinfo_record_hashablity(): def test_dns_pointer_record_hashablity(): """Test DNSPointer are hashable.""" - ptr1 = r.DNSPointer( - "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123" - ) - ptr2 = r.DNSPointer( - 
"irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "456" - ) + ptr1 = r.DNSPointer("irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123") + ptr2 = r.DNSPointer("irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "456") record_set = {ptr1, ptr2} assert len(record_set) == 2 @@ -327,9 +309,7 @@ def test_dns_pointer_record_hashablity(): record_set.add(ptr1) assert len(record_set) == 2 - ptr2_dupe = r.DNSPointer( - "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "456" - ) + ptr2_dupe = r.DNSPointer("irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "456") assert ptr2 == ptr2 assert ptr2.__hash__() == ptr2_dupe.__hash__() @@ -339,9 +319,7 @@ def test_dns_pointer_record_hashablity(): def test_dns_pointer_comparison_is_case_insensitive(): """Test DNSPointer comparison is case insensitive.""" - ptr1 = r.DNSPointer( - "irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123" - ) + ptr1 = r.DNSPointer("irrelevant", const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, "123") ptr2 = r.DNSPointer( "irrelevant".upper(), const._TYPE_PTR, @@ -530,12 +508,8 @@ def test_rrset_does_not_consider_ttl(): longarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 100, b"same") shortarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 10, b"same") - longaaaarec = r.DNSAddress( - "irrelevant", const._TYPE_AAAA, const._CLASS_IN, 100, b"same" - ) - shortaaaarec = r.DNSAddress( - "irrelevant", const._TYPE_AAAA, const._CLASS_IN, 10, b"same" - ) + longaaaarec = r.DNSAddress("irrelevant", const._TYPE_AAAA, const._CLASS_IN, 100, b"same") + shortaaaarec = r.DNSAddress("irrelevant", const._TYPE_AAAA, const._CLASS_IN, 10, b"same") rrset = DNSRRSet([longarec, shortaaaarec]) @@ -544,9 +518,7 @@ def test_rrset_does_not_consider_ttl(): assert not rrset.suppresses(longaaaarec) assert rrset.suppresses(shortaaaarec) - verylongarec = r.DNSAddress( - "irrelevant", 
const._TYPE_A, const._CLASS_IN, 1000, b"same" - ) + verylongarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 1000, b"same") longarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 100, b"same") mediumarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 60, b"same") shortarec = r.DNSAddress("irrelevant", const._TYPE_A, const._CLASS_IN, 10, b"same") diff --git a/tests/test_engine.py b/tests/test_engine.py index 7a10b48d..88307e32 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -38,15 +38,9 @@ async def test_reaper(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf = aiozc.zeroconf cache = zeroconf.cache - original_entries = list( - itertools.chain(*(cache.entries_with_name(name) for name in cache.names())) - ) - record_with_10s_ttl = r.DNSAddress( - "a", const._TYPE_SOA, const._CLASS_IN, 10, b"a" - ) - record_with_1s_ttl = r.DNSAddress( - "a", const._TYPE_SOA, const._CLASS_IN, 1, b"b" - ) + original_entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) + record_with_10s_ttl = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 10, b"a") + record_with_1s_ttl = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"b") zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) now = r.current_time_millis() @@ -59,22 +53,14 @@ async def test_reaper(): "known-to-other._hap._tcp.local.", ) } - zeroconf.question_history.add_question_at_time( - question, now, other_known_answers - ) + zeroconf.question_history.add_question_at_time(question, now, other_known_answers) assert zeroconf.question_history.suppresses(question, now, other_known_answers) - entries_with_cache = list( - itertools.chain(*(cache.entries_with_name(name) for name in cache.names())) - ) + entries_with_cache = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) await 
asyncio.sleep(1.2) - entries = list( - itertools.chain(*(cache.entries_with_name(name) for name in cache.names())) - ) + entries = list(itertools.chain(*(cache.entries_with_name(name) for name in cache.names()))) assert zeroconf.cache.get(record_with_1s_ttl) is None await aiozc.async_close() - assert not zeroconf.question_history.suppresses( - question, now, other_known_answers - ) + assert not zeroconf.question_history.suppresses(question, now, other_known_answers) assert entries != original_entries assert entries_with_cache != original_entries assert record_with_10s_ttl in entries @@ -87,12 +73,8 @@ async def test_reaper_aborts_when_done(): with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf = aiozc.zeroconf - record_with_10s_ttl = r.DNSAddress( - "a", const._TYPE_SOA, const._CLASS_IN, 10, b"a" - ) - record_with_1s_ttl = r.DNSAddress( - "a", const._TYPE_SOA, const._CLASS_IN, 1, b"b" - ) + record_with_10s_ttl = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 10, b"a") + record_with_1s_ttl = r.DNSAddress("a", const._TYPE_SOA, const._CLASS_IN, 1, b"b") zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) assert zeroconf.cache.get(record_with_10s_ttl) is not None assert zeroconf.cache.get(record_with_1s_ttl) is not None diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 33eac2d4..1373d6c3 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -38,9 +38,7 @@ def tearDownClass(cls): del cls.browser def test_bad_service_info_name(self): - self.assertRaises( - r.BadTypeInNameException, self.browser.get_service_info, "type", "type_not" - ) + self.assertRaises(r.BadTypeInNameException, self.browser.get_service_info, "type", "type_not") def test_bad_service_names(self): bad_names_to_try = ( @@ -85,9 +83,7 @@ def test_good_instance_names(self): assert r.service_type_name("1.2.3._mqtt._tcp.local.") == "_mqtt._tcp.local." 
assert r.service_type_name("x.sub._http._tcp.local.") == "_http._tcp.local." assert ( - r.service_type_name( - "6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local." - ) + r.service_type_name("6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local.") == "_http._tcp.local." ) @@ -143,10 +139,7 @@ def test_good_service_names(self): for name, result in good_names_to_try: assert r.service_type_name(name) == result - assert ( - r.service_type_name("_one_two._tcp.local.", strict=False) - == "_one_two._tcp.local." - ) + assert r.service_type_name("_one_two._tcp.local.", strict=False) == "_one_two._tcp.local." def test_invalid_addresses(self): type_ = "_test-srvc-type._tcp.local." diff --git a/tests/test_handlers.py b/tests/test_handlers.py index e2e69aea..50816d2b 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -77,7 +77,7 @@ def _process_outgoing_packet(out): """Sends an outgoing packet.""" nonlocal nbr_answers, nbr_additionals, nbr_authorities - for answer, time_ in out.answers: + for answer, _ in out.answers: nbr_answers += 1 assert answer.ttl == get_ttl(answer.type) for answer in out.additionals: @@ -103,16 +103,12 @@ def _process_outgoing_packet(out): query.add_question(r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_SRV, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_TXT, const._CLASS_IN)) - query.add_question( - r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN) - ) + query.add_question(r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN)) question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) assert question_answers - _process_outgoing_packet( - construct_outgoing_multicast_answers(question_answers.mcast_aggregate) - ) + _process_outgoing_packet(construct_outgoing_multicast_answers(question_answers.mcast_aggregate)) # The additonals 
should all be suppresed since they are all in the answers section # There will be one NSEC additional to indicate the lack of AAAA record @@ -146,16 +142,12 @@ def _process_outgoing_packet(out): query.add_question(r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_SRV, const._CLASS_IN)) query.add_question(r.DNSQuestion(info.name, const._TYPE_TXT, const._CLASS_IN)) - query.add_question( - r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN) - ) + query.add_question(r.DNSQuestion(info.server or info.name, const._TYPE_A, const._CLASS_IN)) question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False ) assert question_answers - _process_outgoing_packet( - construct_outgoing_multicast_answers(question_answers.mcast_aggregate) - ) + _process_outgoing_packet(construct_outgoing_multicast_answers(question_answers.mcast_aggregate)) # There will be one NSEC additional to indicate the lack of AAAA record assert nbr_answers == 4 and nbr_additionals == 1 and nbr_authorities == 0 @@ -315,9 +307,7 @@ def test_any_query_for_ptr(): desc = {"path": "/~paulsm/"} server_name = "ash-2.local." 
ipv6_address = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address]) zc.registry.async_add(info) _clear_cache(zc) @@ -325,9 +315,7 @@ def test_any_query_for_ptr(): question = r.DNSQuestion(type_, const._TYPE_ANY, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers mcast_answers = list(question_answers.mcast_aggregate) assert mcast_answers[0].name == type_ @@ -348,18 +336,14 @@ def test_aaaa_query(): desc = {"path": "/~paulsm/"} server_name = "ash-2.local." ipv6_address = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address]) zc.registry.async_add(info) generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(server_name, const._TYPE_AAAA, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers mcast_answers = list(question_answers.mcast_now) assert mcast_answers[0].address == ipv6_address # type: ignore[attr-defined] @@ -379,18 +363,14 @@ def test_aaaa_query_upper_case(): desc = {"path": "/~paulsm/"} server_name = "ash-2.local." 
ipv6_address = socket.inet_pton(socket.AF_INET6, "2001:db8::1") - info = ServiceInfo( - type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address] - ) + info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, server_name, addresses=[ipv6_address]) zc.registry.async_add(info) generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(server_name.upper(), const._TYPE_AAAA, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers mcast_answers = list(question_answers.mcast_now) assert mcast_answers[0].address == ipv6_address # type: ignore[attr-defined] @@ -431,9 +411,7 @@ def test_a_and_aaaa_record_fate_sharing(): question = r.DNSQuestion(server_name, const._TYPE_AAAA, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers additionals = set().union(*question_answers.mcast_now.values()) assert aaaa_record in question_answers.mcast_now @@ -446,9 +424,7 @@ def test_a_and_aaaa_record_fate_sharing(): question = r.DNSQuestion(server_name, const._TYPE_A, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers additionals = set().union(*question_answers.mcast_now.values()) assert a_record in question_answers.mcast_now @@ -709,7 +685,8 @@ def 
_validate_complete_response(answers): assert not question_answers.mcast_aggregate _validate_complete_response(question_answers.mcast_now) - # With QU set and an authorative answer (probe) should respond to both unitcast and multicast since the response hasn't been seen since 75% of the ttl + # With QU set and an authorative answer (probe) should respond to both unitcast + # and multicast since the response hasn't been seen since 75% of the ttl query = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN) question.unicast = True # Set the QU bit @@ -725,11 +702,7 @@ def _validate_complete_response(answers): _inject_response( zc, - r.DNSIncoming( - construct_outgoing_multicast_answers(question_answers.mcast_now).packets()[ - 0 - ] - ), + r.DNSIncoming(construct_outgoing_multicast_answers(question_answers.mcast_now).packets()[0]), ) # With the cache repopulated; should respond to only unicast when the answer has been recently multicast query = r.DNSOutgoing(const._FLAGS_QR_QUERY) @@ -776,9 +749,7 @@ def test_known_answer_supression(): question = r.DNSQuestion(type_, const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -790,9 +761,7 @@ def test_known_answer_supression(): generated.add_question(question) generated.add_answer_at_time(info.dns_pointer(), now) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast 
assert not question_answers.mcast_now @@ -804,9 +773,7 @@ def test_known_answer_supression(): question = r.DNSQuestion(server_name, const._TYPE_A, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert question_answers.mcast_now @@ -819,9 +786,7 @@ def test_known_answer_supression(): for dns_address in info.dns_addresses(): generated.add_answer_at_time(dns_address, now) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -835,12 +800,10 @@ def test_known_answer_supression(): for dns_address in info.dns_addresses(): generated.add_answer_at_time(dns_address, now) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast - expected_nsec_record = cast(r.DNSNsec, list(question_answers.mcast_now)[0]) + expected_nsec_record = cast(r.DNSNsec, next(iter(question_answers.mcast_now))) assert const._TYPE_A not in expected_nsec_record.rdtypes assert const._TYPE_AAAA in expected_nsec_record.rdtypes assert not question_answers.mcast_aggregate @@ -851,9 +814,7 @@ def test_known_answer_supression(): question = r.DNSQuestion(registration_name, const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - 
question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert question_answers.mcast_now @@ -865,9 +826,7 @@ def test_known_answer_supression(): generated.add_question(question) generated.add_answer_at_time(info.dns_service(), now) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -879,9 +838,7 @@ def test_known_answer_supression(): question = r.DNSQuestion(registration_name, const._TYPE_TXT, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -893,9 +850,7 @@ def test_known_answer_supression(): generated.add_question(question) generated.add_answer_at_time(info.dns_text(), now) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -970,9 +925,7 @@ def test_multi_packet_known_answer_supression(): generated.add_answer_at_time(info3.dns_pointer(), now) packets = generated.packets() assert len(packets) > 1 - question_answers = 
zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -1025,14 +978,10 @@ def test_known_answer_supression_service_type_enumeration_query(): # Test PTR supression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion( - const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN - ) + question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -1040,9 +989,7 @@ def test_known_answer_supression_service_type_enumeration_query(): assert not question_answers.mcast_aggregate_last_second generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion( - const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN - ) + question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) generated.add_answer_at_time( r.DNSPointer( @@ -1065,9 +1012,7 @@ def test_known_answer_supression_service_type_enumeration_query(): now, ) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -1119,14 +1064,10 @@ def 
test_upper_case_enumeration_query(): # Test PTR supression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion( - const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN - ) + question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -1142,14 +1083,10 @@ def test_enumeration_query_with_no_registered_services(): zc = Zeroconf(interfaces=["127.0.0.1"]) _clear_cache(zc) generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - question = r.DNSQuestion( - const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN - ) + question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert not question_answers # unregister zc.close() @@ -1205,9 +1142,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): # Add the A record to the cache with 50% ttl remaining a_record = info.dns_addresses()[0] - a_record.set_created_ttl( - current_time_millis() - (a_record.ttl * 1000 / 2), a_record.ttl - ) + a_record.set_created_ttl(current_time_millis() - (a_record.ttl * 1000 / 2), a_record.ttl) assert not a_record.is_recent(current_time_millis()) info._dns_address_cache = None # we are mutating the record so clear the cache zc.cache.async_add_records([a_record]) @@ 
-1258,9 +1193,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): # Remove the 100% PTR record and add a 50% PTR record zc.cache.async_remove_records([ptr_record]) - ptr_record.set_created_ttl( - current_time_millis() - (ptr_record.ttl * 1000 / 2), ptr_record.ttl - ) + ptr_record.set_created_ttl(current_time_millis() - (ptr_record.ttl * 1000 / 2), ptr_record.ttl) assert not ptr_record.is_recent(current_time_millis()) zc.cache.async_add_records([ptr_record]) # With QU should respond to only multicast since the has less @@ -1285,7 +1218,8 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): assert ptr_record in question_answers.mcast_now # Ask 2 QU questions, with info the PTR is at 50%, with info2 the PTR is at 100% - # We should get back a unicast reply for info2, but info should be multicasted since its within 75% of its TTL + # We should get back a unicast reply for info2, but info should be + # multicasted since its within 75% of its TTL # With QU should respond to only multicast since the has less # than 75% of its ttl remaining query = r.DNSOutgoing(const._FLAGS_QR_QUERY) @@ -1298,9 +1232,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): question.unicast = True # Set the QU bit assert question.unicast is True query.add_question(question) - zc.cache.async_add_records( - [info2.dns_pointer()] - ) # Add 100% TTL for info2 to the cache + zc.cache.async_add_records([info2.dns_pointer()]) # Add 100% TTL for info2 to the cache question_answers = zc.query_handler.async_response( [r.DNSIncoming(packet) for packet in query.packets()], False @@ -1351,9 +1283,7 @@ async def test_cache_flush_bit(): addresses=[socket.inet_aton("10.0.1.2")], ) a_record = info.dns_addresses()[0] - zc.cache.async_add_records( - [info.dns_pointer(), a_record, info.dns_text(), info.dns_service()] - ) + zc.cache.async_add_records([info.dns_pointer(), a_record, info.dns_text(), info.dns_service()]) info.addresses = 
[socket.inet_aton("10.0.1.5"), socket.inet_aton("10.0.1.6")] new_records = info.dns_addresses() @@ -1402,9 +1332,7 @@ async def test_cache_flush_bit(): assert cached_record is not None assert cached_record.ttl == 1 - for entry in zc.cache.async_all_by_details( - server_name, const._TYPE_A, const._CLASS_IN - ): + for entry in zc.cache.async_all_by_details(server_name, const._TYPE_A, const._CLASS_IN): assert isinstance(entry, r.DNSAddress) if entry.address == fresh_address: assert entry.ttl > 1 @@ -1434,9 +1362,7 @@ async def test_record_update_manager_add_listener_callsback_existing_records(): class MyListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records( - self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate] - ) -> None: + def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -1457,9 +1383,7 @@ def async_update_records( ) a_record = info.dns_addresses()[0] ptr_record = info.dns_pointer() - zc.cache.async_add_records( - [ptr_record, a_record, info.dns_text(), info.dns_service()] - ) + zc.cache.async_add_records([ptr_record, a_record, info.dns_text(), info.dns_service()]) listener = MyListener() @@ -1516,9 +1440,7 @@ async def test_questions_query_handler_populates_the_question_history_from_qm_qu generated.add_answer_at_time(known_answer, 0) now = r.current_time_millis() packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert not question_answers.ucast assert not question_answers.mcast_now @@ -1560,9 +1482,7 @@ async def test_questions_query_handler_does_not_put_qu_questions_in_history(): generated.add_answer_at_time(known_answer, 0) now = 
r.current_time_millis() packets = generated.packets() - question_answers = zc.query_handler.async_response( - [r.DNSIncoming(packet) for packet in packets], False - ) + question_answers = zc.query_handler.async_response([r.DNSIncoming(packet) for packet in packets], False) assert question_answers assert "qu._hap._tcp.local." in str(question_answers) assert not question_answers.ucast # has not multicast recently @@ -1576,7 +1496,7 @@ async def test_questions_query_handler_does_not_put_qu_questions_in_history(): @pytest.mark.asyncio async def test_guard_against_low_ptr_ttl(): - """Ensure we enforce a minimum for PTR record ttls to avoid excessive refresh queries from ServiceBrowsers. + """Ensure we enforce a min for PTR record ttls to avoid excessive refresh queries from ServiceBrowsers. Some poorly designed IoT devices can set excessively low PTR TTLs would will cause ServiceBrowsers to flood the network @@ -1780,9 +1700,7 @@ async def test_response_aggregation_timings(run_isolated): @pytest.mark.asyncio -async def test_response_aggregation_timings_multiple( - run_isolated, disable_duplicate_packet_suppression -): +async def test_response_aggregation_timings_multiple(run_isolated, disable_duplicate_packet_suppression): """Verify multicast responses that are aggregated do not take longer than 620ms to send. 
620ms is the maximum random delay of 120ms and 500ms additional for aggregation.""" @@ -1817,9 +1735,7 @@ async def test_response_aggregation_timings_multiple( with patch.object(aiozc.zeroconf, "async_send") as send_mock: send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) - protocol.last_time = ( - 0 # manually reset the last time to avoid duplicate packet suppression - ) + protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression await asyncio.sleep(0.2) calls = send_mock.mock_calls assert len(calls) == 1 @@ -1830,9 +1746,7 @@ async def test_response_aggregation_timings_multiple( send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) - protocol.last_time = ( - 0 # manually reset the last time to avoid duplicate packet suppression - ) + protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression await asyncio.sleep(1.2) calls = send_mock.mock_calls assert len(calls) == 1 @@ -1843,13 +1757,9 @@ async def test_response_aggregation_timings_multiple( send_mock.reset_mock() protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) - protocol.last_time = ( - 0 # manually reset the last time to avoid duplicate packet suppression - ) + protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression protocol.datagram_received(query2.packets()[0], ("127.0.0.1", const._MDNS_PORT)) - protocol.last_time = ( - 0 # manually reset the last time to avoid duplicate packet suppression - ) + protocol.last_time = 0 # manually reset the last time to avoid duplicate packet suppression # The delay should increase with two packets and # 900ms is beyond the maximum aggregation delay # when there is no network protection delay @@ -1958,9 +1868,7 @@ async def test_response_aggregation_random_delay(): # The third group should always be coalesced into first group since it will 
always come before outgoing_queue._multicast_delay_random_min = 100 outgoing_queue._multicast_delay_random_max = 200 - outgoing_queue.async_add( - now, {info3.dns_pointer(): set(), info4.dns_pointer(): set()} - ) + outgoing_queue.async_add(now, {info3.dns_pointer(): set(), info4.dns_pointer(): set()}) assert len(outgoing_queue.queue) == 1 assert info.dns_pointer() in outgoing_queue.queue[0].answers @@ -2056,17 +1964,14 @@ async def test_add_listener_warns_when_not_using_record_update_listener(caplog): class MyListener: """A RecordUpdateListener that does not implement update_records.""" - def async_update_records( - self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate] - ) -> None: + def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) zc.add_listener(MyListener(), None) # type: ignore[arg-type] await asyncio.sleep(0) # flush out any call soons assert ( - "listeners passed to async_add_listener must inherit from RecordUpdateListener" - in caplog.text + "listeners passed to async_add_listener must inherit from RecordUpdateListener" in caplog.text or "TypeError: Argument 'listener' has incorrect type" in caplog.text ) @@ -2091,9 +1996,7 @@ async def test_async_updates_iteration_safe(): class OtherListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records( - self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate] - ) -> None: + def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -2102,9 +2005,7 @@ def async_update_records( class ListenerThatAddsListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records( - self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate] - ) -> None: + 
def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) zc.async_add_listener(other, None) diff --git a/tests/test_init.py b/tests/test_init.py index 3ba285d5..d7a01224 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -173,9 +173,7 @@ def generate_many_hosts(self, zc, type_, name, number_hosts): def generate_host(out, host_name, type_): name = ".".join((host_name, type_)) out.add_answer_at_time( - r.DNSPointer( - type_, const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, name - ), + r.DNSPointer(type_, const._TYPE_PTR, const._CLASS_IN, const._DNS_OTHER_TTL, name), 0, ) out.add_answer_at_time( diff --git a/tests/test_listener.py b/tests/test_listener.py index 6faab4e8..f6752af7 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -47,7 +47,7 @@ def test_guard_against_oversized_packets(): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - for i in range(5000): + for _i in range(5000): generated.add_answer_at_time( r.DNSText( "packet{i}.local.", @@ -281,9 +281,7 @@ def handle_query_or_defer( _handle_query_or_defer.reset_mock() # Now call with garbage - listener._process_datagram_at_time( - False, len(b"garbage"), new_time, b"garbage", addrs - ) + listener._process_datagram_at_time(False, len(b"garbage"), new_time, b"garbage", addrs) _handle_query_or_defer.assert_not_called() _handle_query_or_defer.reset_mock() diff --git a/tests/test_protocol.py b/tests/test_protocol.py index e682a34c..ee9ed930 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -49,9 +49,7 @@ def test_parse_own_packet_flags(self): def test_parse_own_packet_question(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) - generated.add_question( - r.DNSQuestion("testname.local.", const._TYPE_SRV, const._CLASS_IN) - ) + generated.add_question(r.DNSQuestion("testname.local.", const._TYPE_SRV, const._CLASS_IN)) 
r.DNSIncoming(generated.packets()[0]) def test_parse_own_packet_nsec(self): @@ -252,18 +250,14 @@ def test_suppress_answer(self): def test_dns_hinfo(self): generated = r.DNSOutgoing(0) - generated.add_additional_answer( - DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu", "os") - ) + generated.add_additional_answer(DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu", "os")) parsed = r.DNSIncoming(generated.packets()[0]) answer = cast(r.DNSHinfo, parsed.answers()[0]) assert answer.cpu == "cpu" assert answer.os == "os" generated = r.DNSOutgoing(0) - generated.add_additional_answer( - DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu", "x" * 257) - ) + generated.add_additional_answer(DNSHinfo("irrelevant", const._TYPE_HINFO, 0, 0, "cpu", "x" * 257)) self.assertRaises(r.NamePartTooLongException, generated.packets) def test_many_questions(self): @@ -271,9 +265,7 @@ def test_many_questions(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) questions = [] for i in range(100): - question = r.DNSQuestion( - f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN - ) + question = r.DNSQuestion(f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) questions.append(question) assert len(generated.questions) == 100 @@ -293,9 +285,7 @@ def test_many_questions_with_many_known_answers(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) questions = [] for _ in range(30): - question = r.DNSQuestion( - "_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN - ) + question = r.DNSQuestion("_hap._tcp.local.", const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) questions.append(question) assert len(generated.questions) == 30 @@ -378,7 +368,7 @@ def test_only_one_answer_can_by_large(self): """ generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) query = r.DNSIncoming(r.DNSOutgoing(const._FLAGS_QR_QUERY).packets()[0]) - for i in range(3): + for _i in range(3): generated.add_answer( query, r.DNSText( @@ -433,9 +423,7 @@ def 
test_questions_do_not_end_up_every_packet(self): generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) for i in range(35): - question = r.DNSQuestion( - f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN - ) + question = r.DNSQuestion(f"testname{i}.local.", const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) answer = r.DNSService( f"testname{i}.local.", @@ -494,9 +482,7 @@ def test_response_header_bits(self): def test_numbers(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) bytes = generated.packets()[0] - (num_questions, num_answers, num_authorities, num_additionals) = struct.unpack( - "!4H", bytes[4:12] - ) + (num_questions, num_answers, num_authorities, num_additionals) = struct.unpack("!4H", bytes[4:12]) assert num_questions == 0 assert num_answers == 0 assert num_authorities == 0 @@ -505,12 +491,10 @@ def test_numbers(self): def test_numbers_questions(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) question = r.DNSQuestion("testname.local.", const._TYPE_SRV, const._CLASS_IN) - for i in range(10): + for _i in range(10): generated.add_question(question) bytes = generated.packets()[0] - (num_questions, num_answers, num_authorities, num_additionals) = struct.unpack( - "!4H", bytes[4:12] - ) + (num_questions, num_answers, num_authorities, num_additionals) = struct.unpack("!4H", bytes[4:12]) assert num_questions == 10 assert num_answers == 0 assert num_authorities == 0 @@ -551,9 +535,7 @@ def test_incoming_ipv6(self): addr = "2606:2800:220:1:248:1893:25c8:1946" # example.com packed = socket.inet_pton(socket.AF_INET6, addr) generated = r.DNSOutgoing(0) - answer = r.DNSAddress( - "domain", const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed - ) + answer = r.DNSAddress("domain", const._TYPE_AAAA, const._CLASS_IN | const._CLASS_UNIQUE, 1, packed) generated.add_additional_answer(answer) packet = generated.packets()[0] parsed = r.DNSIncoming(packet) @@ -715,9 +697,7 @@ def test_dns_compression_rollback_for_corruption(): 
assert incoming.valid is True assert ( len(incoming.answers()) - == incoming.num_answers - + incoming.num_authorities - + incoming.num_additionals + == incoming.num_answers + incoming.num_authorities + incoming.num_additionals ) @@ -788,16 +768,18 @@ def test_tc_bit_not_set_in_answer_packet(): assert third_packet.valid is True -# 4003 15.973052 192.168.107.68 224.0.0.251 MDNS 76 Standard query 0xffc4 PTR _raop._tcp.local, "QM" question +# MDNS 76 Standard query 0xffc4 PTR _raop._tcp.local, "QM" question def test_qm_packet_parser(): """Test we can parse a query packet with the QM bit.""" - qm_packet = b"\xff\xc4\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x05_raop\x04_tcp\x05local\x00\x00\x0c\x00\x01" + qm_packet = ( + b"\xff\xc4\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x05_raop\x04_tcp\x05local\x00\x00\x0c\x00\x01" + ) parsed = DNSIncoming(qm_packet) assert parsed.questions[0].unicast is False assert ",QM," in str(parsed.questions[0]) -# 389951 1450.577370 192.168.107.111 224.0.0.251 MDNS 115 Standard query 0x0000 PTR _companion-link._tcp.local, "QU" question OPT +# MDNS 115 Standard query 0x0000 PTR _companion-link._tcp.local, "QU" question OPT def test_qu_packet_parser(): """Test we can parse a query packet with the QU bit.""" qu_packet = ( diff --git a/tests/test_services.py b/tests/test_services.py index 8145ae60..7cc075e7 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -9,7 +9,8 @@ import time import unittest from threading import Event -from typing import Dict, Any +from typing import Any, Dict + import pytest import zeroconf as r @@ -83,14 +84,14 @@ def update_service(self, zeroconf, type, name): zeroconf_browser = Zeroconf(interfaces=["127.0.0.1"]) zeroconf_browser.add_service_listener(type_, listener) - properties = dict( - prop_none=None, - prop_string=b"a_prop", - prop_float=1.0, - prop_blank=b"a blanked string", - prop_true=1, - prop_false=0, - ) + properties = { + "prop_none": None, + "prop_string": b"a_prop", + "prop_float": 1.0, + 
"prop_blank": b"a blanked string", + "prop_true": 1, + "prop_false": 0, + } zeroconf_registrar = Zeroconf(interfaces=["127.0.0.1"]) desc: Dict[str, Any] = {"path": "/~paulsm/"} @@ -141,9 +142,7 @@ def update_service(self, zeroconf, type, name): assert info.decoded_properties["prop_none"] is None assert info.decoded_properties["prop_string"] == b"a_prop".decode("utf-8") assert info.decoded_properties["prop_float"] == "1.0" - assert info.decoded_properties["prop_blank"] == b"a blanked string".decode( - "utf-8" - ) + assert info.decoded_properties["prop_blank"] == b"a blanked string".decode("utf-8") assert info.decoded_properties["prop_true"] == "1" assert info.decoded_properties["prop_false"] == "0" @@ -207,17 +206,13 @@ def update_service(self, zeroconf, type, name): info = zeroconf_browser.get_service_info(type_, registration_name) assert info is not None assert info.properties[b"prop_blank"] == properties["prop_blank"] - assert info.decoded_properties["prop_blank"] == b"an updated string".decode( - "utf-8" - ) + assert info.decoded_properties["prop_blank"] == b"an updated string".decode("utf-8") cached_info = ServiceInfo(subtype, registration_name) cached_info.load_from_cache(zeroconf_browser) assert cached_info.properties is not None assert cached_info.properties[b"prop_blank"] == properties["prop_blank"] - assert cached_info.decoded_properties[ - "prop_blank" - ] == b"an updated string".decode("utf-8") + assert cached_info.decoded_properties["prop_blank"] == b"an updated string".decode("utf-8") zeroconf_registrar.unregister_service(info_service) service_removed.wait(1) diff --git a/tests/test_updates.py b/tests/test_updates.py index 4cffc0f6..2ebaee89 100644 --- a/tests/test_updates.py +++ b/tests/test_updates.py @@ -40,9 +40,7 @@ def test_legacy_record_update_listener(): r.RecordUpdateListener().update_record( zc, 0, - r.DNSRecord( - "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL - ), + r.DNSRecord("irrelevant", const._TYPE_SRV, 
const._CLASS_IN, const._DNS_HOST_TTL), ) updates = [] @@ -50,9 +48,7 @@ def test_legacy_record_update_listener(): class LegacyRecordUpdateListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def update_record( - self, zc: "Zeroconf", now: float, record: r.DNSRecord - ) -> None: + def update_record(self, zc: "Zeroconf", now: float, record: r.DNSRecord) -> None: nonlocal updates updates.append(record) @@ -87,15 +83,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): browser.cancel() assert len(updates) - assert ( - len( - [ - isinstance(update, r.DNSPointer) and update.name == type_ - for update in updates - ] - ) - >= 1 - ) + assert len([isinstance(update, r.DNSPointer) and update.name == type_ for update in updates]) >= 1 zc.remove_listener(listener) # Removing a second time should not throw @@ -106,12 +94,8 @@ def on_service_state_change(zeroconf, service_type, state_change, name): def test_record_update_compat(): """Test a RecordUpdate can fetch by index.""" - new = r.DNSPointer( - "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, "new" - ) - old = r.DNSPointer( - "irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, "old" - ) + new = r.DNSPointer("irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, "new") + old = r.DNSPointer("irrelevant", const._TYPE_SRV, const._CLASS_IN, const._DNS_HOST_TTL, "old") update = RecordUpdate(new, old) assert update[0] == new assert update[1] == old diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index cf4b4e8e..7b086fbc 100644 --- a/tests/utils/test_asyncio.py +++ b/tests/utils/test_asyncio.py @@ -115,9 +115,7 @@ def test_cumulative_timeouts_less_than_close_plus_buffer(): raised if something goes wrong. 
""" assert ( - aioutils._TASK_AWAIT_TIMEOUT - + aioutils._GET_ALL_TASKS_TIMEOUT - + aioutils._WAIT_FOR_LOOP_TASKS_TIMEOUT + aioutils._TASK_AWAIT_TIMEOUT + aioutils._GET_ALL_TASKS_TIMEOUT + aioutils._WAIT_FOR_LOOP_TASKS_TIMEOUT ) < 1 + _CLOSE_TIMEOUT + _LOADED_SYSTEM_TIMEOUT @@ -136,9 +134,7 @@ async def _saved_sleep_task(): def _run_in_loop(): aioutils.run_coro_with_timeout(_saved_sleep_task(), loop, 0.1) - with pytest.raises(EventLoopBlocked), patch.object( - aioutils, "_LOADED_SYSTEM_TIMEOUT", 0.0 - ): + with pytest.raises(EventLoopBlocked), patch.object(aioutils, "_LOADED_SYSTEM_TIMEOUT", 0.0): await loop.run_in_executor(None, _run_in_loop) assert task is not None diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index 4066eba4..35803c7e 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ -16,11 +16,7 @@ def test_cached_ip_addresses_wrapper(): assert ipaddress.cached_ip_addresses("") is None assert ipaddress.cached_ip_addresses("foo") is None assert ( - str( - ipaddress.cached_ip_addresses( - b"&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F" - ) - ) + str(ipaddress.cached_ip_addresses(b"&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F")) == "2606:2800:220:1:248:1893:25c8:1946" ) assert ipaddress.cached_ip_addresses("::1") == ipaddress.IPv6Address("::1") @@ -75,9 +71,7 @@ def test_get_ip_address_object_from_record(): scope_id=3, ) assert record.scope_id == 3 - assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( - "fe80::1%3" - ) + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address("fe80::1%3") record = DNSAddress( "domain.local", const._TYPE_AAAA, @@ -86,9 +80,7 @@ def test_get_ip_address_object_from_record(): packed, ) assert record.scope_id is None - assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( - "fe80::1" - ) + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address("fe80::1") 
record = DNSAddress( "domain.local", const._TYPE_A, @@ -99,6 +91,4 @@ def test_get_ip_address_object_from_record(): ) assert record.scope_id == 0 # Ensure scope_id of 0 is not appended to the address - assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address( - "fe80::1" - ) + assert ipaddress.get_ip_address_object_from_record(record) == ipaddress.IPv6Address("fe80::1") diff --git a/tests/utils/test_name.py b/tests/utils/test_name.py index d4c57c40..c814e094 100644 --- a/tests/utils/test_name.py +++ b/tests/utils/test_name.py @@ -25,9 +25,7 @@ def test_service_type_name_overlong_full_name(): with pytest.raises(BadTypeInNameException): nameutils.service_type_name(f"{long_name}._tivo-videostream._tcp.local.") with pytest.raises(BadTypeInNameException): - nameutils.service_type_name( - f"{long_name}._tivo-videostream._tcp.local.", strict=False - ) + nameutils.service_type_name(f"{long_name}._tivo-videostream._tcp.local.", strict=False) @pytest.mark.parametrize( @@ -69,17 +67,13 @@ def test_possible_types(): assert nameutils.possible_types(".") == set() assert nameutils.possible_types("local.") == set() assert nameutils.possible_types("_tcp.local.") == set() - assert nameutils.possible_types("_test-srvc-type._tcp.local.") == { - "_test-srvc-type._tcp.local." - } + assert nameutils.possible_types("_test-srvc-type._tcp.local.") == {"_test-srvc-type._tcp.local."} assert nameutils.possible_types("_any._tcp.local.") == {"_any._tcp.local."} assert nameutils.possible_types(".._x._tcp.local.") == {"_x._tcp.local."} assert nameutils.possible_types("x.y._http._tcp.local.") == {"_http._tcp.local."} assert nameutils.possible_types("1.2.3._mqtt._tcp.local.") == {"_mqtt._tcp.local."} assert nameutils.possible_types("x.sub._http._tcp.local.") == {"_http._tcp.local."} - assert nameutils.possible_types( - "6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local." 
- ) == { + assert nameutils.possible_types("6d86f882b90facee9170ad3439d72a4d6ee9f511._zget._http._tcp.local.") == { "_http._tcp.local.", "_zget._http._tcp.local.", } diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 5a229b0d..a89ea565 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -71,18 +71,14 @@ def test_ip6_addresses_to_indexes(): "zeroconf._utils.net.ifaddr.get_adapters", return_value=_generate_mock_adapters(), ): - assert netutils.ip6_addresses_to_indexes(interfaces) == [ - (("2001:db8::", 1, 1), 1) - ] + assert netutils.ip6_addresses_to_indexes(interfaces) == [(("2001:db8::", 1, 1), 1)] interfaces_2 = ["2001:db8::"] with patch( "zeroconf._utils.net.ifaddr.get_adapters", return_value=_generate_mock_adapters(), ): - assert netutils.ip6_addresses_to_indexes(interfaces_2) == [ - (("2001:db8::", 1, 1), 1) - ] + assert netutils.ip6_addresses_to_indexes(interfaces_2) == [(("2001:db8::", 1, 1), 1)] def test_normalize_interface_choice_errors(): @@ -108,9 +104,7 @@ def test_normalize_interface_choice_errors(): def test_add_multicast_member_socket_errors(errno, expected_result): """Test we handle socket errors when adding multicast members.""" if errno: - setsockopt_mock = unittest.mock.Mock( - side_effect=OSError(errno, f"Error: {errno}") - ) + setsockopt_mock = unittest.mock.Mock(side_effect=OSError(errno, f"Error: {errno}")) else: setsockopt_mock = unittest.mock.Mock() fileno_mock = unittest.mock.PropertyMock(return_value=10) @@ -146,18 +140,14 @@ def _log_error(*args): ) -@pytest.mark.skipif( - not hasattr(socket, "SO_REUSEPORT"), reason="System does not have SO_REUSEPORT" -) +@pytest.mark.skipif(not hasattr(socket, "SO_REUSEPORT"), reason="System does not have SO_REUSEPORT") def test_set_so_reuseport_if_available_is_present(): """Test that setting socket.SO_REUSEPORT only OSError errno.ENOPROTOOPT is trapped.""" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) with pytest.raises(OSError), 
patch("socket.socket.setsockopt", side_effect=OSError): netutils.set_so_reuseport_if_available(sock) - with patch( - "socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None) - ): + with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): netutils.set_so_reuseport_if_available(sock) @@ -170,30 +160,22 @@ def test_set_so_reuseport_if_available_not_present(): def test_set_mdns_port_socket_options_for_ip_version(): - """Test OSError with errno with EINVAL and bind address '' from setsockopt IP_MULTICAST_TTL does not raise.""" + """Test OSError with errno with EINVAL and bind address ''. + + from setsockopt IP_MULTICAST_TTL does not raise.""" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # Should raise on EPERM always - with pytest.raises(OSError), patch( - "socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None) - ): - netutils.set_mdns_port_socket_options_for_ip_version( - sock, ("",), r.IPVersion.V4Only - ) + with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)): + netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) # Should raise on EINVAL always when bind address is not '' - with pytest.raises(OSError), patch( - "socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None) - ): - netutils.set_mdns_port_socket_options_for_ip_version( - sock, ("127.0.0.1",), r.IPVersion.V4Only - ) + with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): + netutils.set_mdns_port_socket_options_for_ip_version(sock, ("127.0.0.1",), r.IPVersion.V4Only) # Should not raise on EINVAL when bind address is '' with patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): - netutils.set_mdns_port_socket_options_for_ip_version( - sock, ("",), r.IPVersion.V4Only - ) + netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) def 
test_add_multicast_member(): @@ -201,9 +183,7 @@ def test_add_multicast_member(): interface = "127.0.0.1" # EPERM should always raise - with pytest.raises(OSError), patch( - "socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None) - ): + with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)): netutils.add_multicast_member(sock, interface) # EADDRINUSE should return False @@ -211,9 +191,7 @@ def test_add_multicast_member(): assert netutils.add_multicast_member(sock, interface) is False # EADDRNOTAVAIL should return False - with patch( - "socket.socket.setsockopt", side_effect=OSError(errno.EADDRNOTAVAIL, None) - ): + with patch("socket.socket.setsockopt", side_effect=OSError(errno.EADDRNOTAVAIL, None)): assert netutils.add_multicast_member(sock, interface) is False # EINVAL should return False @@ -221,16 +199,12 @@ def test_add_multicast_member(): assert netutils.add_multicast_member(sock, interface) is False # ENOPROTOOPT should return False - with patch( - "socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None) - ): + with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): assert netutils.add_multicast_member(sock, interface) is False # ENODEV should raise for ipv4 - with pytest.raises(OSError), patch( - "socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None) - ): - netutils.add_multicast_member(sock, interface) is False + with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None)): + assert netutils.add_multicast_member(sock, interface) is False # ENODEV should return False for ipv6 with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None)): From 2ca71027fd8d3a92f44874e0945029e206d986e1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 26 Aug 2024 15:39:53 -1000 Subject: [PATCH 241/434] chore: bump cython to 3.0.11 (#1402) --- poetry.lock | 128 ++++++++++++++++++++++++++++------------------------ 1 file changed, 68 insertions(+), 60 deletions(-) diff --git a/poetry.lock b/poetry.lock index af80c226..a79e019b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -111,69 +111,77 @@ toml = ["tomli"] [[package]] name = "cython" -version = "3.0.8" +version = "3.0.11" description = "The Cython compiler for writing C extensions in the Python language." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "Cython-3.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a846e0a38e2b24e9a5c5dc74b0e54c6e29420d88d1dafabc99e0fc0f3e338636"}, - {file = "Cython-3.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45523fdc2b78d79b32834cc1cc12dc2ca8967af87e22a3ee1bff20e77c7f5520"}, - {file = "Cython-3.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa0b7f3f841fe087410cab66778e2d3fb20ae2d2078a2be3dffe66c6574be39"}, - {file = "Cython-3.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e87294e33e40c289c77a135f491cd721bd089f193f956f7b8ed5aa2d0b8c558f"}, - {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a1df7a129344b1215c20096d33c00193437df1a8fcca25b71f17c23b1a44f782"}, - {file = "Cython-3.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:13c2a5e57a0358da467d97667297bf820b62a1a87ae47c5f87938b9bb593acbd"}, - {file = "Cython-3.0.8-cp310-cp310-win32.whl", hash = "sha256:96b028f044f5880e3cb18ecdcfc6c8d3ce9d0af28418d5ab464509f26d8adf12"}, - {file = "Cython-3.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:8140597a8b5cc4f119a1190f5a2228a84f5ca6d8d9ec386cfce24663f48b2539"}, - {file = "Cython-3.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:aae26f9663e50caf9657148403d9874eea41770ecdd6caf381d177c2b1bb82ba"}, - {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:547eb3cdb2f8c6f48e6865d5a741d9dd051c25b3ce076fbca571727977b28ac3"}, - {file = "Cython-3.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a567d4b9ba70b26db89d75b243529de9e649a2f56384287533cf91512705bee"}, - {file = "Cython-3.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51d1426263b0e82fb22bda8ea60dc77a428581cc19e97741011b938445d383f1"}, - {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c26daaeccda072459b48d211415fd1e5507c06bcd976fa0d5b8b9f1063467d7b"}, - {file = "Cython-3.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:289ce7838208211cd166e975865fd73b0649bf118170b6cebaedfbdaf4a37795"}, - {file = "Cython-3.0.8-cp311-cp311-win32.whl", hash = "sha256:c8aa05f5e17f8042a3be052c24f2edc013fb8af874b0bf76907d16c51b4e7871"}, - {file = "Cython-3.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:000dc9e135d0eec6ecb2b40a5b02d0868a2f8d2e027a41b0fe16a908a9e6de02"}, - {file = "Cython-3.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:90d3fe31db55685d8cb97d43b0ec39ef614fcf660f83c77ed06aa670cb0e164f"}, - {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e24791ddae2324e88e3c902a765595c738f19ae34ee66bfb1a6dac54b1833419"}, - {file = "Cython-3.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f020fa1c0552052e0660790b8153b79e3fc9a15dbd8f1d0b841fe5d204a6ae6"}, - {file = "Cython-3.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18bfa387d7a7f77d7b2526af69a65dbd0b731b8d941aaff5becff8e21f6d7717"}, - {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fe81b339cffd87c0069c6049b4d33e28bdd1874625ee515785bf42c9fdff3658"}, 
- {file = "Cython-3.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80fd94c076e1e1b1ee40a309be03080b75f413e8997cddcf401a118879863388"}, - {file = "Cython-3.0.8-cp312-cp312-win32.whl", hash = "sha256:85077915a93e359a9b920280d214dc0cf8a62773e1f3d7d30fab8ea4daed670c"}, - {file = "Cython-3.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:0cb2dcc565c7851f75d496f724a384a790fab12d1b82461b663e66605bec429a"}, - {file = "Cython-3.0.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:870d2a0a7e3cbd5efa65aecdb38d715ea337a904ea7bb22324036e78fb7068e7"}, - {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e8f2454128974905258d86534f4fd4f91d2f1343605657ecab779d80c9d6d5e"}, - {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1949d6aa7bc792554bee2b67a9fe41008acbfe22f4f8df7b6ec7b799613a4b3"}, - {file = "Cython-3.0.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9f2c6e1b8f3bcd6cb230bac1843f85114780bb8be8614855b1628b36bb510e0"}, - {file = "Cython-3.0.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:05d7eddc668ae7993643f32c7661f25544e791edb745758672ea5b1a82ecffa6"}, - {file = "Cython-3.0.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bfabe115deef4ada5d23c87bddb11289123336dcc14347011832c07db616dd93"}, - {file = "Cython-3.0.8-cp36-cp36m-win32.whl", hash = "sha256:0c38c9f0bcce2df0c3347285863621be904ac6b64c5792d871130569d893efd7"}, - {file = "Cython-3.0.8-cp36-cp36m-win_amd64.whl", hash = "sha256:6c46939c3983217d140999de7c238c3141f56b1ea349e47ca49cae899969aa2c"}, - {file = "Cython-3.0.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:115f0a50f752da6c99941b103b5cb090da63eb206abbc7c2ad33856ffc73f064"}, - {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9c0f29246734561c90f36e70ed0506b61aa3d044e4cc4cba559065a2a741fae"}, - {file = 
"Cython-3.0.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ab75242869ff71e5665fe5c96f3378e79e792fa3c11762641b6c5afbbbbe026"}, - {file = "Cython-3.0.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6717c06e9cfc6c1df18543cd31a21f5d8e378a40f70c851fa2d34f0597037abc"}, - {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9d3f74388db378a3c6fd06e79a809ed98df3f56484d317b81ee762dbf3c263e0"}, - {file = "Cython-3.0.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae7ac561fd8253a9ae96311e91d12af5f701383564edc11d6338a7b60b285a6f"}, - {file = "Cython-3.0.8-cp37-cp37m-win32.whl", hash = "sha256:97b2a45845b993304f1799664fa88da676ee19442b15fdcaa31f9da7e1acc434"}, - {file = "Cython-3.0.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9e2be2b340fea46fb849d378f9b80d3c08ff2e81e2bfbcdb656e2e3cd8c6b2dc"}, - {file = "Cython-3.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2cde23c555470db3f149ede78b518e8274853745289c956a0e06ad8d982e4db9"}, - {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7990ca127e1f1beedaf8fc8bf66541d066ef4723ad7d8d47a7cbf842e0f47580"}, - {file = "Cython-3.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b983c8e6803f016146c26854d9150ddad5662960c804ea7f0c752c9266752f0"}, - {file = "Cython-3.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a973268d7ca1a2bdf78575e459a94a78e1a0a9bb62a7db0c50041949a73b02ff"}, - {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:61a237bc9dd23c7faef0fcfce88c11c65d0c9bb73c74ccfa408b3a012073c20e"}, - {file = "Cython-3.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a3d67f079598af49e90ff9655bf85bd358f093d727eb21ca2708f467c489cae"}, - {file = "Cython-3.0.8-cp38-cp38-win32.whl", hash = 
"sha256:17a642bb01a693e34c914106566f59844b4461665066613913463a719e0dd15d"}, - {file = "Cython-3.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:2cdfc32252f3b6dc7c94032ab744dcedb45286733443c294d8f909a4854e7f83"}, - {file = "Cython-3.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa97893d99385386925d00074654aeae3a98867f298d1e12ceaf38a9054a9bae"}, - {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05c0bf9d085c031df8f583f0d506aa3be1692023de18c45d0aaf78685bbb944"}, - {file = "Cython-3.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de892422582f5758bd8de187e98ac829330ec1007bc42c661f687792999988a7"}, - {file = "Cython-3.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:314f2355a1f1d06e3c431eaad4708cf10037b5e91e4b231d89c913989d0bdafd"}, - {file = "Cython-3.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:78825a3774211e7d5089730f00cdf7f473042acc9ceb8b9eeebe13ed3a5541de"}, - {file = "Cython-3.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:df8093deabc55f37028190cf5e575c26aad23fc673f34b85d5f45076bc37ce39"}, - {file = "Cython-3.0.8-cp39-cp39-win32.whl", hash = "sha256:1aca1b97e0095b3a9a6c33eada3f661a4ed0d499067d121239b193e5ba3bb4f0"}, - {file = "Cython-3.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:16873d78be63bd38ffb759da7ab82814b36f56c769ee02b1d5859560e4c3ac3c"}, - {file = "Cython-3.0.8-py2.py3-none-any.whl", hash = "sha256:171b27051253d3f9108e9759e504ba59ff06e7f7ba944457f94deaf9c21bf0b6"}, - {file = "Cython-3.0.8.tar.gz", hash = "sha256:8333423d8fd5765e7cceea3a9985dd1e0a5dfeb2734629e1a2ed2d6233d39de6"}, + {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, + 
{file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89a82937ce4037f092e9848a7bbcc65bc8e9fc9aef2bb74f5c15e7d21a73080"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea2e7e2d3bc0d8630dafe6c4a5a89485598ff8a61885b74f8ed882597efd5"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cee29846471ce60226b18e931d8c1c66a158db94853e3e79bc2da9bd22345008"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eeb6860b0f4bfa402de8929833fe5370fa34069c7ebacb2d543cb017f21fb891"}, + {file = "Cython-3.0.11-cp310-cp310-win32.whl", hash = "sha256:3699391125ab344d8d25438074d1097d9ba0fb674d0320599316cfe7cf5f002a"}, + {file = "Cython-3.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:d02f4ebe15aac7cdacce1a628e556c1983f26d140fd2e0ac5e0a090e605a2d38"}, + {file = "Cython-3.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75ba1c70b6deeaffbac123856b8d35f253da13552207aa969078611c197377e4"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af91497dc098718e634d6ec8f91b182aea6bb3690f333fc9a7777bc70abe8810"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3999fb52d3328a6a5e8c63122b0a8bd110dfcdb98dda585a3def1426b991cba7"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d566a4e09b8979be8ab9f843bac0dd216c81f5e5f45661a9b25cd162ed80508c"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:46aec30f217bdf096175a1a639203d44ac73a36fe7fa3dd06bd012e8f39eca0f"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd1fe25af330f4e003421636746a546474e4ccd8f239f55d2898d80983d20ed"}, + {file = "Cython-3.0.11-cp311-cp311-win32.whl", hash = 
"sha256:221de0b48bf387f209003508e602ce839a80463522fc6f583ad3c8d5c890d2c1"}, + {file = "Cython-3.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:3ff8ac1f0ecd4f505db4ab051e58e4531f5d098b6ac03b91c3b902e8d10c67b3"}, + {file = "Cython-3.0.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:11996c40c32abf843ba652a6d53cb15944c88d91f91fc4e6f0028f5df8a8f8a1"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63f2c892e9f9c1698ecfee78205541623eb31cd3a1b682668be7ac12de94aa8e"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b14c24f1dc4c4c9d997cca8d1b7fb01187a218aab932328247dcf5694a10102"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8eed5c015685106db15dd103fd040948ddca9197b1dd02222711815ea782a27"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780f89c95b8aec1e403005b3bf2f0a2afa060b3eba168c86830f079339adad89"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a690f2ff460682ea985e8d38ec541be97e0977fa0544aadc21efc116ff8d7579"}, + {file = "Cython-3.0.11-cp312-cp312-win32.whl", hash = "sha256:2252b5aa57621848e310fe7fa6f7dce5f73aa452884a183d201a8bcebfa05a00"}, + {file = "Cython-3.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:da394654c6da15c1d37f0b7ec5afd325c69a15ceafee2afba14b67a5df8a82c8"}, + {file = "Cython-3.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4341d6a64d47112884e0bcf31e6c075268220ee4cd02223047182d4dda94d637"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351955559b37e6c98b48aecb178894c311be9d731b297782f2b78d111f0c9015"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c02361af9bfa10ff1ccf967fc75159e56b1c8093caf565739ed77a559c1f29f"}, + {file = 
"Cython-3.0.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6823aef13669a32caf18bbb036de56065c485d9f558551a9b55061acf9c4c27f"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fb68cef33684f8cc97987bee6ae919eee7e18ee6a3ad7ed9516b8386ef95ae6"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790263b74432cb997740d73665f4d8d00b9cd1cecbdd981d93591ddf993d4f12"}, + {file = "Cython-3.0.11-cp313-cp313-win32.whl", hash = "sha256:e6dd395d1a704e34a9fac00b25f0036dce6654c6b898be6f872ac2bb4f2eda48"}, + {file = "Cython-3.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:52186101d51497519e99b60d955fd5cb3bf747c67f00d742e70ab913f1e42d31"}, + {file = "Cython-3.0.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c69d5cad51388522b98a99b4be1b77316de85b0c0523fa865e0ea58bbb622e0a"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8acdc87e9009110adbceb7569765eb0980129055cc954c62f99fe9f094c9505e"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd47865f4c0a224da73acf83d113f93488d17624e2457dce1753acdfb1cc40c"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:301bde949b4f312a1c70e214b0c3bc51a3f955d466010d2f68eb042df36447b0"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:f3953d2f504176f929862e5579cfc421860c33e9707f585d70d24e1096accdf7"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:3f2b062f6df67e8a56c75e500ca330cf62c85ac26dd7fd006f07ef0f83aebfa3"}, + {file = "Cython-3.0.11-cp36-cp36m-win32.whl", hash = "sha256:c3d68751668c66c7a140b6023dba5d5d507f72063407bb609d3a5b0f3b8dfbe4"}, + {file = "Cython-3.0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:bcd29945fafd12484cf37b1d84f12f0e7a33ba3eac5836531c6bd5283a6b3a0c"}, + 
{file = "Cython-3.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e9a8d92978b15a0c7ca7f98447c6c578dc8923a0941d9d172d0b077cb69c576"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421017466e9260aca86823974e26e158e6358622f27c0f4da9c682f3b6d2e624"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80a7232938d523c1a12f6b1794ab5efb1ae77ad3fde79de4bb558d8ab261619"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfa550d9ae39e827a6e7198076df763571cb53397084974a6948af558355e028"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:aedceb6090a60854b31bf9571dc55f642a3fa5b91f11b62bcef167c52cac93d8"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:473d35681d9f93ce380e6a7c8feb2d65fc6333bd7117fbc62989e404e241dbb0"}, + {file = "Cython-3.0.11-cp37-cp37m-win32.whl", hash = "sha256:3379c6521e25aa6cd7703bb7d635eaca75c0f9c7f1b0fdd6dd15a03bfac5f68d"}, + {file = "Cython-3.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:14701edb3107a5d9305a82d9d646c4f28bfecbba74b26cc1ee2f4be08f602057"}, + {file = "Cython-3.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598699165cfa7c6d69513ee1bffc9e1fdd63b00b624409174c388538aa217975"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0583076c4152b417a3a8a5d81ec02f58c09b67d3f22d5857e64c8734ceada8c"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52205347e916dd65d2400b977df4c697390c3aae0e96275a438cc4ae85dadc08"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989899a85f0d9a57cebb508bd1f194cb52f0e3f7e22ac259f33d148d6422375c"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:53b6072a89049a991d07f42060f65398448365c59c9cb515c5925b9bdc9d71f8"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f988f7f8164a6079c705c39e2d75dbe9967e3dacafe041420d9af7b9ee424162"}, + {file = "Cython-3.0.11-cp38-cp38-win32.whl", hash = "sha256:a1f4cbc70f6b7f0c939522118820e708e0d490edca42d852fa8004ec16780be2"}, + {file = "Cython-3.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:187685e25e037320cae513b8cc4bf9dbc4465c037051aede509cbbf207524de2"}, + {file = "Cython-3.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0fc6fdd6fa493be7bdda22355689d5446ac944cd71286f6f44a14b0d67ee3ff5"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b1d1f6f94cc5d42a4591f6d60d616786b9cd15576b112bc92a23131fcf38020"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ab2b92a3e6ed552adbe9350fd2ef3aa0cc7853cf91569f9dbed0c0699bbeab"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:104d6f2f2c827ccc5e9e42c80ef6773a6aa94752fe6bc5b24a4eab4306fb7f07"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:13062ce556a1e98d2821f7a0253b50569fdc98c36efd6653a65b21e3f8bbbf5f"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:525d09b3405534763fa73bd78c8e51ac8264036ce4c16d37dfd1555a7da6d3a7"}, + {file = "Cython-3.0.11-cp39-cp39-win32.whl", hash = "sha256:b8c7e514075696ca0f60c337f9e416e61d7ccbc1aa879a56c39181ed90ec3059"}, + {file = "Cython-3.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:8948802e1f5677a673ea5d22a1e7e273ca5f83e7a452786ca286eebf97cee67c"}, + {file = "Cython-3.0.11-py2.py3-none-any.whl", hash = "sha256:0e25f6425ad4a700d7f77cd468da9161e63658837d1bc34861a9861a4ef6346d"}, + {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, ] [[package]] From 
cf1ea819f50a084596180a2ac1491b14b328525a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:43:49 -1000 Subject: [PATCH 242/434] chore(deps-dev): bump setuptools from 65.7.0 to 70.0.0 in the pip group (#1395) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 15 +++++++-------- pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index a79e019b..86a21c7b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -320,19 +320,18 @@ pytest = ">=5.0.0" [[package]] name = "setuptools" -version = "65.7.0" +version = "70.3.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-65.7.0-py3-none-any.whl", hash = "sha256:8ab4f1dbf2b4a65f7eec5ad0c620e84c34111a68d3349833494b9088212214dd"}, - {file = "setuptools-65.7.0.tar.gz", hash = "sha256:4d3c92fac8f1118bb77a22181355e29c239cabfe2b9effdaa665c66b711136d7"}, + {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, + {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler 
(>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "tomli" @@ -348,4 +347,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "259e5ec479b559f3c02fdb7224f17b4979b66419c1f82b273d837ecd75b743ac" +content-hash = "71e11c707ebc1753e9e0f618e950bbc5b418c730eadd0a4236f1caf2b2e07d98" diff --git a/pyproject.toml b/pyproject.toml index bb53a1d3..f7c2dd21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ pytest = ">=7.2,<9.0" pytest-cov = "^4.0.0" pytest-asyncio = ">=0.20.3,<0.25.0" cython = "^3.0.5" -setuptools = "^65.6.3" +setuptools = ">=65.6.3,<71.0.0" pytest-timeout = "^2.1.0" [tool.ruff] From b7c45e28ec2a6aa9e9fdd8a1954ea538776d494c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:49:42 -1000 Subject: [PATCH 243/434] chore(deps-dev): bump setuptools from 65.7.0 to 73.0.1 (#1398) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 13 +++++++------ pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 86a21c7b..8d4f3b62 100644 --- a/poetry.lock +++ b/poetry.lock @@ -320,18 +320,19 @@ pytest = ">=5.0.0" [[package]] name = "setuptools" -version = "70.3.0" +version = "73.0.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, - {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, + {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, + {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, ] [package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", 
"pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] [[package]] name = "tomli" @@ -347,4 +348,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "71e11c707ebc1753e9e0f618e950bbc5b418c730eadd0a4236f1caf2b2e07d98" +content-hash = "28cb517c0e51804b062b4993a153f4f3428287de8a5d727677559432e3efd9a4" diff --git a/pyproject.toml b/pyproject.toml index f7c2dd21..33b499ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ pytest = ">=7.2,<9.0" pytest-cov = "^4.0.0" pytest-asyncio = ">=0.20.3,<0.25.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<71.0.0" +setuptools = ">=65.6.3,<74.0.0" pytest-timeout = "^2.1.0" [tool.ruff] From f7c77081b2f8c70b1ed6a9b9751a86cf91f9aae2 Mon Sep 
17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 15:50:02 -1000 Subject: [PATCH 244/434] feat: improve performance of ip address caching (#1392) --- src/zeroconf/_utils/ipaddress.py | 35 ++++++++++++++++++++++++++++++-- tests/utils/test_ipaddress.py | 12 ++++++++++- 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index 6b4657be..3346e6d7 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -28,13 +28,24 @@ from .._dns import DNSAddress from ..const import _TYPE_AAAA +if sys.version_info >= (3, 9, 0): + from functools import cache +else: + cache = lru_cache(maxsize=None) + bytes_ = bytes int_ = int IPADDRESS_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) class ZeroconfIPv4Address(IPv4Address): - __slots__ = ("_str", "_is_link_local", "_is_unspecified") + __slots__ = ( + "_str", + "_is_link_local", + "_is_unspecified", + "_is_loopback", + "__hash__", + ) def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a new IPv4 address.""" @@ -42,6 +53,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self._str = super().__str__() self._is_link_local = super().is_link_local self._is_unspecified = super().is_unspecified + self._is_loopback = super().is_loopback + self.__hash__ = cache(lambda: IPv4Address.__hash__(self)) # type: ignore[method-assign] def __str__(self) -> str: """Return the string representation of the IPv4 address.""" @@ -57,9 +70,20 @@ def is_unspecified(self) -> bool: """Return True if this is an unspecified address.""" return self._is_unspecified + @property + def is_loopback(self) -> bool: + """Return True if this is a loop back.""" + return self._is_loopback + class ZeroconfIPv6Address(IPv6Address): - __slots__ = ("_str", "_is_link_local", "_is_unspecified") + __slots__ = ( + "_str", + "_is_link_local", + "_is_unspecified", + "_is_loopback", + "__hash__", + ) def __init__(self, *args: Any, **kwargs: 
Any) -> None: """Initialize a new IPv6 address.""" @@ -67,6 +91,8 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self._str = super().__str__() self._is_link_local = super().is_link_local self._is_unspecified = super().is_unspecified + self._is_loopback = super().is_loopback + self.__hash__ = cache(lambda: IPv6Address.__hash__(self)) # type: ignore[method-assign] def __str__(self) -> str: """Return the string representation of the IPv6 address.""" @@ -82,6 +108,11 @@ def is_unspecified(self) -> bool: """Return True if this is an unspecified address.""" return self._is_unspecified + @property + def is_loopback(self) -> bool: + """Return True if this is a loop back.""" + return self._is_loopback + @lru_cache(maxsize=512) def _cached_ip_addresses( diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index 35803c7e..ddade486 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ -19,7 +19,17 @@ def test_cached_ip_addresses_wrapper(): str(ipaddress.cached_ip_addresses(b"&\x06(\x00\x02 \x00\x01\x02H\x18\x93%\xc8\x19F")) == "2606:2800:220:1:248:1893:25c8:1946" ) - assert ipaddress.cached_ip_addresses("::1") == ipaddress.IPv6Address("::1") + loop_back_ipv6 = ipaddress.cached_ip_addresses("::1") + assert loop_back_ipv6 == ipaddress.IPv6Address("::1") + assert loop_back_ipv6.is_loopback is True + + assert hash(loop_back_ipv6) == hash(ipaddress.IPv6Address("::1")) + + loop_back_ipv4 = ipaddress.cached_ip_addresses("127.0.0.1") + assert loop_back_ipv4 == ipaddress.IPv4Address("127.0.0.1") + assert loop_back_ipv4.is_loopback is True + + assert hash(loop_back_ipv4) == hash(ipaddress.IPv4Address("127.0.0.1")) ipv4 = ipaddress.cached_ip_addresses("169.254.0.0") assert ipv4 is not None From 2ee954de379bc5b5beeb5891b8c937573ea5441b Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 27 Aug 2024 02:02:42 +0000 Subject: [PATCH 245/434] 0.133.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 
+++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a2026cba..ff1c5239 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,15 @@ +## v0.133.0 (2024-08-27) + +### Feature + +* Improve performance of ip address caching ([#1392](https://github.com/python-zeroconf/python-zeroconf/issues/1392)) ([`f7c7708`](https://github.com/python-zeroconf/python-zeroconf/commit/f7c77081b2f8c70b1ed6a9b9751a86cf91f9aae2)) +* Enable building of arm64 macOS builds ([#1384](https://github.com/python-zeroconf/python-zeroconf/issues/1384)) ([`0df2ce0`](https://github.com/python-zeroconf/python-zeroconf/commit/0df2ce0e6f7313831da6a63d477019982d5df55c)) +* Add classifier for python 3.13 ([#1393](https://github.com/python-zeroconf/python-zeroconf/issues/1393)) ([`7fb2bb2`](https://github.com/python-zeroconf/python-zeroconf/commit/7fb2bb21421c70db0eb288fa7e73d955f58b0f5d)) +* Python 3.13 support ([#1390](https://github.com/python-zeroconf/python-zeroconf/issues/1390)) ([`98cfa83`](https://github.com/python-zeroconf/python-zeroconf/commit/98cfa83710e43880698353821bae61108b08cb2f)) + ## v0.132.2 (2024-04-13) ### Fix diff --git a/pyproject.toml b/pyproject.toml index 33b499ee..180d6236 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.132.2" +version = "0.133.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index f3130307..e058d06f 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.132.2" +__version__ = "0.133.0" __license__ = "LGPL" From 89e90782f02ef2bde8738789e92160a6379457a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:16:00 -0500 Subject: [PATCH 246/434] chore(deps-dev): bump setuptools from 73.0.1 to 74.0.0 (#1403) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 14 +++++++++----- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8d4f3b62..189fcfb4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -320,19 +320,23 @@ pytest = ">=5.0.0" [[package]] name = "setuptools" -version = "73.0.1" +version = "74.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, - {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, + {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, + {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] 
+cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "tomli" @@ -348,4 +352,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "28cb517c0e51804b062b4993a153f4f3428287de8a5d727677559432e3efd9a4" +content-hash = "7cd88ed2bd45ce5dfdc9169de986d8f851d666c5f1b36c9a605dcb4efc5a6bc9" diff --git a/pyproject.toml b/pyproject.toml index 180d6236..4c334329 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ pytest = 
">=7.2,<9.0" pytest-cov = "^4.0.0" pytest-asyncio = ">=0.20.3,<0.25.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<74.0.0" +setuptools = ">=65.6.3,<75.0.0" pytest-timeout = "^2.1.0" [tool.ruff] From dd8ce11c28a3feda89a01e70f3488d8360cb4b3d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:32:14 -0500 Subject: [PATCH 247/434] chore(pre-commit.ci): pre-commit autoupdate (#1406) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1e37e5a0..7c916f81 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.2 + rev: v0.6.3 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 30f85fdc2eed9e42b987635f95f5b025ec3bd764 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 7 Sep 2024 21:39:30 -0500 Subject: [PATCH 248/434] chore(deps-dev): bump pytest-cov from 4.1.0 to 5.0.0 (#1405) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 189fcfb4..b891449c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -288,13 +288,13 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "4.1.0" +version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] @@ -302,7 +302,7 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-timeout" @@ -352,4 +352,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "7cd88ed2bd45ce5dfdc9169de986d8f851d666c5f1b36c9a605dcb4efc5a6bc9" +content-hash = "501cb081442d418e6462854507575a73105dff190b3911f41837f2cb68dd6834" diff --git a/pyproject.toml b/pyproject.toml index 4c334329..2a284912 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] pytest = ">=7.2,<9.0" -pytest-cov = "^4.0.0" +pytest-cov = ">=4,<6" pytest-asyncio = ">=0.20.3,<0.25.0" cython = "^3.0.5" setuptools = ">=65.6.3,<75.0.0" From 111c91ab395a7520e477eb0e75d5924fba3c64c7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 7 Sep 2024 21:40:03 -0500 Subject: [PATCH 249/434] feat: improve performance when IP addresses change frequently (#1407) --- src/zeroconf/_services/info.py | 39 ++++++++++++++++++++------------ src/zeroconf/_utils/ipaddress.py | 28 ++++++++--------------- tests/services/test_info.py | 5 ++++ 3 files changed, 39 insertions(+), 33 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 2fc9dfc8..fef43fa0 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -23,7 +23,6 @@ import asyncio import random import sys -from ipaddress import IPv4Address, IPv6Address, _BaseAddress from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast from .._cache import DNSCache @@ -50,6 +49,8 @@ wait_for_future_set_or_timeout, ) from .._utils.ipaddress import ( + ZeroconfIPv4Address, + ZeroconfIPv6Address, cached_ip_addresses, get_ip_address_object_from_record, ip_bytes_and_scope_to_address, @@ -187,8 +188,8 @@ def __init__( self.type = type_ self._name = name self.key = name.lower() - self._ipv4_addresses: List[IPv4Address] = [] - self._ipv6_addresses: List[IPv6Address] = [] + self._ipv4_addresses: List[ZeroconfIPv4Address] = [] + self._ipv6_addresses: List[ZeroconfIPv6Address] = [] if addresses is not None: self.addresses = addresses elif parsed_addresses is not None: @@ -260,11 +261,11 @@ def addresses(self, value: List[bytes]) -> None: ) if addr.version == 4: if TYPE_CHECKING: - assert isinstance(addr, IPv4Address) + assert isinstance(addr, ZeroconfIPv4Address) self._ipv4_addresses.append(addr) else: if TYPE_CHECKING: - assert isinstance(addr, IPv6Address) + assert isinstance(addr, ZeroconfIPv6Address) self._ipv6_addresses.append(addr) @property @@ -321,7 +322,7 @@ def addresses_by_version(self, version: IPVersion) -> List[bytes]: def ip_addresses_by_version( self, version: IPVersion - ) -> Union[List[IPv4Address], List[IPv6Address], List[_BaseAddress]]: + ) -> 
Union[List[ZeroconfIPv4Address], List[ZeroconfIPv6Address]]: """List ip_address objects matching IP version. Addresses are guaranteed to be returned in LIFO (last in, first out) @@ -334,7 +335,7 @@ def ip_addresses_by_version( def _ip_addresses_by_version_value( self, version_value: int_ - ) -> Union[List[IPv4Address], List[IPv6Address]]: + ) -> Union[List[ZeroconfIPv4Address], List[ZeroconfIPv6Address]]: """Backend for addresses_by_version that uses the raw value.""" if version_value == _IPVersion_All_value: return [*self._ipv4_addresses, *self._ipv6_addresses] # type: ignore[return-value] @@ -440,9 +441,9 @@ def get_name(self) -> str: def _get_ip_addresses_from_cache_lifo( self, zc: "Zeroconf", now: float_, type: int_ - ) -> List[Union[IPv4Address, IPv6Address]]: + ) -> List[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: """Set IPv6 addresses from the cache.""" - address_list: List[Union[IPv4Address, IPv6Address]] = [] + address_list: List[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]] = [] for record in self._get_address_records_from_cache_by_type(zc, type): if record.is_expired(now): continue @@ -456,7 +457,7 @@ def _set_ipv6_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: """Set IPv6 addresses from the cache.""" if TYPE_CHECKING: self._ipv6_addresses = cast( - "List[IPv6Address]", + "List[ZeroconfIPv6Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA), ) else: @@ -466,7 +467,7 @@ def _set_ipv4_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: """Set IPv4 addresses from the cache.""" if TYPE_CHECKING: self._ipv4_addresses = cast( - "List[IPv4Address]", + "List[ZeroconfIPv4Address]", self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A), ) else: @@ -509,24 +510,32 @@ def _process_record_threadsafe(self, zc: "Zeroconf", record: DNSRecord, now: flo if ip_addr.version == 4: if TYPE_CHECKING: - assert isinstance(ip_addr, IPv4Address) + assert isinstance(ip_addr, ZeroconfIPv4Address) ipv4_addresses = 
self._ipv4_addresses if ip_addr not in ipv4_addresses: ipv4_addresses.insert(0, ip_addr) return True - elif ip_addr != ipv4_addresses[0]: + # Use int() to compare the addresses as integers + # since by default IPv4Address.__eq__ compares the + # the addresses on version and int which more than + # we need here since we know the version is 4. + elif ip_addr.zc_integer != ipv4_addresses[0].zc_integer: ipv4_addresses.remove(ip_addr) ipv4_addresses.insert(0, ip_addr) return False if TYPE_CHECKING: - assert isinstance(ip_addr, IPv6Address) + assert isinstance(ip_addr, ZeroconfIPv6Address) ipv6_addresses = self._ipv6_addresses if ip_addr not in self._ipv6_addresses: ipv6_addresses.insert(0, ip_addr) return True - elif ip_addr != self._ipv6_addresses[0]: + # Use int() to compare the addresses as integers + # since by default IPv6Address.__eq__ compares the + # the addresses on version and int which more than + # we need here since we know the version is 6. + elif ip_addr.zc_integer != self._ipv6_addresses[0].zc_integer: ipv6_addresses.remove(ip_addr) ipv6_addresses.insert(0, ip_addr) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index 3346e6d7..72bb9ce8 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -39,13 +39,7 @@ class ZeroconfIPv4Address(IPv4Address): - __slots__ = ( - "_str", - "_is_link_local", - "_is_unspecified", - "_is_loopback", - "__hash__", - ) + __slots__ = ("_str", "_is_link_local", "_is_unspecified", "_is_loopback", "__hash__", "zc_integer") def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a new IPv4 address.""" @@ -55,6 +49,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self._is_unspecified = super().is_unspecified self._is_loopback = super().is_loopback self.__hash__ = cache(lambda: IPv4Address.__hash__(self)) # type: ignore[method-assign] + self.zc_integer = int(self) def __str__(self) -> str: """Return the string representation of the IPv4 
address.""" @@ -77,13 +72,7 @@ def is_loopback(self) -> bool: class ZeroconfIPv6Address(IPv6Address): - __slots__ = ( - "_str", - "_is_link_local", - "_is_unspecified", - "_is_loopback", - "__hash__", - ) + __slots__ = ("_str", "_is_link_local", "_is_unspecified", "_is_loopback", "__hash__", "zc_integer") def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a new IPv6 address.""" @@ -93,6 +82,7 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: self._is_unspecified = super().is_unspecified self._is_loopback = super().is_loopback self.__hash__ = cache(lambda: IPv6Address.__hash__(self)) # type: ignore[method-assign] + self.zc_integer = int(self) def __str__(self) -> str: """Return the string representation of the IPv6 address.""" @@ -117,7 +107,7 @@ def is_loopback(self) -> bool: @lru_cache(maxsize=512) def _cached_ip_addresses( address: Union[str, bytes, int], -) -> Optional[Union[IPv4Address, IPv6Address]]: +) -> Optional[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: """Cache IP addresses.""" try: return ZeroconfIPv4Address(address) @@ -136,14 +126,16 @@ def _cached_ip_addresses( def get_ip_address_object_from_record( record: DNSAddress, -) -> Optional[Union[IPv4Address, IPv6Address]]: +) -> Optional[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: """Get the IP address object from the record.""" if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id: return ip_bytes_and_scope_to_address(record.address, record.scope_id) return cached_ip_addresses_wrapper(record.address) -def ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Union[IPv4Address, IPv6Address]]: +def ip_bytes_and_scope_to_address( + address: bytes_, scope: int_ +) -> Optional[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: """Convert the bytes and scope to an IP address object.""" base_address = cached_ip_addresses_wrapper(address) if base_address is not None and base_address.is_link_local: @@ -152,7 +144,7 @@ def 
ip_bytes_and_scope_to_address(address: bytes_, scope: int_) -> Optional[Unio return base_address -def str_without_scope_id(addr: Union[IPv4Address, IPv6Address]) -> str: +def str_without_scope_id(addr: Union[ZeroconfIPv4Address, ZeroconfIPv6Address]) -> str: """Return the string representation of the address without the scope id.""" if IPADDRESS_SUPPORTS_SCOPE_ID and addr.version == 6: address_str = str(addr) diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 4a9b1ee2..9d4a4958 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -1469,6 +1469,10 @@ async def test_ipv6_changes_are_seen(): assert info.addresses_by_version(IPVersion.V6Only) == [ b"\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" ] + info.load_from_cache(aiozc.zeroconf) + assert info.addresses_by_version(IPVersion.V6Only) == [ + b"\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + ] generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) generated.add_answer_at_time( @@ -1494,6 +1498,7 @@ async def test_ipv6_changes_are_seen(): b"\x00\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", b"\xde\xad\xbe\xef\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", ] + await aiozc.async_close() From 9262626895d354ed7376aa567043b793c37a985e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 7 Sep 2024 21:43:20 -0500 Subject: [PATCH 250/434] fix: improve helpfulness of ServiceInfo.request assertions (#1408) --- src/zeroconf/_services/info.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index fef43fa0..d18c8402 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -791,7 +791,8 @@ def request( :param addr: address to send the request to :param port: port to send the request to """ - assert zc.loop is not None and zc.loop.is_running() + assert zc.loop is not None, "Zeroconf instance must have a loop, was it not started?" + assert zc.loop.is_running(), "Zeroconf instance loop must be running, was it already stopped?" if zc.loop == get_running_loop(): raise RuntimeError("Use AsyncServiceInfo.async_request from the event loop") return bool( From e3bf880d73c745119171b1d13cb4761c8dbd2dbf Mon Sep 17 00:00:00 2001 From: github-actions Date: Sun, 8 Sep 2024 03:04:36 +0000 Subject: [PATCH 251/434] 0.134.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 10 ++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ff1c5239..910acfa6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,16 @@ +## v0.134.0 (2024-09-08) + +### Feature + +* Improve performance when IP addresses change frequently ([#1407](https://github.com/python-zeroconf/python-zeroconf/issues/1407)) ([`111c91a`](https://github.com/python-zeroconf/python-zeroconf/commit/111c91ab395a7520e477eb0e75d5924fba3c64c7)) + +### Fix + +* Improve helpfulness of ServiceInfo.request assertions ([#1408](https://github.com/python-zeroconf/python-zeroconf/issues/1408)) ([`9262626`](https://github.com/python-zeroconf/python-zeroconf/commit/9262626895d354ed7376aa567043b793c37a985e)) + ## v0.133.0 (2024-08-27) ### Feature diff --git a/pyproject.toml 
b/pyproject.toml index 2a284912..19cebdc6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.133.0" +version = "0.134.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index e058d06f..8ffaf160 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.133.0" +__version__ = "0.134.0" __license__ = "LGPL" From a3172f87b18a4034f9e69325e06a9061b443e4f4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 13:08:33 -0500 Subject: [PATCH 252/434] chore(pre-commit.ci): pre-commit autoupdate (#1410) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7c916f81..74b04776 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.3 + rev: v0.6.7 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 7bb2cbba19d3b5d21c65ddd2e3f72f6013cf97bb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 13:08:46 -0500 Subject: [PATCH 253/434] chore(deps-dev): bump pytest-timeout from 2.2.0 to 2.3.1 (#1404) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index b891449c..feddb314 100644 --- 
a/poetry.lock +++ b/poetry.lock @@ -306,17 +306,17 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-timeout" -version = "2.2.0" +version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, - {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, ] [package.dependencies] -pytest = ">=5.0.0" +pytest = ">=7.0.0" [[package]] name = "setuptools" From 7d6c277df95fc3703f92aa36680c4f2d3474fbcf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 13:09:00 -0500 Subject: [PATCH 254/434] chore(deps-dev): bump pytest from 8.3.2 to 8.3.3 (#1412) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index feddb314..b7e5f4c5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -248,13 +248,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = 
"sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] From 1827474ca4c39b9ecbdafce69c3f3ee3a79338f6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 13:49:02 -0500 Subject: [PATCH 255/434] chore(deps-dev): bump setuptools from 74.0.0 to 75.1.0 (#1414) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index b7e5f4c5..7989b098 100644 --- a/poetry.lock +++ b/poetry.lock @@ -320,18 +320,18 @@ pytest = ">=7.0.0" [[package]] name = "setuptools" -version = "74.0.0" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-74.0.0-py3-none-any.whl", hash = "sha256:0274581a0037b638b9fc1c6883cc71c0210865aaa76073f7882376b641b84e8f"}, - {file = "setuptools-74.0.0.tar.gz", hash = "sha256:a85e96b8be2b906f3e3e789adec6a9323abf79758ecfa3065bd740d81158b11e"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", 
"more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] @@ -352,4 +352,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "501cb081442d418e6462854507575a73105dff190b3911f41837f2cb68dd6834" +content-hash = "778ccbd9b059daea1ccbc3a93e0186fa30737e8c5234cdc04edf505a1f71606a" diff --git a/pyproject.toml b/pyproject.toml index 19cebdc6..57099ca2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,7 +60,7 @@ pytest = ">=7.2,<9.0" pytest-cov = ">=4,<6" pytest-asyncio = ">=0.20.3,<0.25.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<75.0.0" +setuptools = ">=65.6.3,<76.0.0" pytest-timeout = "^2.1.0" [tool.ruff] From 1df2e691ff11c9592e1cdad5599fb6601eb1aa3f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 24 Sep 2024 13:49:26 -0500 Subject: [PATCH 256/434] feat: improve performance of DNSCache backend (#1415) --- src/zeroconf/_cache.pxd | 23 +++++++++++++++++++---- src/zeroconf/_cache.py | 11 +++++++---- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index af27a1d5..d4417466 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -13,9 +13,12 @@ from ._dns cimport ( cdef object _UNIQUE_RECORD_TYPES -cdef object _TYPE_PTR +cdef unsigned int _TYPE_PTR cdef cython.uint _ONE_SECOND +@cython.locals( + record_cache=dict, +) cdef _remove_key(cython.dict cache, object key, DNSRecord record) @@ -42,7 +45,7 @@ cdef class DNSCache: records=cython.dict, record=DNSRecord, ) - cpdef list async_all_by_details(self, str name, object type_, object class_) + cpdef list async_all_by_details(self, str name, unsigned int type_, unsigned int class_) cpdef cython.dict async_entries_with_name(self, str name) @@ -51,19 +54,23 @@ cdef class DNSCache: @cython.locals( cached_entry=DNSRecord, ) - cpdef DNSRecord get_by_details(self, str name, object type_, object class_) + cpdef DNSRecord get_by_details(self, str name, unsigned int type_, unsigned int class_) @cython.locals( records=cython.dict, entry=DNSRecord, ) - cpdef cython.list get_all_by_details(self, str name, object type_, object class_) + cpdef cython.list get_all_by_details(self, str name, unsigned int type_, unsigned int class_) @cython.locals( store=cython.dict, + service_record=DNSService ) cdef bint _async_add(self, DNSRecord record) + @cython.locals( + service_record=DNSService + ) cdef void _async_remove(self, DNSRecord record) @cython.locals( @@ -71,3 +78,11 @@ cdef class DNSCache: created_double=double, ) cpdef void async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, double now) + + cpdef entries_with_name(self, str name) + + @cython.locals( + record=DNSRecord, + now=double + ) + 
cpdef current_entry_with_name_and_alias(self, str name, str alias) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index 7db15117..333b6196 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -49,8 +49,9 @@ def _remove_key(cache: _DNSRecordCacheType, key: _str, record: _DNSRecord) -> No This function must be run in from event loop. """ - del cache[key][record] - if not cache[key]: + record_cache = cache[key] + del record_cache[record] + if not record_cache: del cache[key] @@ -81,7 +82,8 @@ def _async_add(self, record: _DNSRecord) -> bool: new = record not in store and not isinstance(record, DNSNsec) store[record] = record if isinstance(record, DNSService): - self.service_cache.setdefault(record.server_key, {})[record] = record + service_record = record + self.service_cache.setdefault(record.server_key, {})[service_record] = service_record return new def async_add_records(self, entries: Iterable[DNSRecord]) -> bool: @@ -103,7 +105,8 @@ def _async_remove(self, record: _DNSRecord) -> None: This function must be run in from event loop. 
""" if isinstance(record, DNSService): - _remove_key(self.service_cache, record.server_key, record) + service_record = record + _remove_key(self.service_cache, service_record.server_key, service_record) _remove_key(self.cache, record.key, record) def async_remove_records(self, entries: Iterable[DNSRecord]) -> None: From 938fe214089d6eb7438f0f03ac19a3a724566d37 Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 24 Sep 2024 19:13:13 +0000 Subject: [PATCH 257/434] 0.135.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6 ++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 910acfa6..e8c6590d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ +## v0.135.0 (2024-09-24) + +### Feature + +* Improve performance of DNSCache backend ([#1415](https://github.com/python-zeroconf/python-zeroconf/issues/1415)) ([`1df2e69`](https://github.com/python-zeroconf/python-zeroconf/commit/1df2e691ff11c9592e1cdad5599fb6601eb1aa3f)) + ## v0.134.0 (2024-09-08) ### Feature diff --git a/pyproject.toml b/pyproject.toml index 57099ca2..fed1c323 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.134.0" +version = "0.135.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 8ffaf160..58bda33d 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.134.0" +__version__ = "0.135.0" __license__ = "LGPL" From 119122939ef0251b771bd5361ef17665331f7078 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 16:53:59 -0500 Subject: [PATCH 258/434] chore(pre-commit.ci): pre-commit autoupdate (#1419) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 74b04776..72f39073 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v3.29.0 + rev: v3.29.1 hooks: - id: commitizen stages: [commit-msg] @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.7 + rev: v0.6.8 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 9e498dff6ea218d3818b4e8faa9b250554ee352d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 14:48:50 -1000 Subject: [PATCH 259/434] chore: pre-commit autoupdate (#1421) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 72f39073..8b50394d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ # See https://pre-commit.com for more information # See 
https://pre-commit.com/hooks.html for more hooks exclude: "CHANGELOG.md" -default_stages: [commit] +default_stages: [pre-commit] ci: autofix_commit_msg: "chore(pre-commit.ci): auto fixes" @@ -14,7 +14,7 @@ repos: - id: commitizen stages: [commit-msg] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: debug-statements - id: check-builtin-literals @@ -34,12 +34,12 @@ repos: - id: prettier args: ["--tab-width", "2"] - repo: https://github.com/asottile/pyupgrade - rev: v3.17.0 + rev: v3.18.0 hooks: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.8 + rev: v0.7.0 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] @@ -53,7 +53,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.11.2 + rev: v1.12.1 hooks: - id: mypy additional_dependencies: [] From 6441b0e467815fff82af3d4b2622f26629a136e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 14:49:02 -1000 Subject: [PATCH 260/434] chore(deps-dev): bump setuptools from 75.1.0 to 75.2.0 (#1423) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7989b098..1c99cd11 100644 --- a/poetry.lock +++ b/poetry.lock @@ -320,13 +320,13 @@ pytest = ">=7.0.0" [[package]] name = "setuptools" -version = "75.1.0" +version = "75.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, + {file = "setuptools-75.2.0-py3-none-any.whl", hash = 
"sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, ] [package.extras] From 3991b4256b8de5b37db7a6144e5112f711b2efef Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Sat, 26 Oct 2024 03:12:57 +0200 Subject: [PATCH 261/434] fix: correct typos (#1422) --- src/zeroconf/_cache.py | 2 +- src/zeroconf/_core.py | 4 ++-- src/zeroconf/_handlers/query_handler.py | 4 ++-- src/zeroconf/_handlers/record_manager.py | 2 +- src/zeroconf/_listener.py | 2 +- src/zeroconf/_protocol/incoming.py | 2 +- src/zeroconf/_services/info.py | 2 +- src/zeroconf/_utils/time.py | 2 +- 8 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index 333b6196..f34c4c16 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -172,7 +172,7 @@ def async_entries_with_server(self, name: str) -> Dict[DNSRecord, DNSRecord]: # The below functions are threadsafe and do not need to be run in the # event loop, however they all make copies so they significantly - # inefficent + # inefficient. def get(self, entry: DNSEntry) -> Optional[DNSRecord]: """Gets an entry by key. Will return None if there is no diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index b3ecd851..68cb8a9a 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -84,10 +84,10 @@ _UNREGISTER_TIME, ) -# The maximum amont of time to delay a multicast +# The maximum amount of time to delay a multicast # response in order to aggregate answers _AGGREGATION_DELAY = 500 # ms -# The maximum amont of time to delay a multicast +# The maximum amount of time to delay a multicast # response in order to aggregate answers after # it has already been delayed to protect the network # from excessive traffic. 
We use a shorter time diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index f2e11236..3acb1b44 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -441,7 +441,7 @@ def handle_assembled_query( ) -> None: """Respond to a (re)assembled query. - If the protocol recieved packets with the TC bit set, it will + If the protocol received packets with the TC bit set, it will wait a bit for the rest of the packets and only call handle_assembled_query once it has a complete set of packets or the timer expires. If the TC bit is not set, a single @@ -457,7 +457,7 @@ def handle_assembled_query( id_ = first_packet.id out = construct_outgoing_unicast_answers(question_answers.ucast, ucast_source, questions, id_) # When sending unicast, only send back the reply - # via the same socket that it was recieved from + # via the same socket that it was received from # as we know its reachable from that socket self.zc.async_send(out, addr, port, v6_flow_scope, transport) if question_answers.mcast_now: diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 8ae82ba5..53ab3ed1 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -146,7 +146,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: # that any ServiceBrowser that is going to call # zc.get_service_info will see the cached value # but ONLY after all the record updates have been - # processsed. + # processed. 
new = False if other_adds or address_adds: new = cache.async_add_records(address_adds) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 19cca8df..4490965f 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -241,7 +241,7 @@ def _respond_query( def error_received(self, exc: Exception) -> None: """Likely socket closed or IPv6.""" # We preformat the message string with the socket as we want - # log_exception_once to log a warrning message once PER EACH + # log_exception_once to log a warning message once PER EACH # different socket in case there are problems with multiple # sockets msg_str = f"Error with socket {self.sock_description}): %s" diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 8670b0df..f7b1d773 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -171,7 +171,7 @@ def num_additionals(self) -> int: return self._num_additionals def _initial_parse(self) -> None: - """Parse the data needed to initalize the packet object.""" + """Parse the data needed to initialize the packet object.""" self._read_header() self._read_questions() if not self._num_questions: diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index d18c8402..8a85ad10 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -221,7 +221,7 @@ def name(self) -> str: @name.setter def name(self, name: str) -> None: - """Replace the the name and reset the key.""" + """Replace the name and reset the key.""" self._name = name self.key = name.lower() self._dns_service_cache = None diff --git a/src/zeroconf/_utils/time.py b/src/zeroconf/_utils/time.py index 2ed8ca92..055e0658 100644 --- a/src/zeroconf/_utils/time.py +++ b/src/zeroconf/_utils/time.py @@ -28,7 +28,7 @@ def current_time_millis() -> _float: """Current time in milliseconds. 
- The current implemention uses `time.monotonic` + The current implementation uses `time.monotonic` but may change in the future. The design requires the time to match asyncio.loop.time() From 6535963b5b789ce445e77bb728a5b7ee4263e582 Mon Sep 17 00:00:00 2001 From: Amir Date: Fri, 25 Oct 2024 18:13:26 -0700 Subject: [PATCH 262/434] fix: add ignore for .c file for wheels (#1424) --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index fed1c323..0874aca3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,8 @@ include = [ { path = "docs", format = "sdist" }, { path = "tests", format = "sdist" }, ] +# Make sure we don't package temporary C files generated by the build process +exclude = [ "**/*.c" ] [tool.poetry.urls] "Bug Tracker" = "https://github.com/python-zeroconf/python-zeroconf/issues" From 1596145452721e0de4e2a724b055e8e290792d3e Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sat, 26 Oct 2024 03:14:04 +0200 Subject: [PATCH 263/434] feat: use SPDX license identifier (#1425) --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0874aca3..ec49e728 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "zeroconf" version = "0.135.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] -license = "LGPL" +license = "LGPL-2.1-or-later" readme = "README.rst" repository = "https://github.com/python-zeroconf/python-zeroconf" documentation = "https://python-zeroconf.readthedocs.io" @@ -11,7 +11,6 @@ classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', - 'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)', 'Operating System :: POSIX', 'Operating System :: POSIX :: Linux', 'Operating System :: MacOS :: MacOS X', From 2f201558d0ab089cdfebb18d2d7bb5785b2cce16 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 25 Oct 2024 15:40:18 -1000 Subject: [PATCH 264/434] fix: update python-semantic-release to fix release process (#1426) --- .github/workflows/ci.yml | 54 +++++++++++++++++++++++++++------------- pyproject.toml | 22 ++++++++++++++-- 2 files changed, 57 insertions(+), 19 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 217cc11c..2359c420 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -93,36 +93,54 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} release: - runs-on: ubuntu-latest - environment: release - if: github.ref == 'refs/heads/master' needs: - test - lint - commitlint + runs-on: ubuntu-latest + environment: release + concurrency: release + permissions: + id-token: write + contents: write + outputs: + released: ${{ steps.release.outputs.released }} + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 + ref: ${{ github.head_ref || github.ref_name }} - # Run semantic release: - # - Update CHANGELOG.md - # - Update version in code - # - Create git tag - # - Create GitHub release - # - Publish to PyPI - - name: Python Semantic Release - uses: relekang/python-semantic-release@v7.34.6 - # env: - # REPOSITORY_URL: https://test.pypi.org/legacy/ - # TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/ + # Do a dry run of PSR + - name: 
Test release + uses: python-semantic-release/python-semantic-release@v9.12.0 + if: github.ref_name != 'master' + with: + root_options: --noop + + # On main branch: actual PSR + upload to PyPI & GitHub + - name: Release + uses: python-semantic-release/python-semantic-release@v9.12.0 + id: release + if: github.ref_name == 'master' + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + if: steps.release.outputs.released == 'true' + + - name: Publish package distributions to GitHub Releases + uses: python-semantic-release/upload-to-gh-release@main + if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} - pypi_token: ${{ secrets.PYPI_TOKEN }} build_wheels: needs: [release] + if: needs.release.outputs.released == 'true' name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} @@ -139,6 +157,8 @@ jobs: # Used to host cibuildwheel - name: Set up Python uses: actions/setup-python@v5 + with: + python-version: "3.11" - name: Install python-semantic-release run: pipx install python-semantic-release==7.34.6 @@ -161,7 +181,7 @@ jobs: platforms: arm64 - name: Build wheels - uses: pypa/cibuildwheel@v2.20.0 + uses: pypa/cibuildwheel@v2.21.3 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 cp38-*_arm64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 diff --git a/pyproject.toml b/pyproject.toml index ec49e728..7bd2960f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,11 +46,29 @@ script = "build_ext.py" [tool.semantic_release] branch = "master" -version_toml = "pyproject.toml:tool.poetry.version" -version_variable = "src/zeroconf/__init__.py:__version__" +version_toml = ["pyproject.toml:tool.poetry.version"] +version_variables = [ + "src/zeroconf/__init__.py:__version__" +] build_command = "pip install poetry && poetry build" tag_format = "{version}" 
+[tool.semantic_release.changelog] +exclude_commit_patterns = [ + "chore*", + "ci*", +] + +[tool.semantic_release.changelog.environment] +keep_trailing_newline = true + +[tool.semantic_release.branches.master] +match = "master" + +[tool.semantic_release.branches.noop] +match = "(?!master$)" +prerelease = true + [tool.poetry.dependencies] python = "^3.8" async-timeout = {version = ">=3.0.0", python = "<3.11"} From 8eac029bd8376abb2a2bdcc32be2edfcb5a8bf7b Mon Sep 17 00:00:00 2001 From: semantic-release Date: Sat, 26 Oct 2024 02:05:32 +0000 Subject: [PATCH 265/434] 0.136.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 6169 +++++++++++++++++++++++++++++--------- pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 4837 insertions(+), 1336 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8c6590d..a15e049a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,1980 +1,5481 @@ -# Changelog +# CHANGELOG + + +## v0.136.0 (2024-10-26) + +### Bug Fixes + +* fix: update python-semantic-release to fix release process (#1426) ([`2f20155`](https://github.com/python-zeroconf/python-zeroconf/commit/2f201558d0ab089cdfebb18d2d7bb5785b2cce16)) + +* fix: add ignore for .c file for wheels (#1424) ([`6535963`](https://github.com/python-zeroconf/python-zeroconf/commit/6535963b5b789ce445e77bb728a5b7ee4263e582)) + +* fix: correct typos (#1422) ([`3991b42`](https://github.com/python-zeroconf/python-zeroconf/commit/3991b4256b8de5b37db7a6144e5112f711b2efef)) + +### Features + +* feat: use SPDX license identifier (#1425) ([`1596145`](https://github.com/python-zeroconf/python-zeroconf/commit/1596145452721e0de4e2a724b055e8e290792d3e)) - ## v0.135.0 (2024-09-24) -### Feature +### Features + +* feat: improve performance of DNSCache backend (#1415) ([`1df2e69`](https://github.com/python-zeroconf/python-zeroconf/commit/1df2e691ff11c9592e1cdad5599fb6601eb1aa3f)) -* Improve performance of DNSCache backend 
([#1415](https://github.com/python-zeroconf/python-zeroconf/issues/1415)) ([`1df2e69`](https://github.com/python-zeroconf/python-zeroconf/commit/1df2e691ff11c9592e1cdad5599fb6601eb1aa3f)) ## v0.134.0 (2024-09-08) -### Feature +### Bug Fixes + +* fix: improve helpfulness of ServiceInfo.request assertions (#1408) ([`9262626`](https://github.com/python-zeroconf/python-zeroconf/commit/9262626895d354ed7376aa567043b793c37a985e)) -* Improve performance when IP addresses change frequently ([#1407](https://github.com/python-zeroconf/python-zeroconf/issues/1407)) ([`111c91a`](https://github.com/python-zeroconf/python-zeroconf/commit/111c91ab395a7520e477eb0e75d5924fba3c64c7)) +### Features -### Fix +* feat: improve performance when IP addresses change frequently (#1407) ([`111c91a`](https://github.com/python-zeroconf/python-zeroconf/commit/111c91ab395a7520e477eb0e75d5924fba3c64c7)) -* Improve helpfulness of ServiceInfo.request assertions ([#1408](https://github.com/python-zeroconf/python-zeroconf/issues/1408)) ([`9262626`](https://github.com/python-zeroconf/python-zeroconf/commit/9262626895d354ed7376aa567043b793c37a985e)) ## v0.133.0 (2024-08-27) -### Feature +### Features + +* feat: improve performance of ip address caching (#1392) ([`f7c7708`](https://github.com/python-zeroconf/python-zeroconf/commit/f7c77081b2f8c70b1ed6a9b9751a86cf91f9aae2)) + +* feat: enable building of arm64 macOS builds (#1384) + +Co-authored-by: Alex Ciobanu +Co-authored-by: J. 
Nick Koston ([`0df2ce0`](https://github.com/python-zeroconf/python-zeroconf/commit/0df2ce0e6f7313831da6a63d477019982d5df55c)) + +* feat: add classifier for python 3.13 (#1393) ([`7fb2bb2`](https://github.com/python-zeroconf/python-zeroconf/commit/7fb2bb21421c70db0eb288fa7e73d955f58b0f5d)) + +* feat: python 3.13 support (#1390) ([`98cfa83`](https://github.com/python-zeroconf/python-zeroconf/commit/98cfa83710e43880698353821bae61108b08cb2f)) -* Improve performance of ip address caching ([#1392](https://github.com/python-zeroconf/python-zeroconf/issues/1392)) ([`f7c7708`](https://github.com/python-zeroconf/python-zeroconf/commit/f7c77081b2f8c70b1ed6a9b9751a86cf91f9aae2)) -* Enable building of arm64 macOS builds ([#1384](https://github.com/python-zeroconf/python-zeroconf/issues/1384)) ([`0df2ce0`](https://github.com/python-zeroconf/python-zeroconf/commit/0df2ce0e6f7313831da6a63d477019982d5df55c)) -* Add classifier for python 3.13 ([#1393](https://github.com/python-zeroconf/python-zeroconf/issues/1393)) ([`7fb2bb2`](https://github.com/python-zeroconf/python-zeroconf/commit/7fb2bb21421c70db0eb288fa7e73d955f58b0f5d)) -* Python 3.13 support ([#1390](https://github.com/python-zeroconf/python-zeroconf/issues/1390)) ([`98cfa83`](https://github.com/python-zeroconf/python-zeroconf/commit/98cfa83710e43880698353821bae61108b08cb2f)) ## v0.132.2 (2024-04-13) -### Fix +### Bug Fixes + +* fix: update references to minimum-supported python version of 3.8 (#1369) ([`599524a`](https://github.com/python-zeroconf/python-zeroconf/commit/599524a5ce1e4c1731519dd89377c2a852e59935)) + +* fix: bump cibuildwheel to fix wheel builds (#1371) ([`83e4ce3`](https://github.com/python-zeroconf/python-zeroconf/commit/83e4ce3e31ddd4ae9aec2f8c9d84d7a93f8be210)) -* Update references to minimum-supported python version of 3.8 ([#1369](https://github.com/python-zeroconf/python-zeroconf/issues/1369)) 
([`599524a`](https://github.com/python-zeroconf/python-zeroconf/commit/599524a5ce1e4c1731519dd89377c2a852e59935)) -* Bump cibuildwheel to fix wheel builds ([#1371](https://github.com/python-zeroconf/python-zeroconf/issues/1371)) ([`83e4ce3`](https://github.com/python-zeroconf/python-zeroconf/commit/83e4ce3e31ddd4ae9aec2f8c9d84d7a93f8be210)) ## v0.132.1 (2024-04-12) -### Fix +### Bug Fixes + +* fix: set change during iteration when dispatching listeners (#1370) ([`e9f8aa5`](https://github.com/python-zeroconf/python-zeroconf/commit/e9f8aa5741ae2d490c33a562b459f0af1014dbb0)) -* Set change during iteration when dispatching listeners ([#1370](https://github.com/python-zeroconf/python-zeroconf/issues/1370)) ([`e9f8aa5`](https://github.com/python-zeroconf/python-zeroconf/commit/e9f8aa5741ae2d490c33a562b459f0af1014dbb0)) ## v0.132.0 (2024-04-01) -### Feature +### Bug Fixes + +* fix: avoid including scope_id in IPv6Address object if its zero (#1367) ([`edc4a55`](https://github.com/python-zeroconf/python-zeroconf/commit/edc4a556819956c238a11332052000dcbcb07e3d)) + +### Features -* Make async_get_service_info available on the Zeroconf object ([#1366](https://github.com/python-zeroconf/python-zeroconf/issues/1366)) ([`c4c2dee`](https://github.com/python-zeroconf/python-zeroconf/commit/c4c2deeb05279ddbb0eba1330c7ae58795fea001)) -* Drop python 3.7 support ([#1359](https://github.com/python-zeroconf/python-zeroconf/issues/1359)) ([`4877829`](https://github.com/python-zeroconf/python-zeroconf/commit/4877829e6442de5426db152d11827b1ba85dbf59)) +* feat: make async_get_service_info available on the Zeroconf object (#1366) ([`c4c2dee`](https://github.com/python-zeroconf/python-zeroconf/commit/c4c2deeb05279ddbb0eba1330c7ae58795fea001)) -### Fix +* feat: drop python 3.7 support (#1359) ([`4877829`](https://github.com/python-zeroconf/python-zeroconf/commit/4877829e6442de5426db152d11827b1ba85dbf59)) -* Avoid including scope_id in IPv6Address object if its zero 
([#1367](https://github.com/python-zeroconf/python-zeroconf/issues/1367)) ([`edc4a55`](https://github.com/python-zeroconf/python-zeroconf/commit/edc4a556819956c238a11332052000dcbcb07e3d)) ## v0.131.0 (2023-12-19) -### Feature +### Features + +* feat: small speed up to constructing outgoing packets (#1354) ([`517d7d0`](https://github.com/python-zeroconf/python-zeroconf/commit/517d7d00ca7738c770077738125aec0e4824c000)) + +* feat: speed up processing incoming packets (#1352) ([`6c15325`](https://github.com/python-zeroconf/python-zeroconf/commit/6c153258a995cf9459a6f23267b7e379b5e2550f)) + +* feat: speed up the query handler (#1350) ([`9eac0a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9eac0a122f28a7a4fa76cbfdda21d9a3571d7abb)) -* Small speed up to constructing outgoing packets ([#1354](https://github.com/python-zeroconf/python-zeroconf/issues/1354)) ([`517d7d0`](https://github.com/python-zeroconf/python-zeroconf/commit/517d7d00ca7738c770077738125aec0e4824c000)) -* Speed up processing incoming packets ([#1352](https://github.com/python-zeroconf/python-zeroconf/issues/1352)) ([`6c15325`](https://github.com/python-zeroconf/python-zeroconf/commit/6c153258a995cf9459a6f23267b7e379b5e2550f)) -* Speed up the query handler ([#1350](https://github.com/python-zeroconf/python-zeroconf/issues/1350)) ([`9eac0a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9eac0a122f28a7a4fa76cbfdda21d9a3571d7abb)) ## v0.130.0 (2023-12-16) -### Feature +### Bug Fixes + +* fix: scheduling race with the QueryScheduler (#1347) ([`cf40470`](https://github.com/python-zeroconf/python-zeroconf/commit/cf40470b89f918d3c24d7889d3536f3ffa44846c)) + +* fix: ensure question history suppresses duplicates (#1338) ([`6f23656`](https://github.com/python-zeroconf/python-zeroconf/commit/6f23656576daa04e3de44e100f3ddd60ee4c560d)) + +* fix: microsecond precision loss in the query handler (#1339) 
([`6560fad`](https://github.com/python-zeroconf/python-zeroconf/commit/6560fad584e0d392962c9a9248759f17c416620e)) + +* fix: ensure IPv6 scoped address construction uses the string cache (#1336) ([`f78a196`](https://github.com/python-zeroconf/python-zeroconf/commit/f78a196db632c4fe017a34f1af8a58903c15a575)) + +### Features + +* feat: make ServiceInfo aware of question history (#1348) ([`b9aae1d`](https://github.com/python-zeroconf/python-zeroconf/commit/b9aae1de07bf1491e873bc314f8a1d7996127ad3)) + +* feat: small speed up to ServiceInfo construction (#1346) ([`b329d99`](https://github.com/python-zeroconf/python-zeroconf/commit/b329d99917bb731b4c70bf20c7c010eeb85ad9fd)) + +* feat: significantly improve efficiency of the ServiceBrowser scheduler (#1335) ([`c65d869`](https://github.com/python-zeroconf/python-zeroconf/commit/c65d869aec731b803484871e9d242a984f9f5848)) + +* feat: small speed up to processing incoming records (#1345) ([`7de655b`](https://github.com/python-zeroconf/python-zeroconf/commit/7de655b6f05012f20a3671e0bcdd44a1913d7b52)) -* Make ServiceInfo aware of question history ([#1348](https://github.com/python-zeroconf/python-zeroconf/issues/1348)) ([`b9aae1d`](https://github.com/python-zeroconf/python-zeroconf/commit/b9aae1de07bf1491e873bc314f8a1d7996127ad3)) -* Small speed up to ServiceInfo construction ([#1346](https://github.com/python-zeroconf/python-zeroconf/issues/1346)) ([`b329d99`](https://github.com/python-zeroconf/python-zeroconf/commit/b329d99917bb731b4c70bf20c7c010eeb85ad9fd)) -* Significantly improve efficiency of the ServiceBrowser scheduler ([#1335](https://github.com/python-zeroconf/python-zeroconf/issues/1335)) ([`c65d869`](https://github.com/python-zeroconf/python-zeroconf/commit/c65d869aec731b803484871e9d242a984f9f5848)) -* Small speed up to processing incoming records ([#1345](https://github.com/python-zeroconf/python-zeroconf/issues/1345)) 
([`7de655b`](https://github.com/python-zeroconf/python-zeroconf/commit/7de655b6f05012f20a3671e0bcdd44a1913d7b52)) -* Small performance improvement for converting time ([#1342](https://github.com/python-zeroconf/python-zeroconf/issues/1342)) ([`73d3ab9`](https://github.com/python-zeroconf/python-zeroconf/commit/73d3ab90dd3b59caab771235dd6dbedf05bfe0b3)) -* Small performance improvement for ServiceInfo asking questions ([#1341](https://github.com/python-zeroconf/python-zeroconf/issues/1341)) ([`810a309`](https://github.com/python-zeroconf/python-zeroconf/commit/810a3093c5a9411ee97740b468bd706bdf4a95de)) -* Small performance improvement constructing outgoing questions ([#1340](https://github.com/python-zeroconf/python-zeroconf/issues/1340)) ([`157185f`](https://github.com/python-zeroconf/python-zeroconf/commit/157185f28bf1e83e6811e2a5cd1fa9b38966f780)) +* feat: small performance improvement for converting time (#1342) ([`73d3ab9`](https://github.com/python-zeroconf/python-zeroconf/commit/73d3ab90dd3b59caab771235dd6dbedf05bfe0b3)) -### Fix +* feat: small performance improvement for ServiceInfo asking questions (#1341) ([`810a309`](https://github.com/python-zeroconf/python-zeroconf/commit/810a3093c5a9411ee97740b468bd706bdf4a95de)) + +* feat: small performance improvement constructing outgoing questions (#1340) ([`157185f`](https://github.com/python-zeroconf/python-zeroconf/commit/157185f28bf1e83e6811e2a5cd1fa9b38966f780)) -* Scheduling race with the QueryScheduler ([#1347](https://github.com/python-zeroconf/python-zeroconf/issues/1347)) ([`cf40470`](https://github.com/python-zeroconf/python-zeroconf/commit/cf40470b89f918d3c24d7889d3536f3ffa44846c)) -* Ensure question history suppresses duplicates ([#1338](https://github.com/python-zeroconf/python-zeroconf/issues/1338)) ([`6f23656`](https://github.com/python-zeroconf/python-zeroconf/commit/6f23656576daa04e3de44e100f3ddd60ee4c560d)) -* Microsecond precision loss in the query handler 
([#1339](https://github.com/python-zeroconf/python-zeroconf/issues/1339)) ([`6560fad`](https://github.com/python-zeroconf/python-zeroconf/commit/6560fad584e0d392962c9a9248759f17c416620e)) -* Ensure IPv6 scoped address construction uses the string cache ([#1336](https://github.com/python-zeroconf/python-zeroconf/issues/1336)) ([`f78a196`](https://github.com/python-zeroconf/python-zeroconf/commit/f78a196db632c4fe017a34f1af8a58903c15a575)) ## v0.129.0 (2023-12-13) -### Feature +### Features + +* feat: add decoded_properties method to ServiceInfo (#1332) ([`9b595a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9b595a1dcacf109c699953219d70fe36296c7318)) -* Add decoded_properties method to ServiceInfo ([#1332](https://github.com/python-zeroconf/python-zeroconf/issues/1332)) ([`9b595a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9b595a1dcacf109c699953219d70fe36296c7318)) -* Ensure ServiceInfo.properties always returns bytes ([#1333](https://github.com/python-zeroconf/python-zeroconf/issues/1333)) ([`d29553a`](https://github.com/python-zeroconf/python-zeroconf/commit/d29553ab7de6b7af70769ddb804fe2aaf492f320)) -* Cache is_unspecified for zeroconf ip address objects ([#1331](https://github.com/python-zeroconf/python-zeroconf/issues/1331)) ([`a1c84dc`](https://github.com/python-zeroconf/python-zeroconf/commit/a1c84dc6adeebd155faec1a647c0f70d70de2945)) +* feat: ensure ServiceInfo.properties always returns bytes (#1333) ([`d29553a`](https://github.com/python-zeroconf/python-zeroconf/commit/d29553ab7de6b7af70769ddb804fe2aaf492f320)) -### Technically breaking change +* feat: cache is_unspecified for zeroconf ip address objects (#1331) ([`a1c84dc`](https://github.com/python-zeroconf/python-zeroconf/commit/a1c84dc6adeebd155faec1a647c0f70d70de2945)) -* `ServiceInfo.properties` always returns a dictionary with type `dict[bytes, bytes | None]` instead of a mix `str` and `bytes`. 
It was only possible to get a mixed dictionary if it was manually passed in when `ServiceInfo` was constructed. ## v0.128.5 (2023-12-13) -### Fix +### Bug Fixes + +* fix: performance regression with ServiceInfo IPv6Addresses (#1330) ([`e2f9f81`](https://github.com/python-zeroconf/python-zeroconf/commit/e2f9f81dbc54c3dd527eeb3298897d63f99d33f4)) -* Performance regression with ServiceInfo IPv6Addresses ([#1330](https://github.com/python-zeroconf/python-zeroconf/issues/1330)) ([`e2f9f81`](https://github.com/python-zeroconf/python-zeroconf/commit/e2f9f81dbc54c3dd527eeb3298897d63f99d33f4)) ## v0.128.4 (2023-12-10) -### Fix +### Bug Fixes + +* fix: re-expose ServiceInfo._set_properties for backwards compat (#1327) ([`39c4005`](https://github.com/python-zeroconf/python-zeroconf/commit/39c40051d7a63bdc63a3e2dfa20bd944fee4e761)) -* Re-expose ServiceInfo._set_properties for backwards compat ([#1327](https://github.com/python-zeroconf/python-zeroconf/issues/1327)) ([`39c4005`](https://github.com/python-zeroconf/python-zeroconf/commit/39c40051d7a63bdc63a3e2dfa20bd944fee4e761)) ## v0.128.3 (2023-12-10) -### Fix +### Bug Fixes + +* fix: correct nsec record writing (#1326) ([`cd7a16a`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7a16a32c37b2f7a2e90d3c749525a5393bad57)) -* Correct nsec record writing ([#1326](https://github.com/python-zeroconf/python-zeroconf/issues/1326)) ([`cd7a16a`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7a16a32c37b2f7a2e90d3c749525a5393bad57)) ## v0.128.2 (2023-12-10) -### Fix +### Bug Fixes + +* fix: timestamps missing double precision (#1324) ([`ecea4e4`](https://github.com/python-zeroconf/python-zeroconf/commit/ecea4e4217892ca8cf763074ac3e5d1b898acd21)) + +* fix: match cython version for dev deps to build deps (#1325) ([`a0dac46`](https://github.com/python-zeroconf/python-zeroconf/commit/a0dac46c01202b3d5a0823ac1928fc1d75332522)) -* Timestamps missing double precision 
([#1324](https://github.com/python-zeroconf/python-zeroconf/issues/1324)) ([`ecea4e4`](https://github.com/python-zeroconf/python-zeroconf/commit/ecea4e4217892ca8cf763074ac3e5d1b898acd21)) -* Match cython version for dev deps to build deps ([#1325](https://github.com/python-zeroconf/python-zeroconf/issues/1325)) ([`a0dac46`](https://github.com/python-zeroconf/python-zeroconf/commit/a0dac46c01202b3d5a0823ac1928fc1d75332522)) ## v0.128.1 (2023-12-10) -### Fix +### Bug Fixes + +* fix: correct handling of IPv6 addresses with scope_id in ServiceInfo (#1322) ([`1682991`](https://github.com/python-zeroconf/python-zeroconf/commit/1682991b985b1f7b2bf0cff1a7eb7793070e7cb1)) -* Correct handling of IPv6 addresses with scope_id in ServiceInfo ([#1322](https://github.com/python-zeroconf/python-zeroconf/issues/1322)) ([`1682991`](https://github.com/python-zeroconf/python-zeroconf/commit/1682991b985b1f7b2bf0cff1a7eb7793070e7cb1)) ## v0.128.0 (2023-12-02) -### Feature +### Features + +* feat: speed up unpacking TXT record data in ServiceInfo (#1318) ([`a200842`](https://github.com/python-zeroconf/python-zeroconf/commit/a20084281e66bdb9c37183a5eb992435f5b866ac)) -* Speed up unpacking TXT record data in ServiceInfo ([#1318](https://github.com/python-zeroconf/python-zeroconf/issues/1318)) ([`a200842`](https://github.com/python-zeroconf/python-zeroconf/commit/a20084281e66bdb9c37183a5eb992435f5b866ac)) ## v0.127.0 (2023-11-15) -### Feature +### Features + +* feat: small speed up to writing outgoing packets (#1316) ([`cd28476`](https://github.com/python-zeroconf/python-zeroconf/commit/cd28476f6b0a6c2c733273fb24ddaac6c7bbdf65)) + +* feat: speed up incoming packet reader (#1314) ([`0d60b61`](https://github.com/python-zeroconf/python-zeroconf/commit/0d60b61538a5d4b6f44b2369333b6e916a0a55b4)) + +* feat: small speed up to processing incoming dns records (#1315) ([`bfe4c24`](https://github.com/python-zeroconf/python-zeroconf/commit/bfe4c24881a7259713425df5ab00ffe487518841)) -* Small speed up to 
writing outgoing packets ([#1316](https://github.com/python-zeroconf/python-zeroconf/issues/1316)) ([`cd28476`](https://github.com/python-zeroconf/python-zeroconf/commit/cd28476f6b0a6c2c733273fb24ddaac6c7bbdf65)) -* Speed up incoming packet reader ([#1314](https://github.com/python-zeroconf/python-zeroconf/issues/1314)) ([`0d60b61`](https://github.com/python-zeroconf/python-zeroconf/commit/0d60b61538a5d4b6f44b2369333b6e916a0a55b4)) -* Small speed up to processing incoming dns records ([#1315](https://github.com/python-zeroconf/python-zeroconf/issues/1315)) ([`bfe4c24`](https://github.com/python-zeroconf/python-zeroconf/commit/bfe4c24881a7259713425df5ab00ffe487518841)) ## v0.126.0 (2023-11-13) -### Feature +### Features + +* feat: speed up outgoing packet writer (#1313) ([`55cf4cc`](https://github.com/python-zeroconf/python-zeroconf/commit/55cf4ccdff886a136db4e2133d3e6cdd001a8bd6)) + +* feat: speed up writing name compression for outgoing packets (#1312) ([`9caeabb`](https://github.com/python-zeroconf/python-zeroconf/commit/9caeabb6d4659a25ea1251c1ee7bb824e05f3d8b)) -* Speed up outgoing packet writer ([#1313](https://github.com/python-zeroconf/python-zeroconf/issues/1313)) ([`55cf4cc`](https://github.com/python-zeroconf/python-zeroconf/commit/55cf4ccdff886a136db4e2133d3e6cdd001a8bd6)) -* Speed up writing name compression for outgoing packets ([#1312](https://github.com/python-zeroconf/python-zeroconf/issues/1312)) ([`9caeabb`](https://github.com/python-zeroconf/python-zeroconf/commit/9caeabb6d4659a25ea1251c1ee7bb824e05f3d8b)) ## v0.125.0 (2023-11-12) -### Feature +### Features + +* feat: speed up service browser queries when browsing many types (#1311) ([`d192d33`](https://github.com/python-zeroconf/python-zeroconf/commit/d192d33b1f05aa95a89965e86210aec086673a17)) -* Speed up service browser queries when browsing many types ([#1311](https://github.com/python-zeroconf/python-zeroconf/issues/1311)) 
([`d192d33`](https://github.com/python-zeroconf/python-zeroconf/commit/d192d33b1f05aa95a89965e86210aec086673a17)) ## v0.124.0 (2023-11-12) -### Feature +### Features + +* feat: avoid decoding known answers if we have no answers to give (#1308) ([`605dc9c`](https://github.com/python-zeroconf/python-zeroconf/commit/605dc9ccd843a535802031f051b3d93310186ad1)) + +* feat: small speed up to process incoming packets (#1309) ([`56ef908`](https://github.com/python-zeroconf/python-zeroconf/commit/56ef90865189c01d2207abcc5e2efe3a7a022fa1)) -* Avoid decoding known answers if we have no answers to give ([#1308](https://github.com/python-zeroconf/python-zeroconf/issues/1308)) ([`605dc9c`](https://github.com/python-zeroconf/python-zeroconf/commit/605dc9ccd843a535802031f051b3d93310186ad1)) -* Small speed up to process incoming packets ([#1309](https://github.com/python-zeroconf/python-zeroconf/issues/1309)) ([`56ef908`](https://github.com/python-zeroconf/python-zeroconf/commit/56ef90865189c01d2207abcc5e2efe3a7a022fa1)) ## v0.123.0 (2023-11-12) -### Feature +### Features + +* feat: speed up instances only used to lookup answers (#1307) ([`0701b8a`](https://github.com/python-zeroconf/python-zeroconf/commit/0701b8ab6009891cbaddaa1d17116d31fd1b2f78)) -* Speed up instances only used to lookup answers ([#1307](https://github.com/python-zeroconf/python-zeroconf/issues/1307)) ([`0701b8a`](https://github.com/python-zeroconf/python-zeroconf/commit/0701b8ab6009891cbaddaa1d17116d31fd1b2f78)) ## v0.122.3 (2023-11-09) -### Fix +### Bug Fixes + +* fix: do not build musllinux aarch64 wheels to reduce release time (#1306) ([`79aafb0`](https://github.com/python-zeroconf/python-zeroconf/commit/79aafb0acf7ca6b17976be7ede748008deada27b)) -* Do not build musllinux aarch64 wheels to reduce release time ([#1306](https://github.com/python-zeroconf/python-zeroconf/issues/1306)) ([`79aafb0`](https://github.com/python-zeroconf/python-zeroconf/commit/79aafb0acf7ca6b17976be7ede748008deada27b)) ## v0.122.2 
(2023-11-09) -### Fix +### Bug Fixes + +* fix: do not build aarch64 wheels for PyPy (#1305) ([`7e884db`](https://github.com/python-zeroconf/python-zeroconf/commit/7e884db4d958459e64257aba860dba2450db0687)) -* Do not build aarch64 wheels for PyPy ([#1305](https://github.com/python-zeroconf/python-zeroconf/issues/1305)) ([`7e884db`](https://github.com/python-zeroconf/python-zeroconf/commit/7e884db4d958459e64257aba860dba2450db0687)) ## v0.122.1 (2023-11-09) -### Fix +### Bug Fixes + +* fix: skip wheel builds for eol python and older python with aarch64 (#1304) ([`6c8f5a5`](https://github.com/python-zeroconf/python-zeroconf/commit/6c8f5a5dec2072aa6a8f889c5d8a4623ab392234)) -* Skip wheel builds for eol python and older python with aarch64 ([#1304](https://github.com/python-zeroconf/python-zeroconf/issues/1304)) ([`6c8f5a5`](https://github.com/python-zeroconf/python-zeroconf/commit/6c8f5a5dec2072aa6a8f889c5d8a4623ab392234)) ## v0.122.0 (2023-11-08) -### Feature +### Features + +* feat: build aarch64 wheels (#1302) ([`4fe58e2`](https://github.com/python-zeroconf/python-zeroconf/commit/4fe58e2edc6da64a8ece0e2b16ec9ebfc5b3cd83)) -* Build aarch64 wheels ([#1302](https://github.com/python-zeroconf/python-zeroconf/issues/1302)) ([`4fe58e2`](https://github.com/python-zeroconf/python-zeroconf/commit/4fe58e2edc6da64a8ece0e2b16ec9ebfc5b3cd83)) ## v0.121.0 (2023-11-08) -### Feature +### Features + +* feat: speed up record updates (#1301) ([`d2af6a0`](https://github.com/python-zeroconf/python-zeroconf/commit/d2af6a0978f5abe4f8bb70d3e29d9836d0fd77c4)) -* Speed up record updates ([#1301](https://github.com/python-zeroconf/python-zeroconf/issues/1301)) ([`d2af6a0`](https://github.com/python-zeroconf/python-zeroconf/commit/d2af6a0978f5abe4f8bb70d3e29d9836d0fd77c4)) ## v0.120.0 (2023-11-05) -### Feature +### Features + +* feat: speed up incoming packet processing with a memory view (#1290) 
([`f1f0a25`](https://github.com/python-zeroconf/python-zeroconf/commit/f1f0a2504afd4d29bc6b7cf715cd3cb81b9049f7)) + +* feat: speed up decoding labels from incoming data (#1291) ([`c37ead4`](https://github.com/python-zeroconf/python-zeroconf/commit/c37ead4d7000607e81706a97b4cdffd80cf8cf99)) + +* feat: speed up ServiceBrowsers with a pxd for the signal interface (#1289) ([`8a17f20`](https://github.com/python-zeroconf/python-zeroconf/commit/8a17f2053a89db4beca9e8c1de4640faf27726b4)) -* Speed up incoming packet processing with a memory view ([#1290](https://github.com/python-zeroconf/python-zeroconf/issues/1290)) ([`f1f0a25`](https://github.com/python-zeroconf/python-zeroconf/commit/f1f0a2504afd4d29bc6b7cf715cd3cb81b9049f7)) -* Speed up decoding labels from incoming data ([#1291](https://github.com/python-zeroconf/python-zeroconf/issues/1291)) ([`c37ead4`](https://github.com/python-zeroconf/python-zeroconf/commit/c37ead4d7000607e81706a97b4cdffd80cf8cf99)) -* Speed up ServiceBrowsers with a pxd for the signal interface ([#1289](https://github.com/python-zeroconf/python-zeroconf/issues/1289)) ([`8a17f20`](https://github.com/python-zeroconf/python-zeroconf/commit/8a17f2053a89db4beca9e8c1de4640faf27726b4)) ## v0.119.0 (2023-10-18) -### Feature +### Features + +* feat: update cibuildwheel to build wheels on latest cython final release (#1285) ([`e8c9083`](https://github.com/python-zeroconf/python-zeroconf/commit/e8c9083bb118764a85b12fac9055152a2f62a212)) -* Update cibuildwheel to build wheels on latest cython final release ([#1285](https://github.com/python-zeroconf/python-zeroconf/issues/1285)) ([`e8c9083`](https://github.com/python-zeroconf/python-zeroconf/commit/e8c9083bb118764a85b12fac9055152a2f62a212)) ## v0.118.1 (2023-10-18) -### Fix +### Bug Fixes + +* fix: reduce size of wheels by excluding generated .c files (#1284) ([`b6afa4b`](https://github.com/python-zeroconf/python-zeroconf/commit/b6afa4b2775a1fdb090145eccdc5711c98e7147a)) -* Reduce size of wheels by 
excluding generated .c files ([#1284](https://github.com/python-zeroconf/python-zeroconf/issues/1284)) ([`b6afa4b`](https://github.com/python-zeroconf/python-zeroconf/commit/b6afa4b2775a1fdb090145eccdc5711c98e7147a)) ## v0.118.0 (2023-10-14) -### Feature +### Features + +* feat: small improvements to ServiceBrowser performance (#1283) ([`0fc031b`](https://github.com/python-zeroconf/python-zeroconf/commit/0fc031b1e7bf1766d5a1d39d70d300b86e36715e)) -* Small improvements to ServiceBrowser performance ([#1283](https://github.com/python-zeroconf/python-zeroconf/issues/1283)) ([`0fc031b`](https://github.com/python-zeroconf/python-zeroconf/commit/0fc031b1e7bf1766d5a1d39d70d300b86e36715e)) ## v0.117.0 (2023-10-14) -### Feature +### Features + +* feat: small cleanups to incoming data handlers (#1282) ([`4f4bd9f`](https://github.com/python-zeroconf/python-zeroconf/commit/4f4bd9ff7c1e575046e5ea213d9b8c91ac7a24a9)) -* Small cleanups to incoming data handlers ([#1282](https://github.com/python-zeroconf/python-zeroconf/issues/1282)) ([`4f4bd9f`](https://github.com/python-zeroconf/python-zeroconf/commit/4f4bd9ff7c1e575046e5ea213d9b8c91ac7a24a9)) ## v0.116.0 (2023-10-13) -### Feature +### Features + +* feat: reduce type checking overhead at run time (#1281) ([`8f30099`](https://github.com/python-zeroconf/python-zeroconf/commit/8f300996e5bd4316b2237f0502791dd0d6a855fe)) -* Reduce type checking overhead at run time ([#1281](https://github.com/python-zeroconf/python-zeroconf/issues/1281)) ([`8f30099`](https://github.com/python-zeroconf/python-zeroconf/commit/8f300996e5bd4316b2237f0502791dd0d6a855fe)) ## v0.115.2 (2023-10-05) -### Fix +### Bug Fixes + +* fix: ensure ServiceInfo cache is cleared when adding to the registry (#1279) + +* There were production use cases that mutated the service info and re-registered it that need to be accounted for ([`2060eb2`](https://github.com/python-zeroconf/python-zeroconf/commit/2060eb2cc43489c34bea08924c3f40b875d5a498)) -* Ensure ServiceInfo cache 
is cleared when adding to the registry ([#1279](https://github.com/python-zeroconf/python-zeroconf/issues/1279)) ([`2060eb2`](https://github.com/python-zeroconf/python-zeroconf/commit/2060eb2cc43489c34bea08924c3f40b875d5a498)) ## v0.115.1 (2023-10-01) -### Fix +### Bug Fixes + +* fix: add missing python definition for addresses_by_version (#1278) ([`52ee02b`](https://github.com/python-zeroconf/python-zeroconf/commit/52ee02b16860e344c402124f4b2e2869536ec839)) -* Add missing python definition for addresses_by_version ([#1278](https://github.com/python-zeroconf/python-zeroconf/issues/1278)) ([`52ee02b`](https://github.com/python-zeroconf/python-zeroconf/commit/52ee02b16860e344c402124f4b2e2869536ec839)) ## v0.115.0 (2023-09-26) -### Feature +### Features + +* feat: speed up outgoing multicast queue (#1277) ([`a13fd49`](https://github.com/python-zeroconf/python-zeroconf/commit/a13fd49d77474fd5858de809e48cbab1ccf89173)) -* Speed up outgoing multicast queue ([#1277](https://github.com/python-zeroconf/python-zeroconf/issues/1277)) ([`a13fd49`](https://github.com/python-zeroconf/python-zeroconf/commit/a13fd49d77474fd5858de809e48cbab1ccf89173)) ## v0.114.0 (2023-09-25) -### Feature +### Features + +* feat: speed up responding to queries (#1275) ([`3c6b18c`](https://github.com/python-zeroconf/python-zeroconf/commit/3c6b18cdf4c94773ad6f4497df98feb337939ee9)) -* Speed up responding to queries ([#1275](https://github.com/python-zeroconf/python-zeroconf/issues/1275)) ([`3c6b18c`](https://github.com/python-zeroconf/python-zeroconf/commit/3c6b18cdf4c94773ad6f4497df98feb337939ee9)) ## v0.113.0 (2023-09-24) -### Feature +### Features + +* feat: improve performance of loading records from cache in ServiceInfo (#1274) ([`6257d49`](https://github.com/python-zeroconf/python-zeroconf/commit/6257d49952e02107f800f4ad4894716508edfcda)) -* Improve performance of loading records from cache in ServiceInfo ([#1274](https://github.com/python-zeroconf/python-zeroconf/issues/1274)) 
([`6257d49`](https://github.com/python-zeroconf/python-zeroconf/commit/6257d49952e02107f800f4ad4894716508edfcda)) ## v0.112.0 (2023-09-14) -### Feature +### Features + +* feat: improve AsyncServiceBrowser performance (#1273) ([`0c88ecf`](https://github.com/python-zeroconf/python-zeroconf/commit/0c88ecf5ef6b9b256f991e7a630048de640999a6)) -* Improve AsyncServiceBrowser performance ([#1273](https://github.com/python-zeroconf/python-zeroconf/issues/1273)) ([`0c88ecf`](https://github.com/python-zeroconf/python-zeroconf/commit/0c88ecf5ef6b9b256f991e7a630048de640999a6)) ## v0.111.0 (2023-09-14) -### Feature +### Features + +* feat: speed up question and answer internals (#1272) ([`d24722b`](https://github.com/python-zeroconf/python-zeroconf/commit/d24722bfa4201d48ab482d35b0ef004f070ada80)) -* Speed up question and answer internals ([#1272](https://github.com/python-zeroconf/python-zeroconf/issues/1272)) ([`d24722b`](https://github.com/python-zeroconf/python-zeroconf/commit/d24722bfa4201d48ab482d35b0ef004f070ada80)) ## v0.110.0 (2023-09-14) -### Feature +### Features + +* feat: small speed ups to ServiceBrowser (#1271) ([`22c433d`](https://github.com/python-zeroconf/python-zeroconf/commit/22c433ddaea3049ac49933325ba938fd87a529c0)) -* Small speed ups to ServiceBrowser ([#1271](https://github.com/python-zeroconf/python-zeroconf/issues/1271)) ([`22c433d`](https://github.com/python-zeroconf/python-zeroconf/commit/22c433ddaea3049ac49933325ba938fd87a529c0)) ## v0.109.0 (2023-09-14) -### Feature +### Features + +* feat: speed up ServiceBrowsers with a cython pxd (#1270) ([`4837876`](https://github.com/python-zeroconf/python-zeroconf/commit/48378769c3887b5746ca00de30067a4c0851765c)) -* Speed up ServiceBrowsers with a cython pxd ([#1270](https://github.com/python-zeroconf/python-zeroconf/issues/1270)) ([`4837876`](https://github.com/python-zeroconf/python-zeroconf/commit/48378769c3887b5746ca00de30067a4c0851765c)) ## v0.108.0 (2023-09-11) -### Feature +### Features + +* feat: 
improve performance of constructing outgoing queries (#1267) ([`00c439a`](https://github.com/python-zeroconf/python-zeroconf/commit/00c439a6400b7850ef9fdd75bc8d82d4e64b1da0)) -* Improve performance of constructing outgoing queries ([#1267](https://github.com/python-zeroconf/python-zeroconf/issues/1267)) ([`00c439a`](https://github.com/python-zeroconf/python-zeroconf/commit/00c439a6400b7850ef9fdd75bc8d82d4e64b1da0)) ## v0.107.0 (2023-09-11) -### Feature +### Features + +* feat: speed up responding to queries (#1266) ([`24a0a00`](https://github.com/python-zeroconf/python-zeroconf/commit/24a0a00b3e457979e279a2eeadc8fad2ab09e125)) -* Speed up responding to queries ([#1266](https://github.com/python-zeroconf/python-zeroconf/issues/1266)) ([`24a0a00`](https://github.com/python-zeroconf/python-zeroconf/commit/24a0a00b3e457979e279a2eeadc8fad2ab09e125)) ## v0.106.0 (2023-09-11) -### Feature +### Features + +* feat: speed up answering questions (#1265) ([`37bfaf2`](https://github.com/python-zeroconf/python-zeroconf/commit/37bfaf2f630358e8c68652f3b3120931a6f94910)) -* Speed up answering questions ([#1265](https://github.com/python-zeroconf/python-zeroconf/issues/1265)) ([`37bfaf2`](https://github.com/python-zeroconf/python-zeroconf/commit/37bfaf2f630358e8c68652f3b3120931a6f94910)) ## v0.105.0 (2023-09-10) -### Feature +### Features + +* feat: speed up ServiceInfo with a cython pxd (#1264) ([`7ca690a`](https://github.com/python-zeroconf/python-zeroconf/commit/7ca690ac3fa75e7474d3412944bbd5056cb313dd)) -* Speed up ServiceInfo with a cython pxd ([#1264](https://github.com/python-zeroconf/python-zeroconf/issues/1264)) ([`7ca690a`](https://github.com/python-zeroconf/python-zeroconf/commit/7ca690ac3fa75e7474d3412944bbd5056cb313dd)) ## v0.104.0 (2023-09-10) -### Feature +### Features + +* feat: speed up generating answers (#1262) ([`50a8f06`](https://github.com/python-zeroconf/python-zeroconf/commit/50a8f066b6ab90bc9e3300f81cf9332550b720df)) -* Speed up generating answers 
([#1262](https://github.com/python-zeroconf/python-zeroconf/issues/1262)) ([`50a8f06`](https://github.com/python-zeroconf/python-zeroconf/commit/50a8f066b6ab90bc9e3300f81cf9332550b720df)) ## v0.103.0 (2023-09-09) -### Feature +### Features + +* feat: avoid calling get_running_loop when resolving ServiceInfo (#1261) ([`33a2714`](https://github.com/python-zeroconf/python-zeroconf/commit/33a2714cadff96edf016b869cc63b0661d16ef2c)) -* Avoid calling get_running_loop when resolving ServiceInfo ([#1261](https://github.com/python-zeroconf/python-zeroconf/issues/1261)) ([`33a2714`](https://github.com/python-zeroconf/python-zeroconf/commit/33a2714cadff96edf016b869cc63b0661d16ef2c)) ## v0.102.0 (2023-09-07) -### Feature +### Features + +* feat: significantly speed up writing outgoing dns records (#1260) ([`bf2f366`](https://github.com/python-zeroconf/python-zeroconf/commit/bf2f3660a1f341e50ab0ae586dfbacbc5ddcc077)) -* Significantly speed up writing outgoing dns records ([#1260](https://github.com/python-zeroconf/python-zeroconf/issues/1260)) ([`bf2f366`](https://github.com/python-zeroconf/python-zeroconf/commit/bf2f3660a1f341e50ab0ae586dfbacbc5ddcc077)) ## v0.101.0 (2023-09-07) -### Feature +### Features + +* feat: speed up writing outgoing dns records (#1259) ([`248655f`](https://github.com/python-zeroconf/python-zeroconf/commit/248655f0276223b089373c70ec13a0385dfaa4d6)) -* Speed up writing outgoing dns records ([#1259](https://github.com/python-zeroconf/python-zeroconf/issues/1259)) ([`248655f`](https://github.com/python-zeroconf/python-zeroconf/commit/248655f0276223b089373c70ec13a0385dfaa4d6)) ## v0.100.0 (2023-09-07) -### Feature +### Features + +* feat: small speed up to writing outgoing dns records (#1258) ([`1ed6bd2`](https://github.com/python-zeroconf/python-zeroconf/commit/1ed6bd2ec4db0612b71384f923ffff1efd3ce878)) -* Small speed up to writing outgoing dns records ([#1258](https://github.com/python-zeroconf/python-zeroconf/issues/1258)) 
([`1ed6bd2`](https://github.com/python-zeroconf/python-zeroconf/commit/1ed6bd2ec4db0612b71384f923ffff1efd3ce878)) ## v0.99.0 (2023-09-06) -### Feature +### Features + +* feat: reduce IP Address parsing overhead in ServiceInfo (#1257) ([`83d0b7f`](https://github.com/python-zeroconf/python-zeroconf/commit/83d0b7fda2eb09c9c6e18b85f329d1ddc701e3fb)) -* Reduce IP Address parsing overhead in ServiceInfo ([#1257](https://github.com/python-zeroconf/python-zeroconf/issues/1257)) ([`83d0b7f`](https://github.com/python-zeroconf/python-zeroconf/commit/83d0b7fda2eb09c9c6e18b85f329d1ddc701e3fb)) ## v0.98.0 (2023-09-06) -### Feature +### Features + +* feat: speed up decoding incoming packets (#1256) ([`ac081cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac081cf00addde1ceea2c076f73905fdb293de3a)) -* Speed up decoding incoming packets ([#1256](https://github.com/python-zeroconf/python-zeroconf/issues/1256)) ([`ac081cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac081cf00addde1ceea2c076f73905fdb293de3a)) ## v0.97.0 (2023-09-03) -### Feature +### Features + +* feat: speed up answering queries (#1255) ([`2d3aed3`](https://github.com/python-zeroconf/python-zeroconf/commit/2d3aed36e24c73013fcf4acc90803fc1737d0917)) -* Speed up answering queries ([#1255](https://github.com/python-zeroconf/python-zeroconf/issues/1255)) ([`2d3aed3`](https://github.com/python-zeroconf/python-zeroconf/commit/2d3aed36e24c73013fcf4acc90803fc1737d0917)) ## v0.96.0 (2023-09-03) -### Feature +### Features + +* feat: optimize DNSCache.get_by_details (#1254) + +* feat: optimize DNSCache.get_by_details + +This is one of the most called functions since ServiceInfo.load_from_cache calls +it + +* fix: make get_all_by_details thread-safe + +* fix: remove unneeded key checks ([`ce59787`](https://github.com/python-zeroconf/python-zeroconf/commit/ce59787a170781ffdaa22425018d288b395ac081)) -* Optimize DNSCache.get_by_details 
([#1254](https://github.com/python-zeroconf/python-zeroconf/issues/1254)) ([`ce59787`](https://github.com/python-zeroconf/python-zeroconf/commit/ce59787a170781ffdaa22425018d288b395ac081)) ## v0.95.0 (2023-09-03) -### Feature +### Features + +* feat: speed up adding and removing RecordUpdateListeners (#1253) ([`22e4a29`](https://github.com/python-zeroconf/python-zeroconf/commit/22e4a296d440b3038c0ff5ed6fc8878304ec4937)) -* Speed up adding and removing RecordUpdateListeners ([#1253](https://github.com/python-zeroconf/python-zeroconf/issues/1253)) ([`22e4a29`](https://github.com/python-zeroconf/python-zeroconf/commit/22e4a296d440b3038c0ff5ed6fc8878304ec4937)) ## v0.94.0 (2023-09-03) -### Feature +### Features + +* feat: optimize cache implementation (#1252) ([`8d3ec79`](https://github.com/python-zeroconf/python-zeroconf/commit/8d3ec792277aaf7ef790318b5b35ab00839ca3b3)) -* Optimize cache implementation ([#1252](https://github.com/python-zeroconf/python-zeroconf/issues/1252)) ([`8d3ec79`](https://github.com/python-zeroconf/python-zeroconf/commit/8d3ec792277aaf7ef790318b5b35ab00839ca3b3)) ## v0.93.1 (2023-09-03) -### Fix +### Bug Fixes + +* fix: no change re-release due to unrecoverable failed CI run (#1251) ([`730921b`](https://github.com/python-zeroconf/python-zeroconf/commit/730921b155dfb9c62251c8c643b1302e807aff3b)) -* No change re-release due to unrecoverable failed CI run ([#1251](https://github.com/python-zeroconf/python-zeroconf/issues/1251)) ([`730921b`](https://github.com/python-zeroconf/python-zeroconf/commit/730921b155dfb9c62251c8c643b1302e807aff3b)) ## v0.93.0 (2023-09-02) -### Feature +### Features + +* feat: reduce overhead to answer questions (#1250) ([`7cb8da0`](https://github.com/python-zeroconf/python-zeroconf/commit/7cb8da0c6c5c944588009fe36012c1197c422668)) -* Reduce overhead to answer questions ([#1250](https://github.com/python-zeroconf/python-zeroconf/issues/1250)) 
([`7cb8da0`](https://github.com/python-zeroconf/python-zeroconf/commit/7cb8da0c6c5c944588009fe36012c1197c422668)) ## v0.92.0 (2023-09-02) -### Feature +### Features + +* feat: cache construction of records used to answer queries from the service registry (#1243) ([`0890f62`](https://github.com/python-zeroconf/python-zeroconf/commit/0890f628dbbd577fb77d3e6f2e267052b2b2b515)) -* Cache construction of records used to answer queries from the service registry ([#1243](https://github.com/python-zeroconf/python-zeroconf/issues/1243)) ([`0890f62`](https://github.com/python-zeroconf/python-zeroconf/commit/0890f628dbbd577fb77d3e6f2e267052b2b2b515)) ## v0.91.1 (2023-09-02) -### Fix +### Bug Fixes + +* fix: remove useless calls in ServiceInfo (#1248) ([`4e40fae`](https://github.com/python-zeroconf/python-zeroconf/commit/4e40fae20bf50b4608e28fad4a360c4ed48ac86b)) -* Remove useless calls in ServiceInfo ([#1248](https://github.com/python-zeroconf/python-zeroconf/issues/1248)) ([`4e40fae`](https://github.com/python-zeroconf/python-zeroconf/commit/4e40fae20bf50b4608e28fad4a360c4ed48ac86b)) ## v0.91.0 (2023-09-02) -### Feature +### Features + +* feat: reduce overhead to process incoming updates by avoiding the handle_response shim (#1247) ([`5e31f0a`](https://github.com/python-zeroconf/python-zeroconf/commit/5e31f0afe4c341fbdbbbe50348a829ea553cbda0)) -* Reduce overhead to process incoming updates by avoiding the handle_response shim ([#1247](https://github.com/python-zeroconf/python-zeroconf/issues/1247)) ([`5e31f0a`](https://github.com/python-zeroconf/python-zeroconf/commit/5e31f0afe4c341fbdbbbe50348a829ea553cbda0)) ## v0.90.0 (2023-09-02) -### Feature +### Features + +* feat: avoid python float conversion in listener hot path (#1245) ([`816ad4d`](https://github.com/python-zeroconf/python-zeroconf/commit/816ad4dceb3859bad4bb136bdb1d1ee2daa0bf5a)) + +### Refactoring + +* refactor: reduce duplicate code in engine.py (#1246) 
([`36ae505`](https://github.com/python-zeroconf/python-zeroconf/commit/36ae505dc9f95b59fdfb632960845a45ba8575b8)) -* Avoid python float conversion in listener hot path ([#1245](https://github.com/python-zeroconf/python-zeroconf/issues/1245)) ([`816ad4d`](https://github.com/python-zeroconf/python-zeroconf/commit/816ad4dceb3859bad4bb136bdb1d1ee2daa0bf5a)) ## v0.89.0 (2023-09-02) -### Feature +### Features + +* feat: reduce overhead to process incoming questions (#1244) ([`18b65d1`](https://github.com/python-zeroconf/python-zeroconf/commit/18b65d1c75622869b0c29258215d3db3ae520d6c)) -* Reduce overhead to process incoming questions ([#1244](https://github.com/python-zeroconf/python-zeroconf/issues/1244)) ([`18b65d1`](https://github.com/python-zeroconf/python-zeroconf/commit/18b65d1c75622869b0c29258215d3db3ae520d6c)) ## v0.88.0 (2023-08-29) -### Feature +### Features + +* feat: speed up RecordManager with additional cython defs (#1242) ([`5a76fc5`](https://github.com/python-zeroconf/python-zeroconf/commit/5a76fc5ff74f2941ffbf7570e45390f35e0b7e01)) -* Speed up RecordManager with additional cython defs ([#1242](https://github.com/python-zeroconf/python-zeroconf/issues/1242)) ([`5a76fc5`](https://github.com/python-zeroconf/python-zeroconf/commit/5a76fc5ff74f2941ffbf7570e45390f35e0b7e01)) ## v0.87.0 (2023-08-29) -### Feature +### Features + +* feat: improve performance by adding cython pxd for RecordManager (#1241) ([`a7dad3d`](https://github.com/python-zeroconf/python-zeroconf/commit/a7dad3d9743586f352e21eea1e129c6875f9a713)) -* Improve performance by adding cython pxd for RecordManager ([#1241](https://github.com/python-zeroconf/python-zeroconf/issues/1241)) ([`a7dad3d`](https://github.com/python-zeroconf/python-zeroconf/commit/a7dad3d9743586f352e21eea1e129c6875f9a713)) ## v0.86.0 (2023-08-28) -### Feature +### Features + +* feat: build wheels for cpython 3.12 (#1239) 
([`58bc154`](https://github.com/python-zeroconf/python-zeroconf/commit/58bc154f55b06b4ddfc4a141592488abe76f062a)) + +* feat: use server_key when processing DNSService records (#1238) ([`cc8feb1`](https://github.com/python-zeroconf/python-zeroconf/commit/cc8feb110fefc3fb714fd482a52f16e2b620e8c4)) -* Build wheels for cpython 3.12 ([#1239](https://github.com/python-zeroconf/python-zeroconf/issues/1239)) ([`58bc154`](https://github.com/python-zeroconf/python-zeroconf/commit/58bc154f55b06b4ddfc4a141592488abe76f062a)) -* Use server_key when processing DNSService records ([#1238](https://github.com/python-zeroconf/python-zeroconf/issues/1238)) ([`cc8feb1`](https://github.com/python-zeroconf/python-zeroconf/commit/cc8feb110fefc3fb714fd482a52f16e2b620e8c4)) ## v0.85.0 (2023-08-27) -### Feature +### Features + +* feat: simplify code to unpack properties (#1237) ([`68d9998`](https://github.com/python-zeroconf/python-zeroconf/commit/68d99985a0e9d2c72ff670b2e2af92271a6fe934)) -* Simplify code to unpack properties ([#1237](https://github.com/python-zeroconf/python-zeroconf/issues/1237)) ([`68d9998`](https://github.com/python-zeroconf/python-zeroconf/commit/68d99985a0e9d2c72ff670b2e2af92271a6fe934)) ## v0.84.0 (2023-08-27) -### Feature +### Features + +* feat: context managers in ServiceBrowser and AsyncServiceBrowser (#1233) + +Co-authored-by: J. 
Nick Koston ([`bd8d846`](https://github.com/python-zeroconf/python-zeroconf/commit/bd8d8467dec2a39a0b525043ea1051259100fded)) -* Context managers in ServiceBrowser and AsyncServiceBrowser ([#1233](https://github.com/python-zeroconf/python-zeroconf/issues/1233)) ([`bd8d846`](https://github.com/python-zeroconf/python-zeroconf/commit/bd8d8467dec2a39a0b525043ea1051259100fded)) ## v0.83.1 (2023-08-27) -### Fix +### Bug Fixes + +* fix: rebuild wheels with cython 3.0.2 (#1236) ([`dd637fb`](https://github.com/python-zeroconf/python-zeroconf/commit/dd637fb2e5a87ba283750e69d116e124bef54e7c)) -* Rebuild wheels with cython 3.0.2 ([#1236](https://github.com/python-zeroconf/python-zeroconf/issues/1236)) ([`dd637fb`](https://github.com/python-zeroconf/python-zeroconf/commit/dd637fb2e5a87ba283750e69d116e124bef54e7c)) ## v0.83.0 (2023-08-26) -### Feature +### Features + +* feat: speed up question and answer history with a cython pxd (#1234) ([`703ecb2`](https://github.com/python-zeroconf/python-zeroconf/commit/703ecb2901b2150fb72fac3deed61d7302561298)) -* Speed up question and answer history with a cython pxd ([#1234](https://github.com/python-zeroconf/python-zeroconf/issues/1234)) ([`703ecb2`](https://github.com/python-zeroconf/python-zeroconf/commit/703ecb2901b2150fb72fac3deed61d7302561298)) ## v0.82.1 (2023-08-22) -### Fix +### Bug Fixes + +* fix: build failures with older cython 0.29 series (#1232) ([`30c3ad9`](https://github.com/python-zeroconf/python-zeroconf/commit/30c3ad9d1bc6b589e1ca6675fea21907ebcd1ced)) -* Build failures with older cython 0.29 series ([#1232](https://github.com/python-zeroconf/python-zeroconf/issues/1232)) ([`30c3ad9`](https://github.com/python-zeroconf/python-zeroconf/commit/30c3ad9d1bc6b589e1ca6675fea21907ebcd1ced)) ## v0.82.0 (2023-08-22) -### Feature +### Features + +* feat: optimize processing of records in RecordUpdateListener subclasses (#1231) 
([`3e89294`](https://github.com/python-zeroconf/python-zeroconf/commit/3e89294ea0ecee1122e1c1ffdc78925add8ca40e)) -* Optimize processing of records in RecordUpdateListener subclasses ([#1231](https://github.com/python-zeroconf/python-zeroconf/issues/1231)) ([`3e89294`](https://github.com/python-zeroconf/python-zeroconf/commit/3e89294ea0ecee1122e1c1ffdc78925add8ca40e)) ## v0.81.0 (2023-08-22) -### Feature +### Features + +* feat: speed up the service registry with a cython pxd (#1226) ([`47d3c7a`](https://github.com/python-zeroconf/python-zeroconf/commit/47d3c7ad4bc5f2247631c3ad5e6b6156d45a0a4e)) + +* feat: optimizing sending answers to questions (#1227) ([`cd7b56b`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7b56b2aa0c8ee429da430e9a36abd515512011)) -* Speed up the service registry with a cython pxd ([#1226](https://github.com/python-zeroconf/python-zeroconf/issues/1226)) ([`47d3c7a`](https://github.com/python-zeroconf/python-zeroconf/commit/47d3c7ad4bc5f2247631c3ad5e6b6156d45a0a4e)) -* Optimizing sending answers to questions ([#1227](https://github.com/python-zeroconf/python-zeroconf/issues/1227)) ([`cd7b56b`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7b56b2aa0c8ee429da430e9a36abd515512011)) ## v0.80.0 (2023-08-15) -### Feature +### Features + +* feat: optimize unpacking properties in ServiceInfo (#1225) ([`1492e41`](https://github.com/python-zeroconf/python-zeroconf/commit/1492e41b3d5cba5598cc9dd6bd2bc7d238f13555)) -* Optimize unpacking properties in ServiceInfo ([#1225](https://github.com/python-zeroconf/python-zeroconf/issues/1225)) ([`1492e41`](https://github.com/python-zeroconf/python-zeroconf/commit/1492e41b3d5cba5598cc9dd6bd2bc7d238f13555)) ## v0.79.0 (2023-08-14) -### Feature +### Features + +* feat: refactor notify implementation to reduce overhead of adding and removing listeners (#1224) ([`ceb92cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb92cfe42d885dbb38cee7aaeebf685d97627a9)) -* Refactor notify 
implementation to reduce overhead of adding and removing listeners ([#1224](https://github.com/python-zeroconf/python-zeroconf/issues/1224)) ([`ceb92cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb92cfe42d885dbb38cee7aaeebf685d97627a9)) ## v0.78.0 (2023-08-14) -### Feature +### Features + +* feat: add cython pxd file for _listener.py to improve incoming message processing performance (#1221) ([`f459856`](https://github.com/python-zeroconf/python-zeroconf/commit/f459856a0a61b8afa8a541926d7e15d51f8e4aea)) -* Add cython pxd file for _listener.py to improve incoming message processing performance ([#1221](https://github.com/python-zeroconf/python-zeroconf/issues/1221)) ([`f459856`](https://github.com/python-zeroconf/python-zeroconf/commit/f459856a0a61b8afa8a541926d7e15d51f8e4aea)) ## v0.77.0 (2023-08-14) -### Feature +### Features + +* feat: cythonize _listener.py to improve incoming message processing performance (#1220) ([`9efde8c`](https://github.com/python-zeroconf/python-zeroconf/commit/9efde8c8c1ed14c5d3c162f185b49212fcfcb5c9)) -* Cythonize _listener.py to improve incoming message processing performance ([#1220](https://github.com/python-zeroconf/python-zeroconf/issues/1220)) ([`9efde8c`](https://github.com/python-zeroconf/python-zeroconf/commit/9efde8c8c1ed14c5d3c162f185b49212fcfcb5c9)) ## v0.76.0 (2023-08-14) -### Feature +### Features + +* feat: improve performance responding to queries (#1217) ([`69b33be`](https://github.com/python-zeroconf/python-zeroconf/commit/69b33be3b2f9d4a27ef5154cae94afca048efffa)) -* Improve performance responding to queries ([#1217](https://github.com/python-zeroconf/python-zeroconf/issues/1217)) ([`69b33be`](https://github.com/python-zeroconf/python-zeroconf/commit/69b33be3b2f9d4a27ef5154cae94afca048efffa)) ## v0.75.0 (2023-08-13) -### Feature +### Features + +* feat: expose flag to disable strict name checking in service registration (#1215) 
([`5df8a57`](https://github.com/python-zeroconf/python-zeroconf/commit/5df8a57a14d59687a3c22ea8ee063e265031e278)) + +* feat: speed up processing incoming records (#1216) ([`aff625d`](https://github.com/python-zeroconf/python-zeroconf/commit/aff625dc6a5e816dad519644c4adac4f96980c04)) -* Expose flag to disable strict name checking in service registration ([#1215](https://github.com/python-zeroconf/python-zeroconf/issues/1215)) ([`5df8a57`](https://github.com/python-zeroconf/python-zeroconf/commit/5df8a57a14d59687a3c22ea8ee063e265031e278)) -* Speed up processing incoming records ([#1216](https://github.com/python-zeroconf/python-zeroconf/issues/1216)) ([`aff625d`](https://github.com/python-zeroconf/python-zeroconf/commit/aff625dc6a5e816dad519644c4adac4f96980c04)) ## v0.74.0 (2023-08-04) -### Feature +### Bug Fixes + +* fix: remove typing on reset_ttl for cython compat (#1213) ([`0094e26`](https://github.com/python-zeroconf/python-zeroconf/commit/0094e2684344c6b7edd7948924f093f1b4c19901)) -* Speed up unpacking text records in ServiceInfo ([#1212](https://github.com/python-zeroconf/python-zeroconf/issues/1212)) ([`99a6f98`](https://github.com/python-zeroconf/python-zeroconf/commit/99a6f98e44a1287ba537eabb852b1b69923402f0)) +### Features -### Fix +* feat: speed up unpacking text records in ServiceInfo (#1212) ([`99a6f98`](https://github.com/python-zeroconf/python-zeroconf/commit/99a6f98e44a1287ba537eabb852b1b69923402f0)) -* Remove typing on reset_ttl for cython compat ([#1213](https://github.com/python-zeroconf/python-zeroconf/issues/1213)) ([`0094e26`](https://github.com/python-zeroconf/python-zeroconf/commit/0094e2684344c6b7edd7948924f093f1b4c19901)) ## v0.73.0 (2023-08-03) -### Feature +### Features + +* feat: add a cache to service_type_name (#1211) ([`53a694f`](https://github.com/python-zeroconf/python-zeroconf/commit/53a694f60e675ae0560e727be6b721b401c2b68f)) -* Add a cache to service_type_name 
([#1211](https://github.com/python-zeroconf/python-zeroconf/issues/1211)) ([`53a694f`](https://github.com/python-zeroconf/python-zeroconf/commit/53a694f60e675ae0560e727be6b721b401c2b68f)) ## v0.72.3 (2023-08-03) -### Fix +### Bug Fixes + +* fix: revert adding typing to DNSRecord.suppressed_by (#1210) ([`3dba5ae`](https://github.com/python-zeroconf/python-zeroconf/commit/3dba5ae0c0e9473b7b20fd6fc79fa1a3b298dc5a)) -* Revert adding typing to DNSRecord.suppressed_by ([#1210](https://github.com/python-zeroconf/python-zeroconf/issues/1210)) ([`3dba5ae`](https://github.com/python-zeroconf/python-zeroconf/commit/3dba5ae0c0e9473b7b20fd6fc79fa1a3b298dc5a)) ## v0.72.2 (2023-08-03) -### Fix +### Bug Fixes + +* fix: revert DNSIncoming cimport in _dns.pxd (#1209) ([`5f14b6d`](https://github.com/python-zeroconf/python-zeroconf/commit/5f14b6dc687b3a0716d0ca7f61ccf1e93dfe5fa1)) -* Revert DNSIncoming cimport in _dns.pxd ([#1209](https://github.com/python-zeroconf/python-zeroconf/issues/1209)) ([`5f14b6d`](https://github.com/python-zeroconf/python-zeroconf/commit/5f14b6dc687b3a0716d0ca7f61ccf1e93dfe5fa1)) ## v0.72.1 (2023-08-03) -### Fix +### Bug Fixes + +* fix: race with InvalidStateError when async_request times out (#1208) ([`2233b6b`](https://github.com/python-zeroconf/python-zeroconf/commit/2233b6bc4ceeee5524d2ee88ecae8234173feb5f)) -* Race with InvalidStateError when async_request times out ([#1208](https://github.com/python-zeroconf/python-zeroconf/issues/1208)) ([`2233b6b`](https://github.com/python-zeroconf/python-zeroconf/commit/2233b6bc4ceeee5524d2ee88ecae8234173feb5f)) ## v0.72.0 (2023-08-02) -### Feature +### Features + +* feat: speed up processing incoming records (#1206) ([`126849c`](https://github.com/python-zeroconf/python-zeroconf/commit/126849c92be8cec9253fba9faa591029d992fcc3)) -* Speed up processing incoming records ([#1206](https://github.com/python-zeroconf/python-zeroconf/issues/1206)) 
([`126849c`](https://github.com/python-zeroconf/python-zeroconf/commit/126849c92be8cec9253fba9faa591029d992fcc3)) ## v0.71.5 (2023-08-02) -### Fix +### Bug Fixes + +* fix: improve performance of ServiceInfo.async_request (#1205) ([`8019a73`](https://github.com/python-zeroconf/python-zeroconf/commit/8019a73c952f2fc4c88d849aab970fafedb316d8)) -* Improve performance of ServiceInfo.async_request ([#1205](https://github.com/python-zeroconf/python-zeroconf/issues/1205)) ([`8019a73`](https://github.com/python-zeroconf/python-zeroconf/commit/8019a73c952f2fc4c88d849aab970fafedb316d8)) ## v0.71.4 (2023-07-24) -### Fix +### Bug Fixes + +* fix: cleanup naming from previous refactoring in ServiceInfo (#1202) ([`b272d75`](https://github.com/python-zeroconf/python-zeroconf/commit/b272d75abd982f3be1f4b20f683cac38011cc6f4)) -* Cleanup naming from previous refactoring in ServiceInfo ([#1202](https://github.com/python-zeroconf/python-zeroconf/issues/1202)) ([`b272d75`](https://github.com/python-zeroconf/python-zeroconf/commit/b272d75abd982f3be1f4b20f683cac38011cc6f4)) ## v0.71.3 (2023-07-23) -### Fix +### Bug Fixes + +* fix: pin python-semantic-release to fix release process (#1200) ([`c145a23`](https://github.com/python-zeroconf/python-zeroconf/commit/c145a238d768aa17c3aebe120c20a46bfbec6b99)) -* Pin python-semantic-release to fix release process ([#1200](https://github.com/python-zeroconf/python-zeroconf/issues/1200)) ([`c145a23`](https://github.com/python-zeroconf/python-zeroconf/commit/c145a238d768aa17c3aebe120c20a46bfbec6b99)) ## v0.71.2 (2023-07-23) -### Fix +### Bug Fixes + +* fix: no change re-release to fix wheel builds (#1199) ([`8c3a4c8`](https://github.com/python-zeroconf/python-zeroconf/commit/8c3a4c80c221bea7401c12e1c6a525e75b7ffea2)) -* No change re-release to fix wheel builds ([#1199](https://github.com/python-zeroconf/python-zeroconf/issues/1199)) ([`8c3a4c8`](https://github.com/python-zeroconf/python-zeroconf/commit/8c3a4c80c221bea7401c12e1c6a525e75b7ffea2)) ## 
v0.71.1 (2023-07-23) -### Fix +### Bug Fixes + +* fix: add missing if TYPE_CHECKING guard to generate_service_query (#1198) ([`ac53adf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac53adf7e71db14c1a0f9adbfd1d74033df36898)) -* Add missing if TYPE_CHECKING guard to generate_service_query ([#1198](https://github.com/python-zeroconf/python-zeroconf/issues/1198)) ([`ac53adf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac53adf7e71db14c1a0f9adbfd1d74033df36898)) ## v0.71.0 (2023-07-08) -### Feature +### Features + +* feat: improve incoming data processing performance (#1194) ([`a56c776`](https://github.com/python-zeroconf/python-zeroconf/commit/a56c776008ef86f99db78f5997e45a57551be725)) -* Improve incoming data processing performance ([#1194](https://github.com/python-zeroconf/python-zeroconf/issues/1194)) ([`a56c776`](https://github.com/python-zeroconf/python-zeroconf/commit/a56c776008ef86f99db78f5997e45a57551be725)) ## v0.70.0 (2023-07-02) -### Feature +### Features + +* feat: add support for sending to a specific `addr` and `port` with `ServiceInfo.async_request` and `ServiceInfo.request` (#1192) ([`405f547`](https://github.com/python-zeroconf/python-zeroconf/commit/405f54762d3f61e97de9c1787e837e953de31412)) -* Add support for sending to a specific `addr` and `port` with `ServiceInfo.async_request` and `ServiceInfo.request` ([#1192](https://github.com/python-zeroconf/python-zeroconf/issues/1192)) ([`405f547`](https://github.com/python-zeroconf/python-zeroconf/commit/405f54762d3f61e97de9c1787e837e953de31412)) ## v0.69.0 (2023-06-18) -### Feature +### Features + +* feat: cython3 support (#1190) ([`8ae8ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/8ae8ba1af324b0c8c2da3bd12c264a5c0f3dcc3d)) + +* feat: reorder incoming data handler to reduce overhead (#1189) ([`32756ff`](https://github.com/python-zeroconf/python-zeroconf/commit/32756ff113f675b7a9cf16d3c0ab840ba733e5e4)) -* Cython3 support 
([#1190](https://github.com/python-zeroconf/python-zeroconf/issues/1190)) ([`8ae8ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/8ae8ba1af324b0c8c2da3bd12c264a5c0f3dcc3d)) -* Reorder incoming data handler to reduce overhead ([#1189](https://github.com/python-zeroconf/python-zeroconf/issues/1189)) ([`32756ff`](https://github.com/python-zeroconf/python-zeroconf/commit/32756ff113f675b7a9cf16d3c0ab840ba733e5e4)) ## v0.68.1 (2023-06-18) -### Fix +### Bug Fixes + +* fix: reduce debug logging overhead by adding missing checks to datagram_received (#1188) ([`ac5c50a`](https://github.com/python-zeroconf/python-zeroconf/commit/ac5c50afc70aaa33fcd20bf02222ff4f0c596fa3)) -* Reduce debug logging overhead by adding missing checks to datagram_received ([#1188](https://github.com/python-zeroconf/python-zeroconf/issues/1188)) ([`ac5c50a`](https://github.com/python-zeroconf/python-zeroconf/commit/ac5c50afc70aaa33fcd20bf02222ff4f0c596fa3)) ## v0.68.0 (2023-06-17) -### Feature +### Features + +* feat: reduce overhead to handle queries and responses (#1184) + +- adds slots to handler classes + +- avoid any expression overhead and inline instead ([`81126b7`](https://github.com/python-zeroconf/python-zeroconf/commit/81126b7600f94848ef8c58b70bac0c6ab993c6ae)) -* Reduce overhead to handle queries and responses ([#1184](https://github.com/python-zeroconf/python-zeroconf/issues/1184)) ([`81126b7`](https://github.com/python-zeroconf/python-zeroconf/commit/81126b7600f94848ef8c58b70bac0c6ab993c6ae)) ## v0.67.0 (2023-06-17) -### Feature +### Features + +* feat: speed up answering incoming questions (#1186) ([`8f37665`](https://github.com/python-zeroconf/python-zeroconf/commit/8f376658d2a3bef0353646e6fddfda15626b73a9)) -* Speed up answering incoming questions ([#1186](https://github.com/python-zeroconf/python-zeroconf/issues/1186)) ([`8f37665`](https://github.com/python-zeroconf/python-zeroconf/commit/8f376658d2a3bef0353646e6fddfda15626b73a9)) ## v0.66.0 (2023-06-13) -### Feature 
-* Optimize construction of outgoing dns records ([#1182](https://github.com/python-zeroconf/python-zeroconf/issues/1182)) ([`fc0341f`](https://github.com/python-zeroconf/python-zeroconf/commit/fc0341f281cdb71428c0f1cf90c12d34cbb4acae)) + +### Features + +* feat: optimize construction of outgoing dns records (#1182) ([`fc0341f`](https://github.com/python-zeroconf/python-zeroconf/commit/fc0341f281cdb71428c0f1cf90c12d34cbb4acae)) + ## v0.65.0 (2023-06-13) -### Feature -* Reduce overhead to enumerate ip addresses in ServiceInfo ([#1181](https://github.com/python-zeroconf/python-zeroconf/issues/1181)) ([`6a85cbf`](https://github.com/python-zeroconf/python-zeroconf/commit/6a85cbf2b872cb0abd184c2dd728d9ae3eb8115c)) + +### Features + +* feat: reduce overhead to enumerate ip addresses in ServiceInfo (#1181) ([`6a85cbf`](https://github.com/python-zeroconf/python-zeroconf/commit/6a85cbf2b872cb0abd184c2dd728d9ae3eb8115c)) + ## v0.64.1 (2023-06-05) -### Fix -* Small internal typing cleanups ([#1180](https://github.com/python-zeroconf/python-zeroconf/issues/1180)) ([`f03e511`](https://github.com/python-zeroconf/python-zeroconf/commit/f03e511f7aae72c5ccd4f7514d89e168847bd7a2)) + +### Bug Fixes + +* fix: small internal typing cleanups (#1180) ([`f03e511`](https://github.com/python-zeroconf/python-zeroconf/commit/f03e511f7aae72c5ccd4f7514d89e168847bd7a2)) + ## v0.64.0 (2023-06-05) -### Feature -* Speed up processing incoming records ([#1179](https://github.com/python-zeroconf/python-zeroconf/issues/1179)) ([`d919316`](https://github.com/python-zeroconf/python-zeroconf/commit/d9193160b05beeca3755e19fd377ba13fe37b071)) -### Fix -* Always answer QU questions when the exact same packet is received from different sources in sequence ([#1178](https://github.com/python-zeroconf/python-zeroconf/issues/1178)) ([`74d7ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/74d7ba1aeeae56be087ee8142ee6ca1219744baa)) +### Bug Fixes + +* fix: always answer QU questions when the exact 
same packet is received from different sources in sequence (#1178) + +If the exact same packet with a QU question is asked from two different sources in a 1s window we end up ignoring the second one as a duplicate. We should still respond in this case because the client wants a unicast response and the question may not be answered by the previous packet since the response may not be multicast. + +fix: include NSEC records in initial broadcast when registering a new service + +This also revealed that we do not send NSEC records in the initial broadcast. This needed to be fixed in this PR as well for everything to work as expected since all the tests would fail with 2 updates otherwise. ([`74d7ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/74d7ba1aeeae56be087ee8142ee6ca1219744baa)) + +### Features + +* feat: speed up processing incoming records (#1179) ([`d919316`](https://github.com/python-zeroconf/python-zeroconf/commit/d9193160b05beeca3755e19fd377ba13fe37b071)) + ## v0.63.0 (2023-05-25) -### Feature -* Small speed up to fetch dns addresses from ServiceInfo ([#1176](https://github.com/python-zeroconf/python-zeroconf/issues/1176)) ([`4deaa6e`](https://github.com/python-zeroconf/python-zeroconf/commit/4deaa6ed7c9161db55bf16ec068ab7260bbd4976)) -* Speed up the service registry ([#1174](https://github.com/python-zeroconf/python-zeroconf/issues/1174)) ([`360ceb2`](https://github.com/python-zeroconf/python-zeroconf/commit/360ceb2548c4c4974ff798aac43a6fff9803ea0e)) -* Improve dns cache performance ([#1172](https://github.com/python-zeroconf/python-zeroconf/issues/1172)) ([`bb496a1`](https://github.com/python-zeroconf/python-zeroconf/commit/bb496a1dd5fa3562c0412cb064d14639a542592e)) + +### Features + +* feat: small speed up to fetch dns addresses from ServiceInfo (#1176) ([`4deaa6e`](https://github.com/python-zeroconf/python-zeroconf/commit/4deaa6ed7c9161db55bf16ec068ab7260bbd4976)) + +* feat: speed up the service registry (#1174) 
([`360ceb2`](https://github.com/python-zeroconf/python-zeroconf/commit/360ceb2548c4c4974ff798aac43a6fff9803ea0e)) + +* feat: improve dns cache performance (#1172) ([`bb496a1`](https://github.com/python-zeroconf/python-zeroconf/commit/bb496a1dd5fa3562c0412cb064d14639a542592e)) + ## v0.62.0 (2023-05-04) -### Feature -* Improve performance of ServiceBrowser outgoing query scheduler ([#1170](https://github.com/python-zeroconf/python-zeroconf/issues/1170)) ([`963d022`](https://github.com/python-zeroconf/python-zeroconf/commit/963d022ef82b615540fa7521d164a98a6c6f5209)) + +### Features + +* feat: improve performance of ServiceBrowser outgoing query scheduler (#1170) ([`963d022`](https://github.com/python-zeroconf/python-zeroconf/commit/963d022ef82b615540fa7521d164a98a6c6f5209)) + ## v0.61.0 (2023-05-03) -### Feature -* Speed up parsing NSEC records ([#1169](https://github.com/python-zeroconf/python-zeroconf/issues/1169)) ([`06fa94d`](https://github.com/python-zeroconf/python-zeroconf/commit/06fa94d87b4f0451cb475a921ce1d8e9562e0f26)) + +### Features + +* feat: speed up parsing NSEC records (#1169) ([`06fa94d`](https://github.com/python-zeroconf/python-zeroconf/commit/06fa94d87b4f0451cb475a921ce1d8e9562e0f26)) + ## v0.60.0 (2023-05-01) -### Feature -* Speed up processing incoming data ([#1167](https://github.com/python-zeroconf/python-zeroconf/issues/1167)) ([`fbaaf7b`](https://github.com/python-zeroconf/python-zeroconf/commit/fbaaf7bb6ff985bdabb85feb6cba144f12d4f1d6)) + +### Features + +* feat: speed up processing incoming data (#1167) ([`fbaaf7b`](https://github.com/python-zeroconf/python-zeroconf/commit/fbaaf7bb6ff985bdabb85feb6cba144f12d4f1d6)) + ## v0.59.0 (2023-05-01) -### Feature -* Speed up decoding dns questions when processing incoming data ([#1168](https://github.com/python-zeroconf/python-zeroconf/issues/1168)) ([`f927190`](https://github.com/python-zeroconf/python-zeroconf/commit/f927190cb24f70fd7c825c6e12151fcc0daf3973)) + +### Features + +* feat: speed up 
decoding dns questions when processing incoming data (#1168) ([`f927190`](https://github.com/python-zeroconf/python-zeroconf/commit/f927190cb24f70fd7c825c6e12151fcc0daf3973)) + ## v0.58.2 (2023-04-26) -### Fix -* Re-release to rebuild failed wheels ([#1165](https://github.com/python-zeroconf/python-zeroconf/issues/1165)) ([`4986271`](https://github.com/python-zeroconf/python-zeroconf/commit/498627166a4976f1d9d8cd1f3654b0d50272d266)) + +### Bug Fixes + +* fix: re-release to rebuild failed wheels (#1165) ([`4986271`](https://github.com/python-zeroconf/python-zeroconf/commit/498627166a4976f1d9d8cd1f3654b0d50272d266)) + ## v0.58.1 (2023-04-26) -### Fix -* Reduce cast calls in service browser ([#1164](https://github.com/python-zeroconf/python-zeroconf/issues/1164)) ([`c0d65ae`](https://github.com/python-zeroconf/python-zeroconf/commit/c0d65aeae7037a18ed1149336f5e7bdb8b2dd8cf)) + +### Bug Fixes + +* fix: reduce cast calls in service browser (#1164) ([`c0d65ae`](https://github.com/python-zeroconf/python-zeroconf/commit/c0d65aeae7037a18ed1149336f5e7bdb8b2dd8cf)) + ## v0.58.0 (2023-04-23) -### Feature -* Speed up incoming parser ([#1163](https://github.com/python-zeroconf/python-zeroconf/issues/1163)) ([`4626399`](https://github.com/python-zeroconf/python-zeroconf/commit/46263999c0c7ea5176885f1eadd2c8498834b70e)) + +### Features + +* feat: speed up incoming parser (#1163) ([`4626399`](https://github.com/python-zeroconf/python-zeroconf/commit/46263999c0c7ea5176885f1eadd2c8498834b70e)) + ## v0.57.0 (2023-04-23) -### Feature -* Speed up incoming data parser ([#1161](https://github.com/python-zeroconf/python-zeroconf/issues/1161)) ([`cb4c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/cb4c3b2b80ca3b88b8de6e87062a45e03e8805a6)) + +### Features + +* feat: speed up incoming data parser (#1161) ([`cb4c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/cb4c3b2b80ca3b88b8de6e87062a45e03e8805a6)) + ## v0.56.0 (2023-04-07) -### Feature -* Reduce denial of 
service protection overhead ([#1157](https://github.com/python-zeroconf/python-zeroconf/issues/1157)) ([`2c2f26a`](https://github.com/python-zeroconf/python-zeroconf/commit/2c2f26a87d0aac81a77205b06bc9ba499caa2321)) + +### Features + +* feat: reduce denial of service protection overhead (#1157) ([`2c2f26a`](https://github.com/python-zeroconf/python-zeroconf/commit/2c2f26a87d0aac81a77205b06bc9ba499caa2321)) + ## v0.55.0 (2023-04-07) -### Feature -* Improve performance of processing incoming records ([#1155](https://github.com/python-zeroconf/python-zeroconf/issues/1155)) ([`b65e279`](https://github.com/python-zeroconf/python-zeroconf/commit/b65e2792751c44e0fafe9ad3a55dadc5d8ee9d46)) + +### Features + +* feat: improve performance of processing incoming records (#1155) ([`b65e279`](https://github.com/python-zeroconf/python-zeroconf/commit/b65e2792751c44e0fafe9ad3a55dadc5d8ee9d46)) + ## v0.54.0 (2023-04-03) -### Feature -* Avoid waking async_request when record updates are not relevant ([#1153](https://github.com/python-zeroconf/python-zeroconf/issues/1153)) ([`a3f970c`](https://github.com/python-zeroconf/python-zeroconf/commit/a3f970c7f66067cf2c302c49ed6ad8286f19b679)) + +### Features + +* feat: avoid waking async_request when record updates are not relevant (#1153) ([`a3f970c`](https://github.com/python-zeroconf/python-zeroconf/commit/a3f970c7f66067cf2c302c49ed6ad8286f19b679)) + ## v0.53.1 (2023-04-03) -### Fix -* Addresses incorrect after server name change ([#1154](https://github.com/python-zeroconf/python-zeroconf/issues/1154)) ([`41ea06a`](https://github.com/python-zeroconf/python-zeroconf/commit/41ea06a0192c0d186e678009285759eb37d880d5)) + +### Bug Fixes + +* fix: addresses incorrect after server name change (#1154) ([`41ea06a`](https://github.com/python-zeroconf/python-zeroconf/commit/41ea06a0192c0d186e678009285759eb37d880d5)) + ## v0.53.0 (2023-04-02) -### Feature -* Improve ServiceBrowser performance by removing OrderedDict 
([#1148](https://github.com/python-zeroconf/python-zeroconf/issues/1148)) ([`9a16be5`](https://github.com/python-zeroconf/python-zeroconf/commit/9a16be56a9f69a5d0f7cde13dc1337b6d93c1433)) -### Fix -* Make parsed_scoped_addresses return addresses in the same order as all other methods ([#1150](https://github.com/python-zeroconf/python-zeroconf/issues/1150)) ([`9b6adcf`](https://github.com/python-zeroconf/python-zeroconf/commit/9b6adcf5c04a469632ee866c32f5898c5cbf810a)) +### Bug Fixes + +* fix: make parsed_scoped_addresses return addresses in the same order as all other methods (#1150) ([`9b6adcf`](https://github.com/python-zeroconf/python-zeroconf/commit/9b6adcf5c04a469632ee866c32f5898c5cbf810a)) + +### Features + +* feat: improve ServiceBrowser performance by removing OrderedDict (#1148) ([`9a16be5`](https://github.com/python-zeroconf/python-zeroconf/commit/9a16be56a9f69a5d0f7cde13dc1337b6d93c1433)) -### Technically breaking change -* IP Addresses returned from `ServiceInfo.parsed_addresses` are now stringified using the python `ipaddress` library which may format them differently than `socket.inet_ntop` depending on the operating system. It is recommended to use `ServiceInfo.ip_addresses_by_version` instead going forward as it offers a stronger guarantee since it returns `ipaddress` objects. 
## v0.52.0 (2023-04-02) -### Feature -* Small cleanups to cache cleanup interval ([#1146](https://github.com/python-zeroconf/python-zeroconf/issues/1146)) ([`b434b60`](https://github.com/python-zeroconf/python-zeroconf/commit/b434b60f14ebe8f114b7b19bb4f54081c8ae0173)) -* Add ip_addresses_by_version to ServiceInfo ([#1145](https://github.com/python-zeroconf/python-zeroconf/issues/1145)) ([`524494e`](https://github.com/python-zeroconf/python-zeroconf/commit/524494edd49bd049726b19ae8ac8f6eea69a3943)) -* Speed up processing records in the ServiceBrowser ([#1143](https://github.com/python-zeroconf/python-zeroconf/issues/1143)) ([`6a327d0`](https://github.com/python-zeroconf/python-zeroconf/commit/6a327d00ffb81de55b7c5b599893c789996680c1)) -* Speed up matching types in the ServiceBrowser ([#1144](https://github.com/python-zeroconf/python-zeroconf/issues/1144)) ([`68871c3`](https://github.com/python-zeroconf/python-zeroconf/commit/68871c3b5569e41740a66b7d3d7fa5cc41514ea5)) -* Include tests and docs in sdist archives ([#1142](https://github.com/python-zeroconf/python-zeroconf/issues/1142)) ([`da10a3b`](https://github.com/python-zeroconf/python-zeroconf/commit/da10a3b2827cee0719d3bb9152ae897f061c6e2e)) + +### Features + +* feat: small cleanups to cache cleanup interval (#1146) ([`b434b60`](https://github.com/python-zeroconf/python-zeroconf/commit/b434b60f14ebe8f114b7b19bb4f54081c8ae0173)) + +* feat: add ip_addresses_by_version to ServiceInfo (#1145) ([`524494e`](https://github.com/python-zeroconf/python-zeroconf/commit/524494edd49bd049726b19ae8ac8f6eea69a3943)) + +* feat: speed up processing records in the ServiceBrowser (#1143) ([`6a327d0`](https://github.com/python-zeroconf/python-zeroconf/commit/6a327d00ffb81de55b7c5b599893c789996680c1)) + +* feat: speed up matching types in the ServiceBrowser (#1144) ([`68871c3`](https://github.com/python-zeroconf/python-zeroconf/commit/68871c3b5569e41740a66b7d3d7fa5cc41514ea5)) + +* feat: include tests and docs in sdist archives 
(#1142) + +feat: Include tests and docs in sdist archives + +Include documentation and test files in source distributions, in order +to make them more useful for packagers (Linux distributions, Conda). +Testing is an important part of packaging process, and at least Gentoo +users have requested offline documentation for Python packages. +Furthermore, the COPYING file was missing from sdist, even though it was +referenced in README. ([`da10a3b`](https://github.com/python-zeroconf/python-zeroconf/commit/da10a3b2827cee0719d3bb9152ae897f061c6e2e)) + ## v0.51.0 (2023-04-01) -### Feature -* Improve performance of constructing ServiceInfo ([#1141](https://github.com/python-zeroconf/python-zeroconf/issues/1141)) ([`36d5b45`](https://github.com/python-zeroconf/python-zeroconf/commit/36d5b45a4ece1dca902e9c3c79b5a63b8d9ae41f)) + +### Features + +* feat: improve performance of constructing ServiceInfo (#1141) ([`36d5b45`](https://github.com/python-zeroconf/python-zeroconf/commit/36d5b45a4ece1dca902e9c3c79b5a63b8d9ae41f)) + ## v0.50.0 (2023-04-01) -### Feature -* Small speed up to handler dispatch ([#1140](https://github.com/python-zeroconf/python-zeroconf/issues/1140)) ([`5bd1b6e`](https://github.com/python-zeroconf/python-zeroconf/commit/5bd1b6e7b4dd796069461c737ded956305096307)) + +### Features + +* feat: small speed up to handler dispatch (#1140) ([`5bd1b6e`](https://github.com/python-zeroconf/python-zeroconf/commit/5bd1b6e7b4dd796069461c737ded956305096307)) + ## v0.49.0 (2023-04-01) -### Feature -* Speed up processing incoming records ([#1139](https://github.com/python-zeroconf/python-zeroconf/issues/1139)) ([`7246a34`](https://github.com/python-zeroconf/python-zeroconf/commit/7246a344b6c0543871b40715c95c9435db4c7f81)) + +### Features + +* feat: speed up processing incoming records (#1139) ([`7246a34`](https://github.com/python-zeroconf/python-zeroconf/commit/7246a344b6c0543871b40715c95c9435db4c7f81)) + ## v0.48.0 (2023-04-01) -### Feature -* Reduce overhead to send 
responses ([#1135](https://github.com/python-zeroconf/python-zeroconf/issues/1135)) ([`c4077dd`](https://github.com/python-zeroconf/python-zeroconf/commit/c4077dde6dfde9e2598eb63daa03c36063a3e7b0)) + +### Features + +* feat: reduce overhead to send responses (#1135) ([`c4077dd`](https://github.com/python-zeroconf/python-zeroconf/commit/c4077dde6dfde9e2598eb63daa03c36063a3e7b0)) + ## v0.47.4 (2023-03-20) -### Fix -* Correct duplicate record entries in windows wheels by updating poetry-core ([#1134](https://github.com/python-zeroconf/python-zeroconf/issues/1134)) ([`a43055d`](https://github.com/python-zeroconf/python-zeroconf/commit/a43055d3fa258cd762c3e9394b01f8bdcb24f97e)) + +### Bug Fixes + +* fix: correct duplicate record entries in windows wheels by updating poetry-core (#1134) ([`a43055d`](https://github.com/python-zeroconf/python-zeroconf/commit/a43055d3fa258cd762c3e9394b01f8bdcb24f97e)) + ## v0.47.3 (2023-02-14) -### Fix -* Hold a strong reference to the query sender start task ([#1128](https://github.com/python-zeroconf/python-zeroconf/issues/1128)) ([`808c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/808c3b2194a7f499a469a9893102d328ccee83db)) + +### Bug Fixes + +* fix: hold a strong reference to the query sender start task (#1128) ([`808c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/808c3b2194a7f499a469a9893102d328ccee83db)) + ## v0.47.2 (2023-02-14) -### Fix -* Missing c extensions with newer poetry ([#1129](https://github.com/python-zeroconf/python-zeroconf/issues/1129)) ([`44d7fc6`](https://github.com/python-zeroconf/python-zeroconf/commit/44d7fc6483485102f60c91d591d0d697872f8865)) + +### Bug Fixes + +* fix: missing c extensions with newer poetry (#1129) ([`44d7fc6`](https://github.com/python-zeroconf/python-zeroconf/commit/44d7fc6483485102f60c91d591d0d697872f8865)) + ## v0.47.1 (2022-12-24) -### Fix -* The equality checks for DNSPointer and DNSService should be case insensitive 
([#1122](https://github.com/python-zeroconf/python-zeroconf/issues/1122)) ([`48ae77f`](https://github.com/python-zeroconf/python-zeroconf/commit/48ae77f026a96e2ca475b0ff80cb6d22207ce52f)) + +### Bug Fixes + +* fix: the equality checks for DNSPointer and DNSService should be case insensitive (#1122) ([`48ae77f`](https://github.com/python-zeroconf/python-zeroconf/commit/48ae77f026a96e2ca475b0ff80cb6d22207ce52f)) + ## v0.47.0 (2022-12-22) -### Feature -* Optimize equality checks for DNS records ([#1120](https://github.com/python-zeroconf/python-zeroconf/issues/1120)) ([`3a25ff7`](https://github.com/python-zeroconf/python-zeroconf/commit/3a25ff74bea83cd7d50888ce1ebfd7650d704bfa)) + +### Features + +* feat: optimize equality checks for DNS records (#1120) ([`3a25ff7`](https://github.com/python-zeroconf/python-zeroconf/commit/3a25ff74bea83cd7d50888ce1ebfd7650d704bfa)) + ## v0.46.0 (2022-12-21) -### Feature -* Optimize the dns cache ([#1119](https://github.com/python-zeroconf/python-zeroconf/issues/1119)) ([`e80fcef`](https://github.com/python-zeroconf/python-zeroconf/commit/e80fcef967024f8e846e44b464a82a25f5550edf)) + +### Features + +* feat: optimize the dns cache (#1119) ([`e80fcef`](https://github.com/python-zeroconf/python-zeroconf/commit/e80fcef967024f8e846e44b464a82a25f5550edf)) + ## v0.45.0 (2022-12-20) -### Feature -* Optimize construction of outgoing packets ([#1118](https://github.com/python-zeroconf/python-zeroconf/issues/1118)) ([`81e186d`](https://github.com/python-zeroconf/python-zeroconf/commit/81e186d365c018381f9b486a4dbe4e2e4b8bacbf)) + +### Features + +* feat: optimize construction of outgoing packets (#1118) ([`81e186d`](https://github.com/python-zeroconf/python-zeroconf/commit/81e186d365c018381f9b486a4dbe4e2e4b8bacbf)) + ## v0.44.0 (2022-12-18) -### Feature -* Optimize dns objects by adding pxd files ([#1113](https://github.com/python-zeroconf/python-zeroconf/issues/1113)) 
([`919d4d8`](https://github.com/python-zeroconf/python-zeroconf/commit/919d4d875747b4fa68e25bccd5aae7f304d8a36d)) + +### Features + +* feat: optimize dns objects by adding pxd files (#1113) ([`919d4d8`](https://github.com/python-zeroconf/python-zeroconf/commit/919d4d875747b4fa68e25bccd5aae7f304d8a36d)) + ## v0.43.0 (2022-12-18) -### Feature -* Optimize incoming parser by reducing call stack ([#1116](https://github.com/python-zeroconf/python-zeroconf/issues/1116)) ([`11f3f0e`](https://github.com/python-zeroconf/python-zeroconf/commit/11f3f0e699e00c1ee3d6d8ab5e30f62525510589)) + +### Features + +* feat: optimize incoming parser by reducing call stack (#1116) ([`11f3f0e`](https://github.com/python-zeroconf/python-zeroconf/commit/11f3f0e699e00c1ee3d6d8ab5e30f62525510589)) + ## v0.42.0 (2022-12-18) -### Feature -* Optimize incoming parser by using unpack_from ([#1115](https://github.com/python-zeroconf/python-zeroconf/issues/1115)) ([`a7d50ba`](https://github.com/python-zeroconf/python-zeroconf/commit/a7d50baab362eadd2d292df08a39de6836b41ea7)) + +### Features + +* feat: optimize incoming parser by using unpack_from (#1115) ([`a7d50ba`](https://github.com/python-zeroconf/python-zeroconf/commit/a7d50baab362eadd2d292df08a39de6836b41ea7)) + ## v0.41.0 (2022-12-18) -### Feature -* Optimize incoming parser by adding pxd files ([#1111](https://github.com/python-zeroconf/python-zeroconf/issues/1111)) ([`26efeb0`](https://github.com/python-zeroconf/python-zeroconf/commit/26efeb09783050266242542228f34eb4dd83e30c)) + +### Features + +* feat: optimize incoming parser by adding pxd files (#1111) ([`26efeb0`](https://github.com/python-zeroconf/python-zeroconf/commit/26efeb09783050266242542228f34eb4dd83e30c)) + ## v0.40.1 (2022-12-18) -### Fix -* Fix project name in pyproject.toml ([#1112](https://github.com/python-zeroconf/python-zeroconf/issues/1112)) ([`a330f62`](https://github.com/python-zeroconf/python-zeroconf/commit/a330f62040475257c4a983044e1675aeb95e030a)) -## v0.40.0 
(2022-12-17) -### Feature -* Drop async_timeout requirement for python 3.11+ ([#1107](https://github.com/python-zeroconf/python-zeroconf/issues/1107)) ([`1f4224e`](https://github.com/python-zeroconf/python-zeroconf/commit/1f4224ef122299235013cb81b501f8ff9a30dea1)) +### Bug Fixes + +* fix: fix project name in pyproject.toml (#1112) ([`a330f62`](https://github.com/python-zeroconf/python-zeroconf/commit/a330f62040475257c4a983044e1675aeb95e030a)) + + +## v0.40.0 (2022-12-17) + +### Features + +* feat: drop async_timeout requirement for python 3.11+ (#1107) ([`1f4224e`](https://github.com/python-zeroconf/python-zeroconf/commit/1f4224ef122299235013cb81b501f8ff9a30dea1)) + + +## v0.39.5 (2022-12-17) + +### Unknown + +* 0.39.5 ([`2be6fbf`](https://github.com/python-zeroconf/python-zeroconf/commit/2be6fbfe3d10b185096814d2d0de322733d273cf)) + + +## v0.39.4 (2022-10-31) + +### Unknown + +* Bump version: 0.39.3 → 0.39.4 ([`e620f2a`](https://github.com/python-zeroconf/python-zeroconf/commit/e620f2a1d4f381feb99b639c6ab17845396ba7ea)) + +* Update changelog for 0.39.4 (#1103) ([`03821b6`](https://github.com/python-zeroconf/python-zeroconf/commit/03821b6f4d9fdc40d94d1070f69553649d18909b)) + +* Fix IP changes being missed by ServiceInfo (#1102) ([`524ae89`](https://github.com/python-zeroconf/python-zeroconf/commit/524ae89966d9300e78642a91434ad55643277a48)) + + +## v0.39.3 (2022-10-26) + +### Unknown + +* Bump version: 0.39.2 → 0.39.3 ([`aee3165`](https://github.com/python-zeroconf/python-zeroconf/commit/aee316539b0778eaf2b8878f78d9ead373760cfb)) + +* Update changelog for 0.39.3 (#1101) ([`39c9842`](https://github.com/python-zeroconf/python-zeroconf/commit/39c9842b80ac7d978e8c7ffef0ad836b3b4700f6)) + +* Fix port changes not being seen by ServiceInfo (#1100) ([`c96f5f6`](https://github.com/python-zeroconf/python-zeroconf/commit/c96f5f69d8e68672bb6760b1e40a0de51b62efd6)) + +* Update CI to use released python 3.11 (#1099) 
([`6976980`](https://github.com/python-zeroconf/python-zeroconf/commit/6976980b4874dd65ee533d43be57694bb3b7d0fc)) + + +## v0.39.2 (2022-10-20) + +### Unknown + +* Bump version: 0.39.1 → 0.39.2 ([`785e475`](https://github.com/python-zeroconf/python-zeroconf/commit/785e475467225ddc4930d5302f130781223fd298)) + +* Update changelog for 0.39.2 (#1098) ([`b197344`](https://github.com/python-zeroconf/python-zeroconf/commit/b19734484b4c5eebb86fe6897a26ad082b07bed5)) + +* Improve cache of decode labels at offset (#1097) ([`d3c475f`](https://github.com/python-zeroconf/python-zeroconf/commit/d3c475f3e2590ae5a3056d85c29a66dc71ae3bdf)) + +* Only reprocess address records if the server changes (#1095) ([`0989336`](https://github.com/python-zeroconf/python-zeroconf/commit/0989336d79bc4dd0ef3b26e8d0f9529fca81c1fb)) + +* Prepare for python 3.11 support by adding rc2 to the CI (#1085) ([`7430ce1`](https://github.com/python-zeroconf/python-zeroconf/commit/7430ce1c462be0dd210712b4f7b3675efd3a6963)) + + +## v0.39.1 (2022-09-05) + +### Unknown + +* Bump version: 0.39.0 → 0.39.1 ([`6f90896`](https://github.com/python-zeroconf/python-zeroconf/commit/6f90896a590d6d60db75688a1ba753c333c8faab)) + +* Update changelog for 0.39.1 (#1091) ([`cad3963`](https://github.com/python-zeroconf/python-zeroconf/commit/cad3963e566a7bb2dd188088c11e7a0abb6b3924)) + +* Replace pack with to_bytes (#1090) ([`5968b76`](https://github.com/python-zeroconf/python-zeroconf/commit/5968b76ac2ffe6e41b8961c59bdcc5a48ba410eb)) + + +## v0.39.0 (2022-08-05) + +### Unknown + +* Bump version: 0.38.7 → 0.39.0 ([`60167b0`](https://github.com/python-zeroconf/python-zeroconf/commit/60167b05227ec33668aac5b960a8bc5ba5b833de)) + +* 0.39.0 changelog (#1087) ([`946890a`](https://github.com/python-zeroconf/python-zeroconf/commit/946890aca540bbae95abe8a6ffe66db56fa9e986)) + +* Remove coveralls from dev requirements (#1086) ([`087914d`](https://github.com/python-zeroconf/python-zeroconf/commit/087914da2e914275dd0fff1e4466b3c51ae0c6d3)) + 
+* Fix run_coro_with_timeout test not running in the CI (#1082) ([`b7a24fe`](https://github.com/python-zeroconf/python-zeroconf/commit/b7a24fef05fc6c166b25cfd4235e59c5cbb96a4c)) + +* Fix flakey service_browser_expire_callbacks test (#1084) ([`d5032b7`](https://github.com/python-zeroconf/python-zeroconf/commit/d5032b70b6ebc5c221a43f778f4d897a1d891f91)) + +* Fix flakey test_sending_unicast on windows (#1083) ([`389658d`](https://github.com/python-zeroconf/python-zeroconf/commit/389658d998a23deecd96023794d3672e51189a35)) + +* Replace wait_event_or_timeout internals with async_timeout (#1081) + +Its unlikely that https://bugs.python.org/issue39032 and +https://github.com/python/cpython/issues/83213 will be fixed +soon. While we moved away from an asyncio.Condition, we still +has a similar problem with waiting for an asyncio.Event which +wait_event_or_timeout played well with. async_timeout avoids +creating a task so its a bit more efficient. Since we call +these when resolving ServiceInfo, avoiding task creation +will resolve a performance problem when ServiceBrowsers +startup as they tend to create task storms when coupled +with ServiceInfo lookups. 
([`7ffea9f`](https://github.com/python-zeroconf/python-zeroconf/commit/7ffea9f93e758f75a0eeb9997ff8d9c9d47ec31a)) + +* Update stale docstrings in AsyncZeroconf (#1079) ([`88323d0`](https://github.com/python-zeroconf/python-zeroconf/commit/88323d0c7866f78edde063080c63a72c6e875772)) + + +## v0.38.7 (2022-06-14) + +### Unknown + +* Bump version: 0.38.6 → 0.38.7 ([`f3a9f80`](https://github.com/python-zeroconf/python-zeroconf/commit/f3a9f804914fec37e961f80f347c4e706c4bae33)) + +* Update changelog for 0.38.7 (#1078) ([`5f7ba0d`](https://github.com/python-zeroconf/python-zeroconf/commit/5f7ba0d7dc9a5a6b2cf3a321b7b2f448d4332de9)) + +* Speed up unpacking incoming packet data (#1076) ([`533ad10`](https://github.com/python-zeroconf/python-zeroconf/commit/533ad10121739997a4925d90792cbe9e00a5ac4f)) + + +## v0.38.6 (2022-05-06) + +### Unknown + +* Bump version: 0.38.5 → 0.38.6 ([`1aa7842`](https://github.com/python-zeroconf/python-zeroconf/commit/1aa7842ae0f914c10465ae977551698046406d55)) + +* Update changelog for 0.38.6 (#1073) ([`dfd3222`](https://github.com/python-zeroconf/python-zeroconf/commit/dfd3222405f0123a849d376d8be466be46bdb557)) + +* Always return `started` as False once Zeroconf has been marked as done (#1072) ([`ed02e5d`](https://github.com/python-zeroconf/python-zeroconf/commit/ed02e5d92768d1fc41163f59e303a76843bfd9fd)) + +* Avoid waking up ServiceInfo listeners when there is no new data (#1068) ([`59624a6`](https://github.com/python-zeroconf/python-zeroconf/commit/59624a6cfb1839b2654a6021a7317a1bdad179e9)) + +* Remove left-in debug print (#1071) ([`5fb0954`](https://github.com/python-zeroconf/python-zeroconf/commit/5fb0954cf2c6040704c3db1d2b0fece389425e5b)) + +* Use unique name in test_service_browser_expire_callbacks test (#1069) ([`89c9022`](https://github.com/python-zeroconf/python-zeroconf/commit/89c9022f87d3a83cc586b153fb7d5ea3af69ae3b)) + +* Fix CI failures (#1070) 
([`f9b2816`](https://github.com/python-zeroconf/python-zeroconf/commit/f9b2816e15b0459f8051079f77b70e983769cd44)) + + +## v0.38.5 (2022-05-01) + +### Unknown + +* Bump version: 0.38.4 → 0.38.5 ([`3c55388`](https://github.com/python-zeroconf/python-zeroconf/commit/3c5538899b8974e99c9a279ce3ac46971ab5d91c)) + +* Update changelog for 0.38.5 (#1066) ([`ae3635b`](https://github.com/python-zeroconf/python-zeroconf/commit/ae3635b9ee73edeaabe2cbc027b8fb8bd7cd97da)) + +* Fix ServiceBrowsers not getting `ServiceStateChange.Removed` callbacks on PTR record expire (#1064) ([`10ee205`](https://github.com/python-zeroconf/python-zeroconf/commit/10ee2053a80f7c7221b4fa1475d66b01abd21b11)) + +* Fix ci trying to run mypy on pypy (#1065) ([`31662b7`](https://github.com/python-zeroconf/python-zeroconf/commit/31662b7a0bba65bea1fbfc09c70cd2970160c5c6)) + +* Force minimum version of 3.7 and update example (#1060) + +Co-authored-by: J. Nick Koston ([`6e842f2`](https://github.com/python-zeroconf/python-zeroconf/commit/6e842f238b3e1f3b738ed058e0fa4068115f041b)) + +* Fix mypy error in zeroconf._service.info (#1062) ([`e9d25f7`](https://github.com/python-zeroconf/python-zeroconf/commit/e9d25f7749778979b7449464153163587583bf8d)) + +* Refactor to fix mypy error (#1061) ([`6c451f6`](https://github.com/python-zeroconf/python-zeroconf/commit/6c451f64e7cbeaa0bb77f66790936afda2d058ef)) + + +## v0.38.4 (2022-02-28) + +### Unknown + +* Bump version: 0.38.3 → 0.38.4 ([`5c40e89`](https://github.com/python-zeroconf/python-zeroconf/commit/5c40e89420255b5b978bff4682b21f0820fb4682)) + +* Update changelog for 0.38.4 (#1058) ([`3736348`](https://github.com/python-zeroconf/python-zeroconf/commit/3736348da30ee4b7c50713936f2ae919e5446ffa)) + +* Fix IP Address updates when hostname is uppercase (#1057) ([`79d067b`](https://github.com/python-zeroconf/python-zeroconf/commit/79d067b88f9108259a44f33801e26bd3a25ca759)) + + +## v0.38.3 (2022-01-31) + +### Unknown + +* Bump version: 0.38.2 → 0.38.3 
([`e42549c`](https://github.com/python-zeroconf/python-zeroconf/commit/e42549cb70796d0577c97be96a09bca0056a5755)) + +* Update changelog for 0.38.2/3 (#1053) ([`d99c7ff`](https://github.com/python-zeroconf/python-zeroconf/commit/d99c7ffea37fd27c315115133dab08445aa417d1)) + + +## v0.38.2 (2022-01-31) + +### Unknown + +* Bump version: 0.38.1 → 0.38.2 ([`50cd12d`](https://github.com/python-zeroconf/python-zeroconf/commit/50cd12d8c2ced166da8f4852120ba8a28b13cba0)) + +* Make decode errors more helpful in finding the source of the bad data (#1052) ([`25e6123`](https://github.com/python-zeroconf/python-zeroconf/commit/25e6123a07a9560e978a04d5e285bfa74ee41e64)) + + +## v0.38.1 (2021-12-23) + +### Unknown + +* Bump version: 0.38.0 → 0.38.1 ([`6a11f24`](https://github.com/python-zeroconf/python-zeroconf/commit/6a11f24e1fc9d73f0dbb62efd834f17a9bd451c4)) + +* Update changelog for 0.38.1 (#1045) ([`670d4ac`](https://github.com/python-zeroconf/python-zeroconf/commit/670d4ac3be7e32d02afe85b72264a241b5a25ba8)) + +* Avoid linear type searches in ServiceBrowsers (#1044) ([`ff76634`](https://github.com/python-zeroconf/python-zeroconf/commit/ff766345461a82547abe462b5d690621c755d480)) + +* Improve performance of query scheduler (#1043) ([`27e50ff`](https://github.com/python-zeroconf/python-zeroconf/commit/27e50ff95625d128f71864138b8e5d871503adf0)) + + +## v0.38.0 (2021-12-23) + +### Unknown + +* Bump version: 0.37.0 → 0.38.0 ([`95ee5dc`](https://github.com/python-zeroconf/python-zeroconf/commit/95ee5dc031c9c512f99536186d1d89a99e4af37f)) + +* Update changelog for 0.38.0 (#1042) ([`de14202`](https://github.com/python-zeroconf/python-zeroconf/commit/de1420213cd7e3bd8f57e727ff1031c7b10cf7a0)) + +* Handle Service types that end with another service type (#1041) + +Co-authored-by: J. Nick Koston ([`a4d619a`](https://github.com/python-zeroconf/python-zeroconf/commit/a4d619a9f094682d9dcfc7f8fa293f17bcae88f2)) + +* Add tests for instance names containing dot(s) (#1039) + +Co-authored-by: J. 
Nick Koston ([`22ed08c`](https://github.com/python-zeroconf/python-zeroconf/commit/22ed08c7e5403a788b1c177a1bb9558419bce2b1)) + +* Drop python 3.6 support (#1009) ([`631a6f7`](https://github.com/python-zeroconf/python-zeroconf/commit/631a6f7c7863897336a9d6ca4bd1736cc7cc97af)) + + +## v0.37.0 (2021-11-18) + +### Unknown + +* Bump version: 0.36.13 → 0.37.0 ([`2996e64`](https://github.com/python-zeroconf/python-zeroconf/commit/2996e642f6b1abba1dbb8242ccca4cd4b96696f6)) + +* Update changelog for 0.37.0 (#1035) ([`61a7e3f`](https://github.com/python-zeroconf/python-zeroconf/commit/61a7e3fb65d99db7d51f1df42b286b55710a2e99)) + +* Log an error when listeners are added that do not inherit from RecordUpdateListener (#1034) ([`ee071a1`](https://github.com/python-zeroconf/python-zeroconf/commit/ee071a12f31f7010110eef5ccef80c6cdf469d87)) + +* Throw NotRunningException when Zeroconf is not running (#1033) + +- Before this change the consumer would get a timeout or an EventLoopBlocked + exception when calling `ServiceInfo.*request` when the instance had already been shutdown. + This was quite a confusing result. 
([`28938d2`](https://github.com/python-zeroconf/python-zeroconf/commit/28938d20bb62ae0d9aa2f94929f60434fb346704)) + +* Throw EventLoopBlocked instead of concurrent.futures.TimeoutError (#1032) ([`21bd107`](https://github.com/python-zeroconf/python-zeroconf/commit/21bd10762a89ca3f4ca89f598c9d93684a02f51b)) + + +## v0.36.13 (2021-11-13) + +### Unknown + +* Bump version: 0.36.12 → 0.36.13 ([`4241c76`](https://github.com/python-zeroconf/python-zeroconf/commit/4241c76550130469aecbe88cc1a7cdc13505f8ba)) + +* Update changelog for 0.36.13 (#1030) ([`106cf27`](https://github.com/python-zeroconf/python-zeroconf/commit/106cf27478bb0c1e6e5a7194661ff52947d61c96)) + +* Downgrade incoming corrupt packet logging to debug (#1029) + +- Warning about network traffic we have no control over + is confusing to users as they think there is + something wrong with zeroconf ([`73c52d0`](https://github.com/python-zeroconf/python-zeroconf/commit/73c52d04a140bc744669777a0f353eefc6623ff9)) + +* Skip unavailable interfaces during socket bind (#1028) + +- We already skip these when adding multicast members. 
+ Apply the same logic to the socket bind call ([`aa59998`](https://github.com/python-zeroconf/python-zeroconf/commit/aa59998182ce29c55f8c3dde9a058ce36ac2bb2d)) + + +## v0.36.12 (2021-11-05) + +### Unknown + +* Bump version: 0.36.11 → 0.36.12 ([`8b0dc48`](https://github.com/python-zeroconf/python-zeroconf/commit/8b0dc48ed42d8edc78750122eb5685a50c3cdc11)) + +* Update changelog for 0.36.12 (#1027) ([`51bf364`](https://github.com/python-zeroconf/python-zeroconf/commit/51bf364b364ecaad16503df4a4c4c3bb5ead2775)) + +* Account for intricacies of floating-point arithmetic in service browser tests (#1026) ([`3c70808`](https://github.com/python-zeroconf/python-zeroconf/commit/3c708080b3e42a02930ad17c96a2cf0dcb06f441)) + +* Prevent service lookups from deadlocking if time abruptly moves backwards (#1006) + +- The typical reason time moves backwards is via an ntp update ([`38380a5`](https://github.com/python-zeroconf/python-zeroconf/commit/38380a58a64f563f105cecc610f194c20056b2b6)) + + +## v0.36.11 (2021-10-30) + +### Unknown + +* Bump version: 0.36.10 → 0.36.11 ([`3d8f50d`](https://github.com/python-zeroconf/python-zeroconf/commit/3d8f50de74f7b3941d9b35b6ae6e42ba02be9361)) + +* Update changelog for 0.36.11 (#1024) ([`69a9b8e`](https://github.com/python-zeroconf/python-zeroconf/commit/69a9b8e060ae8a596050d393c0a5c8b43beadc8e)) + +* Add readme check to the CI (#1023) ([`c966976`](https://github.com/python-zeroconf/python-zeroconf/commit/c966976531ac9222460763d647d0a3b75459e275)) + + +## v0.36.10 (2021-10-30) + +### Unknown + +* Bump version: 0.36.9 → 0.36.10 ([`e0b340a`](https://github.com/python-zeroconf/python-zeroconf/commit/e0b340afbfd25ae9d05a59a577938b062287c8b6)) + +* Update changelog for 0.36.10 (#1021) ([`69ce817`](https://github.com/python-zeroconf/python-zeroconf/commit/69ce817a68d65f2db0bfe6d4790d3a6a356ac83f)) + +* Fix test failure when has_working_ipv6 generates an exception (#1022) 
([`cd8984d`](https://github.com/python-zeroconf/python-zeroconf/commit/cd8984d3e95bffe6fd32b97eae9844bf5afed4de)) + +* Strip scope_id from IPv6 address if given. (#1020) ([`686febd`](https://github.com/python-zeroconf/python-zeroconf/commit/686febdd181c837fa6a41afce91edeeded731fbe)) + +* Optimize decoding labels from incoming packets (#1019) + +- decode is a bit faster vs str() + +``` +>>> ts = Timer("s.decode('utf-8', 'replace')", "s = b'TV Beneden (2)\x10_androidtvremote\x04_tcp\x05local'") +>>> ts.timeit() +0.09910525000003645 +>>> ts = Timer("str(s, 'utf-8', 'replace')", "s = b'TV Beneden (2)\x10_androidtvremote\x04_tcp\x05local'") +>>> ts.timeit() +0.1304596250000145 +``` ([`4b9a6c3`](https://github.com/python-zeroconf/python-zeroconf/commit/4b9a6c3fd4aec920597e7e63e82e935df68804f4)) + +* Fix typo in changelog (#1017) ([`0fdcd51`](https://github.com/python-zeroconf/python-zeroconf/commit/0fdcd5146264b37daa7cc35bda883519175e362f)) + + +## v0.36.9 (2021-10-22) + +### Unknown + +* Bump version: 0.36.8 → 0.36.9 ([`d92d3d0`](https://github.com/python-zeroconf/python-zeroconf/commit/d92d3d030558c1b81b2e35f701b585f4b48fa99a)) + +* Update changelog for 0.36.9 (#1016) ([`1427ba7`](https://github.com/python-zeroconf/python-zeroconf/commit/1427ba75a8f7e2962aa0b3105d3c856002134790)) + +* Ensure ServiceInfo orders newest addresses first (#1012) ([`87a4d8f`](https://github.com/python-zeroconf/python-zeroconf/commit/87a4d8f4d5c8365425c2ee969032205f916f80c1)) + + +## v0.36.8 (2021-10-10) + +### Unknown + +* Bump version: 0.36.7 → 0.36.8 ([`61275ef`](https://github.com/python-zeroconf/python-zeroconf/commit/61275efd05688a61d656b43125b01a5d588f1dba)) + +* Update changelog for 0.36.8 (#1010) ([`1551618`](https://github.com/python-zeroconf/python-zeroconf/commit/15516188f346c70f64a923bb587804b9bf948873)) + +* Fix ServiceBrowser infinite looping when zeroconf is closed before its canceled (#1008)
([`b0e8c8a`](https://github.com/python-zeroconf/python-zeroconf/commit/b0e8c8a21fd721e60adbac4dbf7a03959fc3f641)) + +* Update CI to use python 3.10, pypy 3.7 (#1007) ([`fec9f3d`](https://github.com/python-zeroconf/python-zeroconf/commit/fec9f3dc9626be08eccdf1263dbf4d1686fd27b2)) + +* Cleanup typing in zeroconf._protocol.outgoing (#1000) ([`543558d`](https://github.com/python-zeroconf/python-zeroconf/commit/543558d0498ed03eb9dc4597c4c40484e16ee4e6)) + +* Breakout functions with no self-use in zeroconf._handlers (#1003) ([`af4d082`](https://github.com/python-zeroconf/python-zeroconf/commit/af4d082240a545ba3014eb7f1056c3b32ce2cb70)) + +* Use more f-strings in zeroconf._dns (#1002) ([`d3ed691`](https://github.com/python-zeroconf/python-zeroconf/commit/d3ed69107330f1a29f45d174caafdec1e894f666)) + +* Remove unused code in zeroconf._core (#1001) + +- Breakout functions without self-use ([`8e45ea9`](https://github.com/python-zeroconf/python-zeroconf/commit/8e45ea943be6490b2217f0eb01501e12a5221c16)) + + +## v0.36.7 (2021-09-22) + +### Unknown + +* Bump version: 0.36.6 → 0.36.7 ([`f44b40e`](https://github.com/python-zeroconf/python-zeroconf/commit/f44b40e26ea8872151ea9ee4762b95ca25790089)) + +* Update changelog for 0.36.7 (#999) ([`d2853c3`](https://github.com/python-zeroconf/python-zeroconf/commit/d2853c31db9ece28fb258c4146ba61cf0e6a6592)) + +* Improve log message when receiving an invalid or corrupt packet (#998) ([`b637846`](https://github.com/python-zeroconf/python-zeroconf/commit/b637846e7df3292d6dcdd38a8eb77b6fa3287c51)) + +* Reduce logging overhead (#994) ([`7df7e4a`](https://github.com/python-zeroconf/python-zeroconf/commit/7df7e4a68e33c3e3a5bddf0168e248a4542a788f)) + +* Reduce overhead to compare dns records (#997) ([`7fa51de`](https://github.com/python-zeroconf/python-zeroconf/commit/7fa51de5b71d03470643a83004b9f6f8d4017214)) + +* Refactor service registry to avoid use of getattr (#996) 
([`7622365`](https://github.com/python-zeroconf/python-zeroconf/commit/762236547d4838f2b6a94cfa20221dfdd03e9b94)) + +* Flush CI cache (#995) ([`93ddf7c`](https://github.com/python-zeroconf/python-zeroconf/commit/93ddf7cf9b47d7ff1e341b6c2875254b6f00eef1)) + + +## v0.36.6 (2021-09-19) + +### Unknown + +* Bump version: 0.36.5 → 0.36.6 ([`0327a06`](https://github.com/python-zeroconf/python-zeroconf/commit/0327a068250c85f3ff84d3f0b809b51f83321c47)) + +* Fix tense of 0.36.6 changelog (#992) ([`29f995f`](https://github.com/python-zeroconf/python-zeroconf/commit/29f995fd3c09604f37980e74f2785b1a451da089)) + +* Update changelog for 0.36.6 (#991) ([`92f5f4a`](https://github.com/python-zeroconf/python-zeroconf/commit/92f5f4a80b8a8e50df5ca06e3cc45480dc39b504)) + +* Simplify the can_send_to check (#990) ([`1887c55`](https://github.com/python-zeroconf/python-zeroconf/commit/1887c554b3f9d0b90a1c01798d7f06a7e4de6900)) + + +## v0.36.5 (2021-09-18) + +### Unknown + +* Bump version: 0.36.4 → 0.36.5 ([`34f4a26`](https://github.com/python-zeroconf/python-zeroconf/commit/34f4a26c9254d6002bdccb1a003d9822a8798c04)) + +* Update changelog for 0.36.5 (#989) ([`aebabe9`](https://github.com/python-zeroconf/python-zeroconf/commit/aebabe95c59e34f703307340e087b3eab5339a06)) + +* Separate zeroconf._protocol into an incoming and outgoing modules (#988) ([`87b6a32`](https://github.com/python-zeroconf/python-zeroconf/commit/87b6a32fb77d9bdcea9d2d7ffba189abc5371b50)) + +* Reduce dns protocol attributes and add slots (#987) ([`f4665fc`](https://github.com/python-zeroconf/python-zeroconf/commit/f4665fc67cd762c4ab66271a550d75640d3bffca)) + +* Fix typo in changelog (#986) ([`4398538`](https://github.com/python-zeroconf/python-zeroconf/commit/43985380b9e995d9790d71486aed258326ad86e4)) + + +## v0.36.4 (2021-09-16) + +### Unknown + +* Bump version: 0.36.3 → 0.36.4 ([`a23f6d2`](https://github.com/python-zeroconf/python-zeroconf/commit/a23f6d2cc40ea696410c3c31b73760065c36f0bf)) + +* Update changelog for 0.36.4
(#985) ([`f4d4164`](https://github.com/python-zeroconf/python-zeroconf/commit/f4d4164989931adbac0e5907b7bf276da1d0d7d7)) + +* Defer decoding known answers until needed (#983) ([`88b9875`](https://github.com/python-zeroconf/python-zeroconf/commit/88b987551cb98757c2df2540ba390f320d46fa7b)) + +* Collapse _GLOBAL_DONE into done (#984) ([`05c4329`](https://github.com/python-zeroconf/python-zeroconf/commit/05c4329d7647c381783ead086c2ed4f3b6b44262)) + +* Remove flake8 requirement restriction as its no longer needed (#981) ([`bc64d63`](https://github.com/python-zeroconf/python-zeroconf/commit/bc64d63ef73e643e71634957fd79e6f6597373d4)) + +* Reduce duplicate code to write records (#979) ([`acf6457`](https://github.com/python-zeroconf/python-zeroconf/commit/acf6457b3c6742c92e9112b0a39a387b33cea4db)) + +* Force CI cache clear (#982) ([`d9ea918`](https://github.com/python-zeroconf/python-zeroconf/commit/d9ea9189def07531d126e01c7397b2596d9a8695)) + +* Reduce name compression overhead and complexity (#978) ([`f1d6fc3`](https://github.com/python-zeroconf/python-zeroconf/commit/f1d6fc3f60e685ff63b1a1cb820cfc3ca5268fcb)) + + +## v0.36.3 (2021-09-14) + +### Unknown + +* Bump version: 0.36.2 → 0.36.3 ([`769b397`](https://github.com/python-zeroconf/python-zeroconf/commit/769b3973835ebc6f5a34e236a01cb2cd935e81de)) + +* Update changelog for 0.36.3 (#977) ([`84f16bf`](https://github.com/python-zeroconf/python-zeroconf/commit/84f16bff6df41f1907e060e7bd4ce24d173d51c4)) + +* Reduce DNSIncoming parsing overhead (#975) + +- Parsing incoming packets is the most expensive operation + zeroconf performs on networks with high mDNS volume ([`78f9cd5`](https://github.com/python-zeroconf/python-zeroconf/commit/78f9cd5123d0e3c582aba05bd61388419d4dc01e)) + + +## v0.36.2 (2021-08-30) + +### Unknown + +* Bump version: 0.36.1 → 0.36.2 ([`5f52438`](https://github.com/python-zeroconf/python-zeroconf/commit/5f52438f4c0851bb1a3b78575c0c28e0b6ce561d)) + +* Update changelog for 0.36.2 (#973) 
([`b4efa33`](https://github.com/python-zeroconf/python-zeroconf/commit/b4efa33b4ef6d5292d8d477da4258d99d22c4e84)) + +* Include NSEC records for non-existent types when responding with addresses (#972) + +Implements datatracker.ietf.org/doc/html/rfc6762#section-6.2 ([`7a20fd3`](https://github.com/python-zeroconf/python-zeroconf/commit/7a20fd3bc8dc0a703619ca9413faf674b3d7a111)) + +* Add support for writing NSEC records (#971) ([`768a23c`](https://github.com/python-zeroconf/python-zeroconf/commit/768a23c656e3f091ecbecbb6b380b5becbbf9674)) + + +## v0.36.1 (2021-08-29) + +### Unknown + +* Bump version: 0.36.0 → 0.36.1 ([`e8d8401`](https://github.com/python-zeroconf/python-zeroconf/commit/e8d84017b750ab5f159abc7225f9922d84a8f9fd)) + +* Update changelog for 0.36.1 (#970) ([`d504333`](https://github.com/python-zeroconf/python-zeroconf/commit/d5043337de39a11b2b241e9247a34c41c0c7c2bc)) + +* Skip goodbye packets for addresses when there is another service registered with the same name (#968) ([`d9d3208`](https://github.com/python-zeroconf/python-zeroconf/commit/d9d3208eed84b71b61c458f2992b08b5db259da1)) + +* Fix equality and hash for dns records with the unique bit (#969) ([`574e241`](https://github.com/python-zeroconf/python-zeroconf/commit/574e24125a536dc4fb9a1784797efd495ceb1fdf)) + + +## v0.36.0 (2021-08-16) + +### Unknown + +* Bump version: 0.35.1 → 0.36.0 ([`e4985c7`](https://github.com/python-zeroconf/python-zeroconf/commit/e4985c7dd2088d4da9fc2be25f67beb65f548e95)) + +* Update changelog for 0.36.0 (#966) ([`bc50bce`](https://github.com/python-zeroconf/python-zeroconf/commit/bc50bce04b650756fef3f8b1cce6defbc5dccee5)) + +* Create full IPv6 address tuple to enable service discovery on Windows (#965) ([`733eb3a`](https://github.com/python-zeroconf/python-zeroconf/commit/733eb3a31ed40c976f5fa4b7b3baf055589ef36b)) + + +## v0.35.1 (2021-08-15) + +### Unknown + +* Bump version: 0.35.0 → 0.35.1
([`4281221`](https://github.com/python-zeroconf/python-zeroconf/commit/4281221b668123b770c6d6b0835dd876d1d2f22d)) + +* Fix formatting in 0.35.1 changelog entry (#964) ([`c7c7d47`](https://github.com/python-zeroconf/python-zeroconf/commit/c7c7d4778e9962af5180616af73977d8503e4762)) + +* Update changelog for 0.35.1 (#963) ([`f7bebfe`](https://github.com/python-zeroconf/python-zeroconf/commit/f7bebfe09aeb9bb973dbe6ba147b682472b64246)) + +* Cache DNS record and question hashes (#960) ([`d4c109c`](https://github.com/python-zeroconf/python-zeroconf/commit/d4c109c3abffcba2331a7f9e7bf45c6477a8d4e8)) + +* Fix flakey test: test_future_answers_are_removed_on_send (#962) ([`3b482e2`](https://github.com/python-zeroconf/python-zeroconf/commit/3b482e229d37b85e59765e023ddbca77aa513731)) + +* Add coverage for sending answers removes future queued answers (#961) + +- If we send an answer that is queued to be sent out in the future + we should remove it from the queue as the question has already + been answered and we do not want to generate additional traffic. ([`2d1b832`](https://github.com/python-zeroconf/python-zeroconf/commit/2d1b8329ad39b94f9f4aa5f53caf3bb2813879ca)) + +* Only reschedule types if the send next time changes (#958) + +- When the PTR response was seen again, the timer was being canceled and + rescheduled even if the timer was for the same time. While this did + not cause any breakage, it is quite inefficient. 
([`7b125a1`](https://github.com/python-zeroconf/python-zeroconf/commit/7b125a1a0a109ef29d0a4e736a27645a7e9b4207)) + + +## v0.35.0 (2021-08-13) + +### Unknown + +* Bump version: 0.34.3 → 0.35.0 ([`1e60e13`](https://github.com/python-zeroconf/python-zeroconf/commit/1e60e13ae15a5b533a48cc955b98951eedd04dbb)) + +* Update changelog for 0.35.0 (#957) ([`dd40437`](https://github.com/python-zeroconf/python-zeroconf/commit/dd40437f4328f4ee36c43239ecf5f484b6ac261e)) + +* Reduce chance of accidental synchronization of ServiceInfo requests (#955) ([`c772936`](https://github.com/python-zeroconf/python-zeroconf/commit/c77293692062ea701037e06c1cf5497f019ae2f2)) + +* Send unicast replies on the same socket the query was received (#952) + +When replying to a QU question, we do not know if the sending host is reachable +from all of the sending sockets. We now avoid this problem by replying via +the receiving socket. This was the existing behavior when `InterfaceChoice.Default` +is set. + +This change extends the unicast relay behavior to used with `InterfaceChoice.Default` +to apply when `InterfaceChoice.All` or interfaces are explicitly passed when +instantiating a `Zeroconf` instance. + +Fixes #951 ([`5fb3e20`](https://github.com/python-zeroconf/python-zeroconf/commit/5fb3e202c06e3a0d30e3c7824397d8e8a9f52555)) + +* Sort responses to increase chance of name compression (#954) + +- When building an outgoing response, sort the names together + to increase the likelihood of name compression. 
In testing + this reduced the number of packets for large responses + (from 7 packets to 6) ([`ebc23ee`](https://github.com/python-zeroconf/python-zeroconf/commit/ebc23ee5e9592dd7f0235cd57f9b3ad727ec8bff)) + + +## v0.34.3 (2021-08-09) + +### Unknown + +* Bump version: 0.34.2 → 0.34.3 ([`9d69d18`](https://github.com/python-zeroconf/python-zeroconf/commit/9d69d18713bdfab53762a6b8c3aff7fd72ebd025)) + +* Update changelog for 0.34.3 (#950) ([`23b00e9`](https://github.com/python-zeroconf/python-zeroconf/commit/23b00e983b2e8335431dcc074935f379fd399d46)) + +* Fix sending immediate multicast responses (#949) + +- Fixes a typo in handle_assembled_query that prevented immediate + responses from being sent. ([`02af7f7`](https://github.com/python-zeroconf/python-zeroconf/commit/02af7f78d2e5eabcc5cce8238546ee5170951b28)) + + +## v0.34.2 (2021-08-09) + +### Unknown + +* Bump version: 0.34.1 → 0.34.2 ([`6c21f68`](https://github.com/python-zeroconf/python-zeroconf/commit/6c21f6802b58d949038e9c8501ea204eeda57a16)) + +* Update changelog for 0.34.2 (#947) ([`b87f493`](https://github.com/python-zeroconf/python-zeroconf/commit/b87f4934b39af02f26bbbfd6f372c7154fe95906)) + +* Ensure ServiceInfo requests can be answered with the default timeout with network protection (#946) + +- Adjust the time windows to ensure responses that have triggered the +protection against excessive packet flooding due to +software bugs or malicious attack described in RFC6762 section 6 +can respond in under 1350ms to ensure ServiceInfo can ask two +questions within the default timeout of 3000ms ([`6d7266d`](https://github.com/python-zeroconf/python-zeroconf/commit/6d7266d0e1e6dcb950456da0354b4c43fd5c0ecb)) + +* Coalesce aggregated multicast answers when the random delay is shorter than the last scheduled response (#945) + +- Reduces traffic when we already know we will be sending a group of answers + inside the random delay window described in + https://datatracker.ietf.org/doc/html/rfc6762#section-6.3 +
+closes #944 ([`9a5164a`](https://github.com/python-zeroconf/python-zeroconf/commit/9a5164a7a3231903537231bfb56479e617355f92)) + + +## v0.34.1 (2021-08-08) + +### Unknown + +* Bump version: 0.34.0 → 0.34.1 ([`7878a9e`](https://github.com/python-zeroconf/python-zeroconf/commit/7878a9eed93a8ec2396d8450389a08bf54bd5693)) + +* Update changelog for 0.34.1 (#943) ([`9942484`](https://github.com/python-zeroconf/python-zeroconf/commit/9942484172d7a79fe84c47924538c2c02fde7264)) + +* Ensure multicast aggregation sends responses within 620ms (#942) ([`de96e2b`](https://github.com/python-zeroconf/python-zeroconf/commit/de96e2bf01af68d754bb7c71da949e30de88a77b)) + + +## v0.34.0 (2021-08-08) + +### Unknown + +* Bump version: 0.33.4 → 0.34.0 ([`549ac3d`](https://github.com/python-zeroconf/python-zeroconf/commit/549ac3de27eb3924cc7967088c3d316184722b9d)) + +* Update changelog for 0.34.0 (#941) ([`342532e`](https://github.com/python-zeroconf/python-zeroconf/commit/342532e1d13ac24673735dc467a79edebdfb9362)) + +* Implement Multicast Response Aggregation (#940) + +- Responses are now aggregated when possible per rules in RFC6762 section 6.4 +- Responses that trigger the protection against excessive packet flooding due to + software bugs or malicious attack described in RFC6762 section 6 are delayed instead of discarding as it was causing responders that implement Passive Observation Of Failures (POOF) to evict the records. +- Probe responses are now always sent immediately as there were cases where they would fail to be answered in time to defend a name.
+ +closes #939 ([`55efb41`](https://github.com/python-zeroconf/python-zeroconf/commit/55efb4169b588cef093f3065f3a894878ae8bd95)) + + +## v0.33.4 (2021-08-06) + +### Unknown + +* Bump version: 0.33.3 → 0.33.4 ([`7bbacd5`](https://github.com/python-zeroconf/python-zeroconf/commit/7bbacd57a134c12ee1fb61d8318b312dfdae18f8)) + +* Update changelog for 0.33.4 (#937) ([`858605d`](https://github.com/python-zeroconf/python-zeroconf/commit/858605db52f909d41198df76130597ff93f64cdd)) + +* Ensure zeroconf can be loaded when the system disables IPv6 (#933) + +Co-authored-by: J. Nick Koston ([`496ac44`](https://github.com/python-zeroconf/python-zeroconf/commit/496ac44e99b56485cc9197490e71bb2dd7bec6f9)) + + +## v0.33.3 (2021-08-05) + +### Unknown + +* Bump version: 0.33.2 → 0.33.3 ([`206671a`](https://github.com/python-zeroconf/python-zeroconf/commit/206671a1237ee8237d302b04c5a84158fed1d50b)) + +* Update changelog for 0.33.3 (#936) ([`6a140cc`](https://github.com/python-zeroconf/python-zeroconf/commit/6a140cc6b9c7e50e572456662d2f76f6fbc2ed25)) + +* Add support for forward dns compression pointers (#934) + +- nslookup supports these and some implementations (likely avahi) + will generate them + +- Careful attention was given to make sure we detect loops + and do not create anti-patterns described in + https://github.com/Forescout/namewreck/blob/main/rfc/draft-dashevskyi-dnsrr-antipatterns-00.txt + +Fixes https://github.com/home-assistant/core/issues/53937 +Fixes https://github.com/home-assistant/core/issues/46985 +Fixes https://github.com/home-assistant/core/issues/53668 +Fixes #308 ([`5682a4c`](https://github.com/python-zeroconf/python-zeroconf/commit/5682a4c3c89043bf8a10e79232933ada5ab71972)) + +* Provide sockname when logging a protocol error (#935) ([`319992b`](https://github.com/python-zeroconf/python-zeroconf/commit/319992bb093d9b965976bad724512d9bcd05aca7)) + + +## v0.33.2 (2021-07-28) + +### Unknown + +* Bump version: 0.33.1 → 0.33.2 
([`4d30c25`](https://github.com/python-zeroconf/python-zeroconf/commit/4d30c25fe57425bcae36a539006e44941ef46e2c)) + +* Update changelog for 0.33.2 (#931) ([`c80b5f7`](https://github.com/python-zeroconf/python-zeroconf/commit/c80b5f7253e521928d6f7e54681675be59371c6c)) + +* Handle duplicate goodbye answers in the same packet (#928) + +- Solves an exception being thrown when we tried to remove the known answer + from the cache when the second goodbye answer in the same packet was processed + +- We previously swallowed all exceptions on cache removal so this was not + visible until 0.32.x which removed the broad exception catch + +Fixes #926 ([`97e0b66`](https://github.com/python-zeroconf/python-zeroconf/commit/97e0b669be60f716e45e963f1bcfcd35b7213626)) + +* Skip ipv6 interfaces that return ENODEV (#930) ([`73e3d18`](https://github.com/python-zeroconf/python-zeroconf/commit/73e3d1865f4167e7c9f7c23ec4cc7ebfac40f512)) + +* Remove some pylint workarounds (#925) ([`1247acd`](https://github.com/python-zeroconf/python-zeroconf/commit/1247acd2e6f6154a4e5f2e27a820c55329391d8e)) + + +## v0.33.1 (2021-07-18) + +### Unknown + +* Bump version: 0.33.0 → 0.33.1 ([`6774de3`](https://github.com/python-zeroconf/python-zeroconf/commit/6774de3e7f8b461ccb83675bbb05d47949df487b)) + +* Update changelog for 0.33.1 (#924) + +- Fixes overly restrictive directory permissions reported in #923 ([`ed80333`](https://github.com/python-zeroconf/python-zeroconf/commit/ed80333896c0710857cc46b5af4d7ba3a81e07c8)) + + +## v0.33.0 (2021-07-18) + +### Unknown + +* Bump version: 0.32.1 → 0.33.0 ([`cfb28aa`](https://github.com/python-zeroconf/python-zeroconf/commit/cfb28aaf134e566d8a89b397967d1ad1ec66de35)) + +* Update changelog for 0.33.0 release (#922) ([`e4a9655`](https://github.com/python-zeroconf/python-zeroconf/commit/e4a96550398c408c3e1e6944662cc3093db912a7)) + +* Fix examples/async_registration.py attaching to the correct loop (#921) 
([`b0b23f9`](https://github.com/python-zeroconf/python-zeroconf/commit/b0b23f96d3b33a627a0d071557a36af97a65dae4)) + +* Add support for bump2version (#920) ([`2e00002`](https://github.com/python-zeroconf/python-zeroconf/commit/2e0000252f0aecad8b62a649128326a6528b6824)) + +* Update changelog for 0.33.0 release (#919) ([`96be961`](https://github.com/python-zeroconf/python-zeroconf/commit/96be9618ede3c941e23cb23398b9aed11bed1ffa)) + +* Let connection_lost close the underlying socket (#918) + +- The socket was closed during shutdown before asyncio's connection_lost + handler had a chance to close it which resulted in a traceback on + win32. + +- Fixes #917 ([`919b096`](https://github.com/python-zeroconf/python-zeroconf/commit/919b096d6260a4f9f4306b9b4dddb5b026b49462)) + +* Reduce complexity of DNSRecord (#915) + +- Use constants for calculations in is_expired/is_stale/is_recent ([`b6eaf72`](https://github.com/python-zeroconf/python-zeroconf/commit/b6eaf7249f386f573b0876204ccfdfa02ee9ac5b)) + +* Remove Zeroconf.wait as its now unused in the codebase (#914) ([`aa71084`](https://github.com/python-zeroconf/python-zeroconf/commit/aa7108481235cc018600d096b093c785447d8769)) + +* Switch periodic cleanup task to call_later (#913) + +- Simplifies AsyncEngine to avoid the long running + task ([`38eb271`](https://github.com/python-zeroconf/python-zeroconf/commit/38eb271c952e89260ecac6fac3e723f4206c4648)) + +* Update changelog for 0.33.0 (#912) ([`b2a7a00`](https://github.com/python-zeroconf/python-zeroconf/commit/b2a7a00f82d401066166776cecf0857ebbdb56ad)) + +* Remove locking from ServiceRegistry (#911) + +- All calls to the ServiceRegistry are now done in async context + which makes them thread safe. Locking is no longer needed. 
([`2d3da7a`](https://github.com/python-zeroconf/python-zeroconf/commit/2d3da7a77699f88bd90ebc09d36b333690385f85)) + +* Remove duplicate unregister_all_services code (#910) ([`e63ca51`](https://github.com/python-zeroconf/python-zeroconf/commit/e63ca518c91cda7b9f460436aee4fdac1a7b9567)) + +* Rename DNSNsec.next to DNSNsec.next_name (#908) ([`69942d5`](https://github.com/python-zeroconf/python-zeroconf/commit/69942d5bfb4d92c6a312aea7c17f63fce0401e23)) + +* Upgrade syntax to python 3.6 (#907) ([`0578731`](https://github.com/python-zeroconf/python-zeroconf/commit/057873128ff05a0b2d6eae07510e23d705d10bae)) + +* Implement NSEC record parsing (#903) + +- This is needed for negative responses + https://datatracker.ietf.org/doc/html/rfc6762#section-6.1 ([`bc9e9cf`](https://github.com/python-zeroconf/python-zeroconf/commit/bc9e9cf8a5b997ca924730ed091a829f4f961ca3)) + +* Centralize running coroutines from threads (#906) + +- Cleanup to ensure all coros we run from a thread + use _LOADED_SYSTEM_TIMEOUT ([`9399c57`](https://github.com/python-zeroconf/python-zeroconf/commit/9399c57bb2b280c7b433e7fbea7cca2c2f4417ee)) + +* Reduce duplicate code between zeroconf.asyncio and zeroconf._core (#904) ([`e417fc0`](https://github.com/python-zeroconf/python-zeroconf/commit/e417fc0f5ed7eaa47a0dcaffdbc6fe335bfcc058)) + +* Disable N818 in flake8 (#905) + +- We cannot rename these exceptions now without a breaking change + as they have existed for many years ([`f8af0fb`](https://github.com/python-zeroconf/python-zeroconf/commit/f8af0fb251938dcb410127b2af2b8b407989aa08)) + + +## v0.32.1 (2021-07-05) + +### Unknown + +* Release version 0.32.1 ([`675fd6f`](https://github.com/python-zeroconf/python-zeroconf/commit/675fd6fc959e76e4e3690e5c7a02db269ca9ef60)) + +* Fix the changelog's one sentence's tense ([`fc089be`](https://github.com/python-zeroconf/python-zeroconf/commit/fc089be1f412d991f44daeecd0944198d3a638a5)) + +* Update changelog (#899) 
([`a93301d`](https://github.com/python-zeroconf/python-zeroconf/commit/a93301d0fd493bf18147187bf8efed1a4ea02214)) + +* Increase timeout in ServiceInfo.request to handle loaded systems (#895) + +It can take a few seconds for a loaded system to run the `async_request` coroutine when the event loop is busy or the system is CPU bound (example being Home Assistant startup). We now add +an additional `_LOADED_SYSTEM_TIMEOUT` (10s) to the `run_coroutine_threadsafe` calls to ensure the coroutine has the total amount of time to run up to its internal timeout (default of 3000ms). + +Ten seconds is a bit large of a timeout; however, its only unused in cases where we wrap other timeouts. We now expect the only instance the `run_coroutine_threadsafe` result timeout will happen in a production circumstance is when someone is running a `ServiceInfo.request()` in a thread and another thread calls `Zeroconf.close()` at just the right moment that the future is never completed unless the system is so loaded that it is nearly unresponsive. + +The timeout for `run_coroutine_threadsafe` is the maximum time a thread can cleanly shut down when zeroconf is closed out in another thread, which should always be longer than the underlying thread operation. ([`56c7d69`](https://github.com/python-zeroconf/python-zeroconf/commit/56c7d692d67b7f56c386a7f1f4e45ebfc4e8366a)) + +* Add test for running sync code within executor (#894) ([`90bc8ca`](https://github.com/python-zeroconf/python-zeroconf/commit/90bc8ca8dce1af26ea81c5d6ecb17cf6ea664a71)) + + +## v0.32.0 (2021-06-30) + +### Unknown + +* Fix readme formatting + +It wasn't proper reStructuredText before: + + % twine check dist/* + Checking dist/zeroconf-0.32.0-py3-none-any.whl: FAILED + `long_description` has syntax errors in markup and would not be rendered on PyPI. + line 381: Error: Unknown target name: "async". + warning: `long_description_content_type` missing. defaulting to `text/x-rst`. 
+ Checking dist/zeroconf-0.32.0.tar.gz: FAILED + `long_description` has syntax errors in markup and would not be rendered on PyPI. + line 381: Error: Unknown target name: "async". + warning: `long_description_content_type` missing. defaulting to `text/x-rst`. ([`82ff150`](https://github.com/python-zeroconf/python-zeroconf/commit/82ff150e0a72a7e20823a0c805f48f117bf1e274)) + +* Release version 0.32.0 ([`ea7bc85`](https://github.com/python-zeroconf/python-zeroconf/commit/ea7bc8592e418332e5b9973007698d3cd79754d9)) + +* Reformat changelog to match prior versions (#892) ([`34f6e49`](https://github.com/python-zeroconf/python-zeroconf/commit/34f6e498dec18b84dab1c27c75348916bceef8e6)) + +* Fix spelling and grammar errors in 0.32.0 changelog (#891) ([`ba235dd`](https://github.com/python-zeroconf/python-zeroconf/commit/ba235dd8bc65de4f461f76fd2bf4647844437e1a)) + +* Rewrite 0.32.0 changelog in past tense (#890) ([`0d91156`](https://github.com/python-zeroconf/python-zeroconf/commit/0d911568d367f1520acb19bdf830fe188b6ffb70)) + +* Reformat backwards incompatible changes to match previous versions (#889) ([`9abb40c`](https://github.com/python-zeroconf/python-zeroconf/commit/9abb40cf331bc0acc5fdbb03fce5c958cec8b41e)) + +* Remove extra newlines between changelog entries (#888) ([`d31fd10`](https://github.com/python-zeroconf/python-zeroconf/commit/d31fd103cc942574f7fbc75e5346cc3d3eaf7ee1)) + +* Collapse changelog for 0.32.0 (#887) ([`14cf936`](https://github.com/python-zeroconf/python-zeroconf/commit/14cf9362c9ae947bcee5911b9c593ca76f50d529)) + +* Disable pylint in the CI (#886) ([`b9dc12d`](https://github.com/python-zeroconf/python-zeroconf/commit/b9dc12dee8b4a7f6d8e1f599948bf16e5e7fab47)) + +* Revert name change of zeroconf.asyncio to zeroconf.aio (#885) + +- Now that `__init__.py` no longer needs to import `asyncio`, + the name conflict is not a concern. 
+ +Fixes #883 ([`b9eae5a`](https://github.com/python-zeroconf/python-zeroconf/commit/b9eae5a6f8f86bfe60446f133cad5fc33d072959)) + +* Update changelog (#879) ([`be1d3bb`](https://github.com/python-zeroconf/python-zeroconf/commit/be1d3bbe0ee12254d11e3d8b75c2faba950fabce)) + +* Add coverage to ensure loading zeroconf._logger does not override logging level (#878) ([`86e2ab9`](https://github.com/python-zeroconf/python-zeroconf/commit/86e2ab9db3c7bd47b6e81837d594280ced3b30f9)) + +* Add coverge for disconnected adapters in add_multicast_member (#877) ([`ab83819`](https://github.com/python-zeroconf/python-zeroconf/commit/ab83819ad6b6ff727a894271dde3e4be6c28cb2c)) + +* Break apart net_socket for easier testing (#875) ([`f0770fe`](https://github.com/python-zeroconf/python-zeroconf/commit/f0770fea80b00f2340815fa983968f68a15c702e)) + +* Fix flapping test test_integration_with_listener_class (#876) ([`decd8a2`](https://github.com/python-zeroconf/python-zeroconf/commit/decd8a26aa8a89ceefcd9452fe562f2eeaa3fecb)) + +* Add coverage to ensure unrelated A records do not generate ServiceBrowser callbacks (#874) + +closes #871 ([`471bacd`](https://github.com/python-zeroconf/python-zeroconf/commit/471bacd3200aa1216054c0e52b2e5842e9760aa0)) + +* Update changelog (#870) ([`972da99`](https://github.com/python-zeroconf/python-zeroconf/commit/972da99e4dd9d0fe1c1e0786da45d66fd43a717a)) + +* Fix deadlock when event loop is shutdown during service registration (#869) ([`4ed9036`](https://github.com/python-zeroconf/python-zeroconf/commit/4ed903698b10f434cfbbe601998f27c10d2fb9db)) + +* Break apart new_socket to be testable (#867) ([`22ff6b5`](https://github.com/python-zeroconf/python-zeroconf/commit/22ff6b56d7b6531d2af5c50dca66fd2be2b276f4)) + +* Add test coverage to ensure ServiceBrowser ignores unrelated updates (#866) ([`dcf18c8`](https://github.com/python-zeroconf/python-zeroconf/commit/dcf18c8a32652c6aa70af180b6a5261f4277faa9)) + +* Add test coverage for duplicate properties in a TXT record 
(#865) ([`6ef65fc`](https://github.com/python-zeroconf/python-zeroconf/commit/6ef65fc7cafc3d4089a2b943da224c6cb027b4b0)) + +* Update changelog (#864) ([`c64064a`](https://github.com/python-zeroconf/python-zeroconf/commit/c64064ad3b38a40775637c0fd8877d9d00d2d537)) + +* Ensure protocol and sending errors are logged once (#862) ([`c516919`](https://github.com/python-zeroconf/python-zeroconf/commit/c516919064687551299f23e23bf0797888020041)) + +* Remove unreachable code in AsyncListener.datagram_received (#863) ([`f536869`](https://github.com/python-zeroconf/python-zeroconf/commit/f5368692d7907e440ca81f0acee9744f79dbae80)) + +* Add unit coverage for shutdown_loop (#860) ([`af83c76`](https://github.com/python-zeroconf/python-zeroconf/commit/af83c766c2ae72bd23184c6f6300e4d620c7b3e8)) + +* Make a dispatch dict for ServiceStateChange listeners (#859) ([`57cccc4`](https://github.com/python-zeroconf/python-zeroconf/commit/57cccc4dcbdc9df52672297968ccb55054122049)) + +* Cleanup coverage data (#858) ([`3eb7be9`](https://github.com/python-zeroconf/python-zeroconf/commit/3eb7be95fd6cd4960f96f29aa72fc45347c57b6e)) + +* Fix changelog formatting (#857) ([`59247f1`](https://github.com/python-zeroconf/python-zeroconf/commit/59247f1c44b485bf51d4a8d3e3966b9faf40cf82)) + +* Update changelog (#856) ([`cb2e237`](https://github.com/python-zeroconf/python-zeroconf/commit/cb2e237b6f1af0a83bc7352464562cdb7bbcac14)) + +* Only run linters on Linux in CI (#855) + +- The github MacOS and Windows runners are slower and + will have the same results as the Linux runners so there + is no need to wait for them. 
+ +closes #854 ([`03411f3`](https://github.com/python-zeroconf/python-zeroconf/commit/03411f35d82752d5d2633a67db132a011098d9e6)) + +* Speed up test_verify_name_change_with_lots_of_names under PyPy (#853) + +fixes #840 ([`0cd876f`](https://github.com/python-zeroconf/python-zeroconf/commit/0cd876f5a42699aeb0176380ba4cca4d8a536df3)) + +* Make ServiceInfo first question QU (#852) + +- We want an immediate response when making a request with ServiceInfo + by asking a QU question, most responders will not delay the response + and respond right away to our question. This also improves compatibility + with split networks as we may not have been able to see the response + otherwise. If the responder has not multicast the record recently + it may still choose to do so in addition to responding via unicast + +- Reduces traffic when there are multiple zeroconf instances running + on the network running ServiceBrowsers + +- If we don't get an answer on the first try, we ask a QM question + in the event we can't receive a unicast response for some reason + +- This change puts ServiceInfo inline with ServiceBrowser which + also asks the first question as QU since ServiceInfo is commonly + called from ServiceBrowser callbacks + +closes #851 ([`76e0b05`](https://github.com/python-zeroconf/python-zeroconf/commit/76e0b05ca9c601bd638817bf68ca8d981f1d65f8)) + +* Update changelog (#850) ([`8c9d1d8`](https://github.com/python-zeroconf/python-zeroconf/commit/8c9d1d8964d9226d5d3ac38bec908e930954b369)) + +* Switch ServiceBrowser query scheduling to use call_later instead of a loop (#849) + +- Simplifies scheduling as there is no more need to sleep in a loop as + we now schedule future callbacks with call_later + +- Simplifies cancelation as there is no more coroutine to cancel, only a timer handle + We no longer have to handle the canceled error and cleaning up the awaitable + +- Solves the infrequent test failures in test_backoff and test_integration 
([`a8c1623`](https://github.com/python-zeroconf/python-zeroconf/commit/a8c16231881de43adedbedbc3f1ea707c0b457f2)) + +* Fix spurious failures in ZeroconfServiceTypes tests (#848) + +- These tests ran the same test twice in 0.5s and would + trigger the duplicate packet suppression. Rather then + making them run longer, we can disable the suppression + for the test. ([`9f71e5b`](https://github.com/python-zeroconf/python-zeroconf/commit/9f71e5b7364d4a23492cafe4f49a5c2acda4178d)) + +* Fix thread safety in handlers test (#847) ([`182c68f`](https://github.com/python-zeroconf/python-zeroconf/commit/182c68ff11ba381444a708e17560e920ae1849ef)) + +* Update changelog (#845) ([`72502c3`](https://github.com/python-zeroconf/python-zeroconf/commit/72502c303a1a889cf84906b8764fd941a840e6d3)) + +* Increase timeout in test_integration (#844) + +- The github macOS runners tend to be a bit loaded and these + sometimes fail because of it ([`dd86f2f`](https://github.com/python-zeroconf/python-zeroconf/commit/dd86f2f9fee4bbaebce956b330c1837a6e9c6c99)) + +* Use AAAA records instead of A records in test_integration_with_listener_ipv6 (#843) ([`688c518`](https://github.com/python-zeroconf/python-zeroconf/commit/688c5184dce67e5af857c138639ced4bdcec1e57)) + +* Fix ineffective patching on PyPy (#842) + +- Use patch in all places so its easier to find where we need + to clean up ([`ecd9c94`](https://github.com/python-zeroconf/python-zeroconf/commit/ecd9c941810e4b413b20dc55929b3ae1a7e57b27)) + +* Limit duplicate packet suppression to 1s intervals (#841) + +- Only suppress duplicate packets that happen within the same + second. Legitimate queriers will retry the question if they + are suppressed. 
The limit was reduced to one second to be + in line with rfc6762: + + To protect the network against excessive packet flooding due to + software bugs or malicious attack, a Multicast DNS responder MUST NOT + (except in the one special case of answering probe queries) multicast + a record on a given interface until at least one second has elapsed + since the last time that record was multicast on that particular ([`7fb11bf`](https://github.com/python-zeroconf/python-zeroconf/commit/7fb11bfc03c06cbe9ed5a4303b3e632d69665bb1)) + +* Skip dependencies install in CI on cache hit (#839) + +There is no need to reinstall dependencies in the CI when we have a cache hit. ([`937be52`](https://github.com/python-zeroconf/python-zeroconf/commit/937be522a42830b27326b5253d49003b57998bc9)) + +* Adjust restore key for CI cache (#838) ([`3fdd834`](https://github.com/python-zeroconf/python-zeroconf/commit/3fdd8349553c160586fb6831c9466410f19a3308)) + +* Make multipacket known answer suppression per interface (#836) + +- The suppression was happening per instance of Zeroconf instead + of per interface. 
Since the same network can be seen on multiple + interfaces (usually wifi and ethernet), this would confuse the + multi-packet known answer suppression since it was not expecting + to get the same data more than once
([`10f4a7f`](https://github.com/python-zeroconf/python-zeroconf/commit/10f4a7f8d607d09673be56e5709912403503d86b)) + +* Convert test_integration to asyncio to avoid testing threading races (#828) + +Fixes #768 ([`4c4b388`](https://github.com/python-zeroconf/python-zeroconf/commit/4c4b388ba125ad23a03722b30c71da86853fe05a)) + +* Update changelog (#827) ([`82f80c3`](https://github.com/python-zeroconf/python-zeroconf/commit/82f80c301a6324d2f1711ca751e81069e90030ec)) + +* Drop oversize packets before processing them (#826) + +- Oversized packets can quickly overwhelm the system and deny + service to legitimate queriers. In practice this is usually + due to broken mDNS implementations rather than malicious + actors. ([`6298ef9`](https://github.com/python-zeroconf/python-zeroconf/commit/6298ef9078cf2408bc1e57660ee141e882d13469)) + +* Guard against excessive ServiceBrowser queries from PTR records significantly lower than recommended (#824) + +* We now enforce a minimum TTL for PTR records to avoid +ServiceBrowsers generating excessive queries refresh queries. +Apple uses a 15s minimum TTL, however we do not have the same +level of rate limit and safe guards so we use 1/4 of the recommended value. ([`7f6d003`](https://github.com/python-zeroconf/python-zeroconf/commit/7f6d003210244b6f7df133bd474d7ddf64098422)) + +* Update changelog (#822) ([`4a82769`](https://github.com/python-zeroconf/python-zeroconf/commit/4a8276941a07188180ee31dc4ca578306c2df92b)) + +* Only wake up the query loop when there is a change in the next query time (#818) + +The ServiceBrowser query loop (async_browser_task) was being awoken on +every packet because it was using `zeroconf.async_wait` which wakes +up on every new packet. We only need to awaken the loop when the next time +we are going to send a query has changed. 
+ +fixes #814 fixes #768 ([`4062fe2`](https://github.com/python-zeroconf/python-zeroconf/commit/4062fe21d8baaad36960f8cae0f59ac7083a6b55)) + +* Fix reliablity of tests that patch sending (#820) ([`a7b4f8e`](https://github.com/python-zeroconf/python-zeroconf/commit/a7b4f8e070de69db1ed872e2ff7a953ec624394c)) + +* Fix default v6_flow_scope argument with tests that mock send (#819) ([`f9d3529`](https://github.com/python-zeroconf/python-zeroconf/commit/f9d35299a39fee0b1632a3b2ac00170f761d53b1)) + +* Turn on logging in the types test (#816) + +- Will be needed to track down #813 ([`ffd2532`](https://github.com/python-zeroconf/python-zeroconf/commit/ffd2532f72a59ede86732b310512774b8fa344e7)) + +* New ServiceBrowsers now request QU in the first outgoing when unspecified (#812) ([`e32bb5d`](https://github.com/python-zeroconf/python-zeroconf/commit/e32bb5d98be0dc7ed130224206a4de699bcd68e3)) + +* Update changelog (#811) ([`13c558c`](https://github.com/python-zeroconf/python-zeroconf/commit/13c558cf3f40e52a13347a39b050e49a9241c269)) + +* Simplify wait_event_or_timeout (#810) + +- This function always did the same thing on timeout and + wait complete so we can use the same callback. This + solves the CI failing due to the test coverage flapping + back and forth as the timeout would rarely happen. ([`d4c8f0d`](https://github.com/python-zeroconf/python-zeroconf/commit/d4c8f0d3ffdcdc609810aca383492a57f9e1a723)) + +* Make DNSHinfo and DNSAddress use the same match order as DNSPointer and DNSText (#808) + +We want to check the data that is most likely to be unique first +so we can reject the __eq__ as soon as possible. 
([`f9bbbce`](https://github.com/python-zeroconf/python-zeroconf/commit/f9bbbce388f2c6c24109c15ef843c10eeccf008f)) + +* Format tests/services/test_info.py with newer black (#809) ([`0129ac0`](https://github.com/python-zeroconf/python-zeroconf/commit/0129ac061db4a950f7bddf1084309e44aaabdbdf)) + +* Qualify IPv6 link-local addresses with scope_id (#343) + +Co-authored-by: Lokesh Prajapati +Co-authored-by: de Angelis, Antonio + +When a service is advertised on an IPv6 address where +the scope is link local, i.e. fe80::/64 (see RFC 4007) +the resolved IPv6 address must be extended with the +scope_id that identifies through the "%" symbol the +local interface to be used when routing to that address. +A new API `parsed_scoped_addresses()` is provided to +return qualified addresses to avoid breaking compatibility +on the existing parsed_addresses(). ([`05bb21b`](https://github.com/python-zeroconf/python-zeroconf/commit/05bb21b9b43f171e30b48fad6a756df49162b557)) + +* Tag 0.32.0b3 (#805) ([`5dccf34`](https://github.com/python-zeroconf/python-zeroconf/commit/5dccf3496a9bd4c268da4c39aab545ddcd50ac57)) + +* Update changelog (#804) ([`59e4bd2`](https://github.com/python-zeroconf/python-zeroconf/commit/59e4bd25347aac254700dc3a1518676042982b3a)) + +* Skip network adapters that are disconnected (#327) + +Co-authored-by: J. Nick Koston ([`df66da2`](https://github.com/python-zeroconf/python-zeroconf/commit/df66da2a943b9ff978602680b746f1edeba048dc)) + +* Add slots to DNS classes (#803) + +- On a busy network that receives many mDNS packets per second, we + will not know the answer to most of the questions being asked. + In this case the creating the DNS* objects are usually garbage + collected within 1s as they are not needed. 
We now set __slots__ + to speed up the creation and destruction of these objects ([`18fe341`](https://github.com/python-zeroconf/python-zeroconf/commit/18fe341300e28ed93d7b5d7ca8e07edb119bd597)) + +* Update changelog (#802) ([`58ae3cf`](https://github.com/python-zeroconf/python-zeroconf/commit/58ae3cf553cd925ac90f3db551f4085ea5bc8b79)) + +* Update changelog (#801) ([`662ed61`](https://github.com/python-zeroconf/python-zeroconf/commit/662ed6166282b9b5b6e83a596c0576a57f8962d2)) + +* Ensure we handle threadsafe shutdown under PyPy with multiple event loops (#800) ([`bbc9124`](https://github.com/python-zeroconf/python-zeroconf/commit/bbc91241a86f3339aa27cae7b4ea2ab9d7c1f37d)) + +* Update changelog (#798) ([`9961dce`](https://github.com/python-zeroconf/python-zeroconf/commit/9961dce598d3c6eeda68a2f874a7a50ec33f819c)) + +* Ensure fresh ServiceBrowsers see old_record as None when replaying the cache (#793) ([`38e66ec`](https://github.com/python-zeroconf/python-zeroconf/commit/38e66ec5ba5fcb96cef17b8949385075807a2fb7)) + +* Update changelog (#797) ([`c36099a`](https://github.com/python-zeroconf/python-zeroconf/commit/c36099a41a71298d58e7afa42ecdc7a54d3b010a)) + +* Pass both the new and old records to async_update_records (#792) + +* Pass the old_record (cached) as the value and the new_record (wire) +to async_update_records instead of forcing each consumer to +check the cache since we will always have the old_record +when generating the async_update_records call. This avoids +the overhead of multiple cache lookups for each listener. 
([`d637d67`](https://github.com/python-zeroconf/python-zeroconf/commit/d637d67378698e0a505be90afbce4e2264b49444)) + +* Remove unused constant from zeroconf._handlers (#796) ([`cb91484`](https://github.com/python-zeroconf/python-zeroconf/commit/cb91484670ba76c8c453dc49502e89195561b31e)) + +* Make add_listener and remove_listener threadsafe (#794) ([`2bfbcbe`](https://github.com/python-zeroconf/python-zeroconf/commit/2bfbcbe9e05b9df98bba66a73deb0041c0e7c13b)) + +* Fix test_tc_bit_defers_last_response_missing failures due to thread safety (#795) ([`6aac0eb`](https://github.com/python-zeroconf/python-zeroconf/commit/6aac0eb0c1e394ec7ee21ddd6e98e446417d0e07)) + +* Ensure outgoing ServiceBrowser questions are seen by the question history (#790) ([`ecad4e8`](https://github.com/python-zeroconf/python-zeroconf/commit/ecad4e84c44ffd21dbf15e969c08f7b3376b131c)) + +* Update changelog (#788) ([`5d23628`](https://github.com/python-zeroconf/python-zeroconf/commit/5d2362825110e9f7a9c9259218a664e2e927e821)) + +* Add async_apple_scanner example (#719) ([`62dc9c9`](https://github.com/python-zeroconf/python-zeroconf/commit/62dc9c91c277bc4755f81597adca030a43d0ce5f)) + +* Add support for requesting QU questions to ServiceBrowser and ServiceInfo (#787) ([`135983c`](https://github.com/python-zeroconf/python-zeroconf/commit/135983cb96a27e3ad3750234286d1d9bfa6ff44f)) + +* Update changelog (#786) ([`3b3ecf0`](https://github.com/python-zeroconf/python-zeroconf/commit/3b3ecf09d2f30ee39c6c29b4d85e000577b2c4b9)) + +* Ensure the queue is created before adding listeners to ServiceBrowser (#785) + +* Ensure the queue is created before adding listeners to ServiceBrowser + +- The callback from the listener could generate an event that would + fire in async context that should have gone to the queue which + could result in the consumer running a sync call in the event loop + and blocking it. 
+ +* add comments + +* add comments + +* add comments + +* add comments + +* black ([`97f5b50`](https://github.com/python-zeroconf/python-zeroconf/commit/97f5b502815075f2ff29bee3ace7cde6ad725dfb)) + +* Add a guard to prevent running ServiceInfo.request in async context (#784) + +* Add a guard to prevent running ServiceInfo.request in async context + +* test ([`dd85ae7`](https://github.com/python-zeroconf/python-zeroconf/commit/dd85ae7defd3f195ed0511a2fdb6512326ca0562)) + +* Inline utf8 decoding when processing incoming packets (#782) ([`3be1bc8`](https://github.com/python-zeroconf/python-zeroconf/commit/3be1bc84bff5ee2840040ddff41185b257a1055c)) + +* Drop utf cache from _dns (#781) + +- The cache did not make enough difference to justify the additional + complexity after additional testing was done ([`1b87343`](https://github.com/python-zeroconf/python-zeroconf/commit/1b873436e2d9ff36876a71c48fa697d277fd3ffa)) + +* Switch to using a simple cache instead of lru_cache (#779) ([`7aeafbf`](https://github.com/python-zeroconf/python-zeroconf/commit/7aeafbf3b990ab671ff691b6c20cd410f69808bf)) + +* Reformat test_handlers (#780) ([`767ae8f`](https://github.com/python-zeroconf/python-zeroconf/commit/767ae8f6cd92493f8f43d66edc70c8fd856ed11e)) + +* Fix Responding to Address Queries (RFC6762 section 6.2) (#777) ([`ac9f72a`](https://github.com/python-zeroconf/python-zeroconf/commit/ac9f72a986ae314af0043cae6fb6219baabea7e6)) + +* Implement duplicate question supression (#770) + +https://datatracker.ietf.org/doc/html/rfc6762#section-7.3 ([`c0f4f48`](https://github.com/python-zeroconf/python-zeroconf/commit/c0f4f48e2bb996ce18cb569aa5369356cbc919ff)) + +* Fix deadlock on ServiceBrowser shutdown with PyPy (#774) ([`b5d54e4`](https://github.com/python-zeroconf/python-zeroconf/commit/b5d54e485d9dbcde1b7b472760a0b307198b8ec8)) + +* Add a guard against the task list changing when shutting down (#776) 
([`e8836b1`](https://github.com/python-zeroconf/python-zeroconf/commit/e8836b134c47080edaf47532d7cb844b307dfb08)) + +* Verify async callers can still use Zeroconf without migrating to AsyncZeroconf (#775) ([`f23df4f`](https://github.com/python-zeroconf/python-zeroconf/commit/f23df4f5f05e3911cbf96234b198ea88691aadad)) + +* Implement accidental synchronization protection (RFC2762 section 5.2) (#773) ([`b600547`](https://github.com/python-zeroconf/python-zeroconf/commit/b600547a47878775e1c6fb8df46682a670beccba)) + +* Improve performance of parsing DNSIncoming by caching read_utf (#769) ([`5d44a36`](https://github.com/python-zeroconf/python-zeroconf/commit/5d44a36a59c21ef7869ba9e6dde9f658d3502793)) + +* Add test coverage to ensure RecordManager.add_listener callsback known question answers (#767) ([`e70431e`](https://github.com/python-zeroconf/python-zeroconf/commit/e70431e1fdc92c155309a1d40c89fed48737970c)) + +* Switch to using an asyncio.Event for async_wait (#759) + +- We no longer need to check for thread safety under a asyncio.Condition + as the ServiceBrowser and ServiceInfo internals schedule coroutines + in the eventloop. 
([`6c82fa9`](https://github.com/python-zeroconf/python-zeroconf/commit/6c82fa9efd0f434f0f7c83e3bd98bd7851ede4cf)) + +* Break test_lots_of_names into two tests (#764) ([`85532e1`](https://github.com/python-zeroconf/python-zeroconf/commit/85532e13e42447fcd6d4d4b0060f04d33c3ab780)) + +* Fix test_lots_of_names overflowing the incoming buffer (#763) ([`38b59a6`](https://github.com/python-zeroconf/python-zeroconf/commit/38b59a64592f41b2bb547b35c72a010a925a2941)) + +* Fix race condition in ServiceBrowser test_integration (#762) + +- The event was being cleared in the wrong thread which + meant if the test was fast enough it would not be seen + the second time and give a spurious failure ([`fc0e599`](https://github.com/python-zeroconf/python-zeroconf/commit/fc0e599eec77477dd8f21ecd68b238e6a27f1bcf)) + +* Add 60s timeout for each test (#761) ([`936500a`](https://github.com/python-zeroconf/python-zeroconf/commit/936500a47cc33d9daa86f9012b1791986361ff63)) + +* Add missing coverage for SignalRegistrationInterface (#758) ([`9f68fc8`](https://github.com/python-zeroconf/python-zeroconf/commit/9f68fc8b1b834d0194e8ba1069d052aa853a8d38)) + +* Update changelog (#757) ([`1c93baa`](https://github.com/python-zeroconf/python-zeroconf/commit/1c93baa486b1b0f44487891766e0a0c1de3eb252)) + +* Simplify ServiceBrowser callsbacks (#756) ([`f24ebba`](https://github.com/python-zeroconf/python-zeroconf/commit/f24ebba9ecc4d1626d570956a7cc735206d7ff6e)) + +* Revert: Fix thread safety in _ServiceBrowser.update_records_complete (#708) (#755) + +- This guarding is no longer needed as the ServiceBrowser loop + now runs in the event loop and the thread safety guard is no + longer needed ([`f53c88b`](https://github.com/python-zeroconf/python-zeroconf/commit/f53c88b52ed080c80e2e98d3da91a830f0c7ebca)) + +* Drop AsyncServiceListener (#754) ([`04cd268`](https://github.com/python-zeroconf/python-zeroconf/commit/04cd2688022ebd07c1f875fefc73f8d15c4ed56c)) + +* Run ServiceBrowser queries in the event loop (#752) 
([`4d0a8f3`](https://github.com/python-zeroconf/python-zeroconf/commit/4d0a8f3c643a0fc5c3a40420bab96ef18dddaecb)) + +* Remove unused argument from AsyncZeroconf (#751) ([`e7adce2`](https://github.com/python-zeroconf/python-zeroconf/commit/e7adce2bf6ea0b4af1709369a36421acd9757b4a)) + +* Fix warning about Zeroconf._async_notify_all not being awaited in sync shutdown (#750) ([`3b9baf0`](https://github.com/python-zeroconf/python-zeroconf/commit/3b9baf07278290b2b4eb8ac5850bccfbd8b107d8)) + +* Update async_service_info_request example to ensure it runs in the right event loop (#749) ([`0f702c6`](https://github.com/python-zeroconf/python-zeroconf/commit/0f702c6a41bb33ed63872249b82d1111bdac4fa6)) + +* Run ServiceInfo requests in the event loop (#748) ([`0dbcabf`](https://github.com/python-zeroconf/python-zeroconf/commit/0dbcabfade41057a055ebefffd410d1afc3eb0ea)) + +* Remove support for notify listeners (#733) ([`7b3b4b5`](https://github.com/python-zeroconf/python-zeroconf/commit/7b3b4b5b8303a684165fcd53c0d9c36a1b8dda3d)) + +* Update changelog (#747) ([`0909c80`](https://github.com/python-zeroconf/python-zeroconf/commit/0909c80c67287ba92ed334ab6896136aec0f3f24)) + +* Relocate service info tests to tests/services/test_info.py (#746) ([`541292e`](https://github.com/python-zeroconf/python-zeroconf/commit/541292e55fee8bbafe687afcb8d152f6fe0efb5f)) + +* Relocate service browser tests to tests/services/test_browser.py (#745) ([`869c95a`](https://github.com/python-zeroconf/python-zeroconf/commit/869c95a51e228131eb7debe1acc47c105b9bf7b5)) + +* Relocate ServiceBrowser to zeroconf._services.browser (#744) ([`368163d`](https://github.com/python-zeroconf/python-zeroconf/commit/368163d3c30325d60021203430711e10fd6d97e9)) + +* Relocate ServiceInfo to zeroconf._services.info (#741) ([`f0d727b`](https://github.com/python-zeroconf/python-zeroconf/commit/f0d727bd9addd6dab373b75008f04a6f8547928b)) + +* Run question answer callbacks from add_listener in the event loop (#740) 
([`c8e15dd`](https://github.com/python-zeroconf/python-zeroconf/commit/c8e15dd2bb5f6d2eb3a8ef5f26ad044517b70c47)) + +* Fix flakey cache bit flush test (#739) ([`e227d6e`](https://github.com/python-zeroconf/python-zeroconf/commit/e227d6e4c337ef9d5aa626c41587a8046313e416)) + +* Remove second level caching from ServiceBrowsers (#737) ([`5feda7e`](https://github.com/python-zeroconf/python-zeroconf/commit/5feda7e318f7d164d2b04b2d243a804372517da6)) + +* Breakout ServiceBrowser handler from listener creation (#736) ([`35ac7a3`](https://github.com/python-zeroconf/python-zeroconf/commit/35ac7a39d1fab00898ed6075e7e930424716b627)) + +* Add fast cache lookup functions (#732) ([`9d31245`](https://github.com/python-zeroconf/python-zeroconf/commit/9d31245f9ed4f6b1f7d9d7c51daf0ca394fd208f)) + +* Switch to using DNSRRSet in RecordManager (#735) ([`c035925`](https://github.com/python-zeroconf/python-zeroconf/commit/c035925f47732a889c76a2ff0989b92c6687c950)) + +* Add test coverage to ensure the cache flush bit is properly handled (#734) ([`50af944`](https://github.com/python-zeroconf/python-zeroconf/commit/50af94493ff6bf5d21445eaa80d3a96f348b0d11)) + +* Fix server cache to be case-insensitive (#731) ([`3ee9b65`](https://github.com/python-zeroconf/python-zeroconf/commit/3ee9b650bedbe61d59838897f653ad43a6d51910)) + +* Update changelog (#730) ([`733f79d`](https://github.com/python-zeroconf/python-zeroconf/commit/733f79d28c7dd4500a1598b279ee638ead8bdd55)) + +* Prefix cache functions that are non threadsafe with async_ (#724) ([`3503e76`](https://github.com/python-zeroconf/python-zeroconf/commit/3503e7614fc31bbfe2c919f13689468cc73179fd)) + +* Fix cache handling of records with different TTLs (#729) + +- There should only be one unique record in the cache at + a time as having multiple unique records will different + TTLs in the cache can result in unexpected behavior since + some functions returned all matching records and some + fetched from the right side of the list to return the + 
newest record. Intead we now store the records in a dict + to ensure that the newest record always replaces the same + unique record and we never have a source of truth problem + determining the TTL of a record from the cache. ([`88aa610`](https://github.com/python-zeroconf/python-zeroconf/commit/88aa610274bf79aef6c74998f2bfca8c8de0dccb)) + +* Add tests for the DNSCache class (#728) + +- There is currently a bug in the implementation where an entry + can exist in two places in the cache with different TTLs. Since + a known answer cannot be both expired and expired at the same + time, this is a bug that needs to be fixed. ([`ceb79bd`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb79bd7f7bdad434cbe5b4846492cd434ea883b)) + +* Update changelog (#727) ([`9cc834d`](https://github.com/python-zeroconf/python-zeroconf/commit/9cc834d501fa5e582adeb4468b02775288e1fa11)) + +* Rename handlers and internals to make it clear what is threadsafe (#726) + +- It was too easy to get confused about what was threadsafe and + what was not threadsafe which lead to unexpected failures. 
+ Rename functions to make it clear what will be run in the event + loop and what is expected to be threadsafe ([`f91af79`](https://github.com/python-zeroconf/python-zeroconf/commit/f91af79c8779ac235598f5584f439c78b3bdcca2)) + +* Fix ServiceInfo with multiple A records (#725) ([`3338594`](https://github.com/python-zeroconf/python-zeroconf/commit/33385948da9123bc9348374edce7502abd898e82)) + +* Relocate cache tests to tests/test_cache.py (#722) ([`e2d4d98`](https://github.com/python-zeroconf/python-zeroconf/commit/e2d4d98db70b376c53883367b3a24c1d2510c2b5)) + +* Synchronize time for fate sharing (#718) ([`18ddb8d`](https://github.com/python-zeroconf/python-zeroconf/commit/18ddb8dbeef3edad3bb97131803dfecde4355467)) + +* Update changelog (#717) ([`1ab6859`](https://github.com/python-zeroconf/python-zeroconf/commit/1ab685960bc0e412d36baf6794fde06350998474)) + +* Cleanup typing in zero._core and document ignores (#714) ([`8183640`](https://github.com/python-zeroconf/python-zeroconf/commit/818364008e911757fca24e41a4eb36e0eef49bfa)) + +* Update README (#716) ([`0f2f4e2`](https://github.com/python-zeroconf/python-zeroconf/commit/0f2f4e207cb5007112ba09e87a332b1a46cd1577)) + +* Cleanup typing in zeroconf._logger (#715) ([`3fcdcfd`](https://github.com/python-zeroconf/python-zeroconf/commit/3fcdcfd9a3efc56a34f0334ffb8706613e07d19d)) + +* Cleanup typing in zeroconf._utils.net (#713) ([`a50b3ee`](https://github.com/python-zeroconf/python-zeroconf/commit/a50b3eeda5f275c31b36cdc1c8312f61599e72bf)) + +* Cleanup typing in zeroconf._services (#711) ([`a42512c`](https://github.com/python-zeroconf/python-zeroconf/commit/a42512ca6a6a4c15f37ab623a96deb2aa06dd053)) + +* Cleanup typing in zeroconf._services.registry (#712) ([`6b923de`](https://github.com/python-zeroconf/python-zeroconf/commit/6b923deb3682088d0fe9182377b5603d0ade1e1a)) + +* Add setter for DNSQuestion to easily make a QU question (#710) + +Closes #703 
([`aeb1b23`](https://github.com/python-zeroconf/python-zeroconf/commit/aeb1b23defa2d5956a6f19acca4ce410d6a04cc9)) + +* Synchronize created time for incoming and outgoing queries (#709) ([`c366c8c`](https://github.com/python-zeroconf/python-zeroconf/commit/c366c8cc45f565c4066fc72b481c6a960bac1cb9)) + +* Set stale unique records to expire 1s in the future instead of instant removal (#706) + +- Fixes #475 + +- https://tools.ietf.org/html/rfc6762#section-10.2 + Queriers receiving a Multicast DNS response with a TTL of zero SHOULD + NOT immediately delete the record from the cache, but instead record + a TTL of 1 and then delete the record one second later. In the case + of multiple Multicast DNS responders on the network described in + Section 6.6 above, if one of the responders shuts down and + incorrectly sends goodbye packets for its records, it gives the other + cooperating responders one second to send out their own response to + "rescue" the records before they expire and are deleted. 
([`f3eeecd`](https://github.com/python-zeroconf/python-zeroconf/commit/f3eeecd84413b510b9b8e05e2d1f6ad99d0dc37d)) + +* Fix thread safety in _ServiceBrowser.update_records_complete (#708) ([`dc0c613`](https://github.com/python-zeroconf/python-zeroconf/commit/dc0c6137742edf97626c972e5c9191dfbffaecdc)) + +* Split DNSOutgoing/DNSIncoming/DNSMessage into zeroconf._protocol (#705) ([`f39bde0`](https://github.com/python-zeroconf/python-zeroconf/commit/f39bde0f6cba7a3c1b8fe8bc1a4ab4388801e486)) + +* Update changelog (#699) ([`c368e1c`](https://github.com/python-zeroconf/python-zeroconf/commit/c368e1c67c82598e920ca52b1f7a47ed6e1cf738)) + +* Efficiently bucket queries with known answers (#698) ([`7e30848`](https://github.com/python-zeroconf/python-zeroconf/commit/7e308480238fdf2cfe08474d679121e77f746fa6)) + +* Abstract DNSOutgoing ttl write into _write_ttl (#695) ([`26fa2fb`](https://github.com/python-zeroconf/python-zeroconf/commit/26fa2fb479fff87ca5af17c2c09a557c4b6176b5)) + +* Use unique names in service types tests (#697) ([`767546b`](https://github.com/python-zeroconf/python-zeroconf/commit/767546b656d7db6df0cbf2b257953498f1bc3996)) + +* Rollback data in one call instead of poping one byte at a time in DNSOutgoing (#696) ([`5cbaa3f`](https://github.com/python-zeroconf/python-zeroconf/commit/5cbaa3fc02f635e6c735e1ee5f1ca19b84c0a069)) + +* Fix off by 1 in test_tc_bit_defers_last_response_missing (#694) ([`32b7dc4`](https://github.com/python-zeroconf/python-zeroconf/commit/32b7dc40e2c3621fcacb2f389d51408ab35ac832)) + +* Suppress additionals when answer is suppressed (#690) ([`0cdba98`](https://github.com/python-zeroconf/python-zeroconf/commit/0cdba98e65dd3dce2db8aa607e97e3b67b97721a)) + +* Move setting DNS created and ttl into its own function (#692) ([`993a82e`](https://github.com/python-zeroconf/python-zeroconf/commit/993a82e414db8aadaee0e0475e178e75df417a71)) + +* Remove AA flags from handlers test (#693) + +- The flag was added by mistake when copying from other tests 
([`b60f307`](https://github.com/python-zeroconf/python-zeroconf/commit/b60f307d59e342983d1baa6040c3d997f84538ab)) + +* Implement multi-packet known answer supression (#687) + +- Implements https://datatracker.ietf.org/doc/html/rfc6762#section-7.2 + +- Fixes https://github.com/jstasiak/python-zeroconf/issues/499 ([`8a25a44`](https://github.com/python-zeroconf/python-zeroconf/commit/8a25a44ec5e4f21c6bdb282fefb8f6c2d296a70b)) + +* Remove sleeps from services types test (#688) + +- Instead of registering the services and doing the broadcast + we now put them in the registry directly. ([`4865d2b`](https://github.com/python-zeroconf/python-zeroconf/commit/4865d2ba782d0313c0f7d878f5887453086febaa)) + +* Add truncated property to DNSMessage to lookup the TC bit (#686) ([`e816053`](https://github.com/python-zeroconf/python-zeroconf/commit/e816053af4d900f57100c07c48f384165ba28b9a)) + +* Update changelog (#684) ([`6fd1bf2`](https://github.com/python-zeroconf/python-zeroconf/commit/6fd1bf2364da4fc2949a905d2e4acb7da003e84d)) + +* Add coverage to verify ServiceInfo tolerates bytes or string in the txt record (#683) ([`95ddb36`](https://github.com/python-zeroconf/python-zeroconf/commit/95ddb36de64ddf3be9e93f07a1daa8389410f73d)) + +* Fix logic reversal in apple_p2p test (#681) ([`00b972c`](https://github.com/python-zeroconf/python-zeroconf/commit/00b972c062fd0ed3f2fcc4ceaec84c43b9a613be)) + +* Check if SO_REUSEPORT exists instead of using an exception catch (#682) ([`d2b5e51`](https://github.com/python-zeroconf/python-zeroconf/commit/d2b5e51d0dcde801e171a4c1e43ef1f86abde825)) + +* Use DNSRRSet for known answer suppression (#680) + +- DNSRRSet uses hash table lookups under the hood which + is much faster than the linear searches used by + DNSRecord.suppressed_by ([`e5ea9bb`](https://github.com/python-zeroconf/python-zeroconf/commit/e5ea9bb6c0a3bce7d05241f275a205ddd9e6b615)) + +* Add DNSRRSet class for quick hashtable lookups of records (#678) + +- This class will be used to do fast 
checks to see + if records should be suppressed by a set of answers. ([`691c29e`](https://github.com/python-zeroconf/python-zeroconf/commit/691c29eeb049e17a12d6f0a6e3bce2c3f8c2aa02)) + +* Allow unregistering a service multiple times (#679) ([`d3d439a`](https://github.com/python-zeroconf/python-zeroconf/commit/d3d439ad5d475cff094a4ea83f19d17939527021)) + +* Remove unreachable BadTypeInNameException check in _ServiceBrowser (#677) ([`57c94bb`](https://github.com/python-zeroconf/python-zeroconf/commit/57c94bb25e056e1827f15c234d7e0bcb5702a0e3)) + +* Make calculation of times in DNSRecord lazy (#676) + +- Most of the time we only check one of the time attrs + or none at all. Wait to calculate them until they are + requested. ([`ba2a4f9`](https://github.com/python-zeroconf/python-zeroconf/commit/ba2a4f960d0f9478198968a1466a8b48c963b772)) + +* Add oversized packet to the invalid packet test (#671) ([`8535110`](https://github.com/python-zeroconf/python-zeroconf/commit/8535110dd661ce406904930994a9f86faf897597)) + +* Add test for sending unicast responses (#670) ([`d274cd3`](https://github.com/python-zeroconf/python-zeroconf/commit/d274cd3a3409997b764c49d3eae7e8ee2fba33b6)) + +* Add missing coverage for ServiceInfo address changes (#669) ([`d59fb8b`](https://github.com/python-zeroconf/python-zeroconf/commit/d59fb8be29d8602ad66d89f595b26671a528fd77)) + +* Add missing coverage for ServiceListener (#668) ([`75347b4`](https://github.com/python-zeroconf/python-zeroconf/commit/75347b4e30429e130716b666da52953700f0f8e9)) + +* Update async_browser.py example to use AsyncZeroconfServiceTypes (#665) ([`481cc42`](https://github.com/python-zeroconf/python-zeroconf/commit/481cc42d000f5b0258f1be3b6df7cb7b24428b7f)) + +* Permit the ServiceBrowser to browse overlong types (#666) + +- At least one type "tivo-videostream" exists in the wild + so we are permissive about what we will look for, and + strict about what we will announce. 
+ +Fixes #661 ([`e76c7a5`](https://github.com/python-zeroconf/python-zeroconf/commit/e76c7a5b76485efce0929ee8417aa2e0f262c04c)) + +* Add an AsyncZeroconfServiceTypes to mirror ZeroconfServiceTypes to zeroconf.aio (#658) ([`aaf8a36`](https://github.com/python-zeroconf/python-zeroconf/commit/aaf8a368063f080be4a9c01fe671243e63bdf576)) + +* Fix flakey ZeroconfServiceTypes types test (#662) ([`72db0c1`](https://github.com/python-zeroconf/python-zeroconf/commit/72db0c10246e948c15d9a53f60a54b835ccc67bc)) + +* Add test for launching with apple_p2p=True (#660) + +- Switch to using `sys.platform` to detect Mac instead of + `platform.system()` since `platform.system()` is not intended + to be machine parsable and is only for humans. + +Closes #650 ([`0e52be0`](https://github.com/python-zeroconf/python-zeroconf/commit/0e52be059065e23ebe9e11c465adc20655b6080e)) + +* Add test for Zeroconf.get_service_info failure case (#657) ([`5752ace`](https://github.com/python-zeroconf/python-zeroconf/commit/5752ace7727bffa34cdac0455125a941014ab123)) + +* Add coverage for registering a service with a custom ttl (#656) ([`87fe529`](https://github.com/python-zeroconf/python-zeroconf/commit/87fe529a33b920532b2af688bb66182ae832a3ad)) + +* Improve aio utils tests to validate high lock contention (#655) ([`efd6bfb`](https://github.com/python-zeroconf/python-zeroconf/commit/efd6bfbe81f448da2ee68b91d49cbe1982271da3)) + +* Add test coverage for normalize_interface_choice exception paths (#654) ([`3c61d03`](https://github.com/python-zeroconf/python-zeroconf/commit/3c61d03f5954c3e45229d6c1399a63c0f7331d55)) + +* Remove all calls to the executor in AsyncZeroconf (#653) ([`7d8994b`](https://github.com/python-zeroconf/python-zeroconf/commit/7d8994bc3cb4d5978bb1ff189bb5a4b7c81b5c4c)) + +* Set __all__ in zeroconf.aio to ensure private functions do now show in the docs (#652) ([`b940f87`](https://github.com/python-zeroconf/python-zeroconf/commit/b940f878fe1f8e6b8dfe2554b781cd6034dee722)) + +* Ensure 
interface_index_to_ip6_address skips ipv4 adapters (#651) ([`df9f8d9`](https://github.com/python-zeroconf/python-zeroconf/commit/df9f8d9a0110cc9135b7c2f0b4cd47e985da9a7e)) + +* Add async_unregister_all_services to AsyncZeroconf (#649) ([`72e709b`](https://github.com/python-zeroconf/python-zeroconf/commit/72e709b40caed016ba981be3752c439bbbf40ec7)) + +* Use cache clear helper in aio tests (#648) ([`79e39c0`](https://github.com/python-zeroconf/python-zeroconf/commit/79e39c0e923a1f6d87353761809f34f0fe1f0800)) + +* Ensure services are removed from the registry when calling unregister_all_services (#644) + +- There was a race condition where a query could be answered for a service + in the registry while goodbye packets which could result a fresh record + being broadcast after the goodbye if a query came in at just the right + time. To avoid this, we now remove the services from the registry right + after we generate the goodbye packet ([`cf0b5b9`](https://github.com/python-zeroconf/python-zeroconf/commit/cf0b5b9e2cfa4779425401b3d205f5d913621864)) + +* Use ServiceInfo.key/ServiceInfo.server_key instead of lowering in ServiceRegistry (#647) ([`a83d390`](https://github.com/python-zeroconf/python-zeroconf/commit/a83d390bef042da51d93014c222c65af81723a20)) + +* Add missing coverage to ServiceRegistry (#646) ([`9354ab3`](https://github.com/python-zeroconf/python-zeroconf/commit/9354ab39f350e4e6451dc4965225591761ada40d)) + +* Ensure the ServiceInfo.key gets updated when the name is changed externally (#645) ([`330e36c`](https://github.com/python-zeroconf/python-zeroconf/commit/330e36ceb4202c579fe979958c63c37033ababbb)) + +* Ensure cache is cleared before starting known answer enumeration query test (#639) ([`5ebd954`](https://github.com/python-zeroconf/python-zeroconf/commit/5ebd95452b16e76c37649486b232856a80390ac3)) + +* Ensure AsyncZeroconf.async_close can be called multiple times like Zeroconf.close (#638) 
([`ce6912a`](https://github.com/python-zeroconf/python-zeroconf/commit/ce6912a75392cde41d8950b224ba3d14460993ff)) + +* Update changelog (#637) ([`09c18a4`](https://github.com/python-zeroconf/python-zeroconf/commit/09c18a4173a013e67da5a1cdc7089452ba6f67ee)) + +* Ensure eventloop shutdown is threadsafe (#636) + +- Prevent ConnectionResetError from being thrown on + Windows with ProactorEventLoop on cpython 3.8+ ([`bbbbddf`](https://github.com/python-zeroconf/python-zeroconf/commit/bbbbddf40d78dbd62a84f2439763d0a59211c5b9)) + +* Update changelog (#635) ([`c854d03`](https://github.com/python-zeroconf/python-zeroconf/commit/c854d03efd31e1d002518a43221b347fa6ca5de5)) + +* Clear cache in ZeroconfServiceTypes tests to ensure responses can be mcast before the timeout (#634) + +- We prevent the same record from being multicast within 1s + because of RFC6762 sec 14. Since these tests time out after + 0.5s, the answers they are looking for may be suppressed. + Since a legitimate querier will retry again later, we need + to clear the cache to simulate that the record has not + been multicast recently ([`a0977a1`](https://github.com/python-zeroconf/python-zeroconf/commit/a0977a1ddfd7a7a1abcf74c1d90c18021aebc910)) + +* Mark DNSOutgoing write functions as protected (#633) ([`5f66caa`](https://github.com/python-zeroconf/python-zeroconf/commit/5f66caaccf44c1504988cb82c1cba78d28dde7e7)) + +* Return early in the shutdown/close process (#632) ([`4ce33e4`](https://github.com/python-zeroconf/python-zeroconf/commit/4ce33e48e2094f17d8358cf221c7e2f9a8cb3568)) + +* Update changelog (#631) ([`64f6dd7`](https://github.com/python-zeroconf/python-zeroconf/commit/64f6dd7e244c86d58b962f48a50d07625f2a2a33)) + +* Remove unreachable cache check for DNSAddresses (#629) + +- The ServiceBrowser would check to see if a DNSAddress was + already in the cache and return early to avoid sending + updates when the address already was held in the cache. 
+ This check was not needed since there is already a check + a few lines before as `self.zc.cache.get(record)` which + effectively does the same thing. This lead to the check + never being covered in the tests and 2 cache lookups when + only one was needed. ([`2b31612`](https://github.com/python-zeroconf/python-zeroconf/commit/2b31612e3f128b1193da9e0d2640f4e93fab2e3a)) + +* Add test for wait_condition_or_timeout_times_out util (#630) ([`2065b1d`](https://github.com/python-zeroconf/python-zeroconf/commit/2065b1d7ec7cb5d41c34826c2d8887bdd8a018b6)) + +* Return early on invalid data received (#628) + +- Improve coverage for handling invalid incoming data ([`28a614e`](https://github.com/python-zeroconf/python-zeroconf/commit/28a614e0586a0ca1c5c1651b59c9a4d9c1af9a1b)) + +* Update changelog (#627) ([`215d6ba`](https://github.com/python-zeroconf/python-zeroconf/commit/215d6badb3db796b13a000b26953cb57c557e5e5)) + +* Add test to ensure ServiceBrowser sees port change as an update (#625) ([`113874a`](https://github.com/python-zeroconf/python-zeroconf/commit/113874a7b59ac9cc887b1b626ac1486781c7d56f)) + +* Fix random test failures due to monkey patching not being undone between tests (#626) + +- Switch patching to use unitest.mock.patch to ensure the patch + is reverted when the test is completed + +Fixes #505 ([`5750f7c`](https://github.com/python-zeroconf/python-zeroconf/commit/5750f7ceef0441fe1cedc0d96e7ef5ccc232d875)) + +* Ensure zeroconf can be loaded when the system disables IPv6 (#624) ([`42d53c7`](https://github.com/python-zeroconf/python-zeroconf/commit/42d53c7c04a7bbf4e60e691e2e58fe7acfec8ad9)) + +* Update changelog (#623) ([`4d05961`](https://github.com/python-zeroconf/python-zeroconf/commit/4d05961088efa8b503cad5658afade874eaeec76)) + +* Eliminate aio sender thread (#622) ([`f15e84f`](https://github.com/python-zeroconf/python-zeroconf/commit/f15e84f3ee7a644792fe98edde84dd216b3497cb)) + +* Replace select loop with asyncio loop (#504) 
([`8f00cfc`](https://github.com/python-zeroconf/python-zeroconf/commit/8f00cfca0e67dde6afda399da6984ed7d8f929df)) + +* Add support for handling QU questions (#621) + +- Implements RFC 6762 sec 5.4: + Questions Requesting Unicast Responses + https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 ([`9a32db8`](https://github.com/python-zeroconf/python-zeroconf/commit/9a32db8582588e4bf812fd5670a7e61c50631a2e)) + +* Add is_recent property to DNSRecord (#620) + +- RFC 6762 defines recent as not multicast within one quarter of its TTL + https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 ([`1f36754`](https://github.com/python-zeroconf/python-zeroconf/commit/1f36754f3964738e496a1da9c24380e204aaff01)) + +* Protect the network against excessive packet flooding (#619) ([`0e644ad`](https://github.com/python-zeroconf/python-zeroconf/commit/0e644ad650627024c7a3f926a86f7d9ecc66e591)) + +* Ensure matching PTR queries are returned with the ANY query (#618) + +Fixes #464 ([`b6365aa`](https://github.com/python-zeroconf/python-zeroconf/commit/b6365aa1f889a3045aa185f67354de622bd7ebd3)) + +* Suppress additionals when they are already in the answers section (#617) ([`427b728`](https://github.com/python-zeroconf/python-zeroconf/commit/427b7285269984cbb6f28c87a8bf8f864a5e15d7)) + +* Fix queries for AAAA records (#616) ([`0100c08`](https://github.com/python-zeroconf/python-zeroconf/commit/0100c08c5a3fb90d0795cf57f0bd3e11c7a94a0b)) + +* Breakout the query response handler into its own class (#615) ([`c828c75`](https://github.com/python-zeroconf/python-zeroconf/commit/c828c7555ed1fb82ff95ed578262d1553f19d903)) + +* Avoid including additionals when the answer is suppressed by known-answer supression (#614) ([`219aa3e`](https://github.com/python-zeroconf/python-zeroconf/commit/219aa3e54c944b2935c9a40cc15de19284aded3c)) + +* Add the ability for ServiceInfo.dns_addresses to filter by address type (#612) 
([`aea2c8a`](https://github.com/python-zeroconf/python-zeroconf/commit/aea2c8ab24d4be19b34f407c854241e0d73d0525)) + +* Make DNSRecords hashable (#611) + +- Allows storing them in a set for de-duplication + +- Needed to be able to check for duplicates to solve https://github.com/jstasiak/python-zeroconf/issues/604 ([`b7d8678`](https://github.com/python-zeroconf/python-zeroconf/commit/b7d867878153fa600053869265260992e5462b2d)) + +* Ensure the QU bit is set for probe queries (#609) + +- The bit should be set per + https://datatracker.ietf.org/doc/html/rfc6762#section-8.1 ([`22bd147`](https://github.com/python-zeroconf/python-zeroconf/commit/22bd1475fb58c7c421c0009cd0c5c791cedb225d)) + +* Log destination when sending packets (#606) ([`850e211`](https://github.com/python-zeroconf/python-zeroconf/commit/850e2115aa79c10765dfc45a290a68193397de6c)) + +* Fix docs version to match readme (cpython 3.6+) (#602) ([`809b6df`](https://github.com/python-zeroconf/python-zeroconf/commit/809b6df376205e6ab5ce8fb5fe3a92e77662fe2d)) + +* Add ZeroconfServiceTypes to zeroconf.__all__ (#601) + +- This class is in the readme, but is not exported by + default ([`f6cd8f6`](https://github.com/python-zeroconf/python-zeroconf/commit/f6cd8f6d23459f9ed48ad06ff6702e606d620eaf)) + +* Ensure unicast responses can be sent to any source port (#598) + +- Unicast responses were only being sent if the source port + was 53, this prevented responses when testing with dig: + + dig -p 5353 @224.0.0.251 media-12.local + + The above query will now see a response ([`3556c22`](https://github.com/python-zeroconf/python-zeroconf/commit/3556c22aacc72e62c318955c084533b70311bcc9)) + +* Add id_ param to allow setting the id in the DNSOutgoing constructor (#599) ([`cb64e0d`](https://github.com/python-zeroconf/python-zeroconf/commit/cb64e0dd5d1c621f61d0d0f92ea282d287a9c242)) + +* Fix lookup of uppercase names in registry (#597) + +- If the ServiceInfo was registered with an uppercase name and the query was + for a 
lowercase name, it would not be found and vice-versa. ([`fe72524`](https://github.com/python-zeroconf/python-zeroconf/commit/fe72524dbaf934ca63ebce053e34f3e838743460)) + +* Add unicast property to DNSQuestion to determine if the QU bit is set (#593) ([`d2d8262`](https://github.com/python-zeroconf/python-zeroconf/commit/d2d826220bd4f287835ebb4304450cc2311d1db6)) + +* Reduce branching in DNSOutgoing.add_answer_at_time (#592) ([`35e25fd`](https://github.com/python-zeroconf/python-zeroconf/commit/35e25fd46f8d3689b723dd845eba9862a5dc8a22)) + +* Move notify listener tests to test_core (#591) ([`72032d6`](https://github.com/python-zeroconf/python-zeroconf/commit/72032d6dde2ee7388b8cb4545554519d3ffa8508)) + +* Set mypy follow_imports to skip as ignore is not a valid option (#590) ([`fd70ac1`](https://github.com/python-zeroconf/python-zeroconf/commit/fd70ac1b6bdded992f8fbbb723ca92f5395abf23)) + +* Relocate handlers tests to tests/test_handlers (#588) ([`8aa14d3`](https://github.com/python-zeroconf/python-zeroconf/commit/8aa14d33849c057c91a00e1093606081ade488e7)) + +* Relocate ServiceRegistry tests to tests/services/test_registry (#587) ([`ae6530a`](https://github.com/python-zeroconf/python-zeroconf/commit/ae6530a59e2d8ddb9a7367243c29c5e00665a82f)) + +* Disable flakey ServiceTypesQuery ipv6 win32 test (#586) ([`5cb5702`](https://github.com/python-zeroconf/python-zeroconf/commit/5cb5702fca2845e99b457e4427428497c3cd9b31)) + +* Relocate network utils tests to tests/utils/test_net (#585) ([`12f5676`](https://github.com/python-zeroconf/python-zeroconf/commit/12f567695b5364c9c5c5af0a7017d877de84274d)) + +* Relocate ServiceTypesQuery tests to tests/services/test_types (#584) ([`1fe282b`](https://github.com/python-zeroconf/python-zeroconf/commit/1fe282ba246505d172356cc8672307c7d125820d)) + +* Mark zeroconf.services as protected by renaming to zeroconf._services (#583) + +- The public API should only access zeroconf and zeroconf.aio + as internals may be relocated between releases 
([`4a88066`](https://github.com/python-zeroconf/python-zeroconf/commit/4a88066d66b2f2a00ebc388c5cda478c52cb9e6c)) + +* Mark zeroconf.utils as protected by renaming to zeroconf._utils (#582) + +- The public API should only access zeroconf and zeroconf.aio + as internals may be relocated between releases ([`cc5bc36`](https://github.com/python-zeroconf/python-zeroconf/commit/cc5bc36f6f7597a0adb0d637147c2f93ca243ff4)) + +* Mark zeroconf.cache as protected by renaming to zeroconf._cache (#581) + +- The public API should only access zeroconf and zeroconf.aio + as internals may be relocated between releases ([`a16e85b`](https://github.com/python-zeroconf/python-zeroconf/commit/a16e85b20c2069aa9cee0510c618cb61d46dc19c)) + +* Mark zeroconf.exceptions as protected by renaming to zeroconf._exceptions (#580) + +- The public API should only access zeroconf and zeroconf.aio + as internals may be relocated between releases ([`241700a`](https://github.com/python-zeroconf/python-zeroconf/commit/241700a07a76a8c45afbe1bdd8325cd9f0eb0168)) + +* Fix flakey backoff test race on startup (#579) ([`dd9ada7`](https://github.com/python-zeroconf/python-zeroconf/commit/dd9ada781fdb1d5efc7c6ad194426e92550245b1)) + +* Mark zeroconf.logger as protected by renaming to zeroconf._logger (#578) ([`500066f`](https://github.com/python-zeroconf/python-zeroconf/commit/500066f940aa89737f343976ee0387eae97eac37)) + +* Mark zeroconf.handlers as protected by renaming to zeroconf._handlers (#577) + +- The public API should only access zeroconf and zeroconf.aio + as internals may be relocated between releases ([`1a2ee68`](https://github.com/python-zeroconf/python-zeroconf/commit/1a2ee6892e996c1e84ba97082e5cda609d1d55d7)) + +* Log zeroconf.asyncio deprecation warning with the logger module (#576) ([`c29a235`](https://github.com/python-zeroconf/python-zeroconf/commit/c29a235eb59ed3b4883305cf11f8bf9fa06284d3)) + +* Mark zeroconf.core as protected by renaming to zeroconf._core (#575) 
([`601e8f7`](https://github.com/python-zeroconf/python-zeroconf/commit/601e8f70499638a6f24291bc0a28054fd78243c0)) + +* Mark zeroconf.dns as protected by renaming to zeroconf._dns (#574) + +- The public API should only access zeroconf and zeroconf.aio + as internals may be relocated between releases ([`0e61b15`](https://github.com/python-zeroconf/python-zeroconf/commit/0e61b1502c7fd3412f979bc4d651ee016e712de9)) + +* Update changelog (#573) ([`f10a562`](https://github.com/python-zeroconf/python-zeroconf/commit/f10a562471ad89527e6eef6ba935a27177bb1417)) + +* Relocate services tests to test_services (#570) ([`ae552e9`](https://github.com/python-zeroconf/python-zeroconf/commit/ae552e94732568fd798e1f2d0e811849edff7790)) + +* Remove DNSOutgoing.packet backwards compatibility (#569) + +- DNSOutgoing.packet only returned a partial message when the + DNSOutgoing contents exceeded _MAX_MSG_ABSOLUTE or _MAX_MSG_TYPICAL + This was a legacy function that was replaced with .packets() + which always returns a complete payload in #248 As packet() + should not be used since it will end up missing data, it has + been removed ([`1e7c074`](https://github.com/python-zeroconf/python-zeroconf/commit/1e7c07481bb0cd08fe492dab02be888c6a1dadf2)) + +* Breakout DNSCache into zeroconf.cache (#568) ([`0e0bc2a`](https://github.com/python-zeroconf/python-zeroconf/commit/0e0bc2a901ed1d64e357c63e9fb8655f3a6e9298)) + +* Removed protected imports from zeroconf namespace (#567) + +- These protected items are not intended to be part of the + public API ([`a8420cd`](https://github.com/python-zeroconf/python-zeroconf/commit/a8420cde192647486eba4da4e54df9d0fe65adba)) + +* Update setup.py for utils and services (#562) ([`7807fa0`](https://github.com/python-zeroconf/python-zeroconf/commit/7807fa0dfdab20d950c446f17b7233a8c65cbab1)) + +* Move additional dns tests to test_dns (#561) ([`ae1ce09`](https://github.com/python-zeroconf/python-zeroconf/commit/ae1ce092de7eb4797da0f56e9eb8e538c95a8cc1)) + +* Move 
exceptions tests to test_exceptions (#560) ([`b5d848d`](https://github.com/python-zeroconf/python-zeroconf/commit/b5d848de1ed95c55f8c262bcf0811248818da901)) + +* Move additional tests to test_core (#559) ([`eb37f08`](https://github.com/python-zeroconf/python-zeroconf/commit/eb37f089579fdc5a405dbc2f0ce5620cf9d1b011)) + +* Relocate additional dns tests to test_dns (#558) ([`18b9d0a`](https://github.com/python-zeroconf/python-zeroconf/commit/18b9d0a8bd07c0a0d2923763a5f131905c31e0df)) + +* Relocate dns tests to test_dns (#557) ([`f0d99e2`](https://github.com/python-zeroconf/python-zeroconf/commit/f0d99e2e68791376a8517254338c708a3244f178)) + +* Relocate some of the services tests to test_services (#556) ([`715cd9a`](https://github.com/python-zeroconf/python-zeroconf/commit/715cd9a1d208139862e6d9d718114e1e472efd28)) + +* Fix invalid typing in ServiceInfo._set_text (#554) ([`3d69656`](https://github.com/python-zeroconf/python-zeroconf/commit/3d69656c4e5fbd8f90d54826877a04120d5ec951)) + +* Add missing coverage for ipv6 network utils (#555) ([`3dfda64`](https://github.com/python-zeroconf/python-zeroconf/commit/3dfda644efef83640e80876e4fe7da10e87b5990)) + +* Move ZeroconfServiceTypes to zeroconf.services.types (#553) ([`e50b62b`](https://github.com/python-zeroconf/python-zeroconf/commit/e50b62bb633916d5b84df7bcf7a804c9e3ef7fc2)) + +* Add recipe for TYPE_CHECKING to .coveragerc (#552) ([`e7fb4e5`](https://github.com/python-zeroconf/python-zeroconf/commit/e7fb4e5fb2a6b2163b143a63e2a9e8c5d1eca482)) + +* Move QueryHandler and RecordManager handlers into zeroconf.handlers (#551) ([`5b489e5`](https://github.com/python-zeroconf/python-zeroconf/commit/5b489e5b15ff89a0ffc000ccfeab2a8af346a65e)) + +* Move ServiceListener to zeroconf.services (#550) ([`ffdc988`](https://github.com/python-zeroconf/python-zeroconf/commit/ffdc9887ede1f867c155743b344efc53e0ceee42)) + +* Move the ServiceRegistry into its own module (#549) 
([`4086fb4`](https://github.com/python-zeroconf/python-zeroconf/commit/4086fb4304b0653153865306e46c865c90137922)) + +* Move ServiceStateChange to zeroconf.services (#548) ([`c8a0a71`](https://github.com/python-zeroconf/python-zeroconf/commit/c8a0a71c31252bbc4a242701bc786eb419e1a8e8)) + +* Relocate core functions into zeroconf.core (#547) ([`bf0e867`](https://github.com/python-zeroconf/python-zeroconf/commit/bf0e867ead1e48e05a27fe8db69900d9dc387ea2)) + +* Breakout service classes into zeroconf.services (#544) ([`bdea21c`](https://github.com/python-zeroconf/python-zeroconf/commit/bdea21c0a61b6d9d0af3810f18dbc2fc2364c484)) + +* Move service_type_name to zeroconf.utils.name (#543) ([`b4814f5`](https://github.com/python-zeroconf/python-zeroconf/commit/b4814f5f216cd4072bafdd7dd1e68ee522f329c2)) + +* Relocate DNS classes to zeroconf.dns (#541) ([`1e3e7df`](https://github.com/python-zeroconf/python-zeroconf/commit/1e3e7df8b7fdacd90cf5d864411e5db5a915be94)) + +* Update zeroconf.aio import locations (#539) ([`8733cad`](https://github.com/python-zeroconf/python-zeroconf/commit/8733cad2eae71ebdf94ecadc6fd5439882477235)) + +* Move int2byte to zeroconf.utils.struct (#540) ([`6af42b5`](https://github.com/python-zeroconf/python-zeroconf/commit/6af42b54640ebba541302bfcf7688b3926453b15)) + +* Breakout network utils into zeroconf.utils.net (#537) ([`5af3eb5`](https://github.com/python-zeroconf/python-zeroconf/commit/5af3eb58bfdc1736e6db175c4c6f7c6f2c05b694)) + +* Move time utility functions into zeroconf.utils.time (#536) ([`7ff810a`](https://github.com/python-zeroconf/python-zeroconf/commit/7ff810a02e608fae39634be09d6c3ce0a93485b8)) + +* Avoid making DNSOutgoing aware of the Zeroconf object (#535) + +- This is not a breaking change since this code has not + yet shipped ([`2976cc2`](https://github.com/python-zeroconf/python-zeroconf/commit/2976cc2001cbba2c0afc57b9a3d301f382ddac8a)) + +* Add missing coverage for QuietLogger (#534) 
([`328c1b9`](https://github.com/python-zeroconf/python-zeroconf/commit/328c1b9acdcd5cafa2df3e5b4b833b908d299500)) + +* Move logger into zeroconf.logger (#533) ([`e2e4eed`](https://github.com/python-zeroconf/python-zeroconf/commit/e2e4eede9117827f47c66a4852dd2d236b46ecda)) + +* Move exceptions into zeroconf.exceptions (#532) ([`5100506`](https://github.com/python-zeroconf/python-zeroconf/commit/5100506f896b649e6a6a8e2efb592362cd2644d3)) + +* Move constants into const.py (#531) ([`89d4755`](https://github.com/python-zeroconf/python-zeroconf/commit/89d4755106a6c3bced395b0a26eb3082c1268fa1)) + +* Move asyncio utils into zeroconf.utils.aio (#530) ([`2d8a27a`](https://github.com/python-zeroconf/python-zeroconf/commit/2d8a27a54aee298af74121986b4ea76f1f50b421)) + +* Relocate tests to tests directory (#527) ([`3f1a5a7`](https://github.com/python-zeroconf/python-zeroconf/commit/3f1a5a7b7a929d5f699812a809347b0c2f799fbf)) + +* Fix flakey test_update_record test (round 2) (#528) ([`14542bd`](https://github.com/python-zeroconf/python-zeroconf/commit/14542bd2bd327fd9b3d93cfb48a3bf09d6c89e15)) + +* Move ipversion auto detection code into its own function (#524) ([`16d40b5`](https://github.com/python-zeroconf/python-zeroconf/commit/16d40b50ccab6a8d53fe4aeb7b0006f7fd67ef53)) + +* Fix flakey test_update_record (#525) + +- Ensure enough time has past that the first record update + was processed before sending the second one ([`f49342c`](https://github.com/python-zeroconf/python-zeroconf/commit/f49342cdaff2d012ad23635b49ae746ad71333df)) + +* Update python compatibility as PyPy3 7.2 is required (#523) + +- When the version requirement changed to cpython 3.6, PyPy + was not bumped as well ([`b37d115`](https://github.com/python-zeroconf/python-zeroconf/commit/b37d115a233b61e2989d1439f65cdd911b86f407)) + +* Make the cache cleanup interval a constant (#522) ([`7ce29a2`](https://github.com/python-zeroconf/python-zeroconf/commit/7ce29a2f736af13886aa66dc1c49e15768e6fdcc)) + +* Add test helper 
to inject DNSIncoming (#518) ([`ef7aa25`](https://github.com/python-zeroconf/python-zeroconf/commit/ef7aa250e140d70b8c62abf4d13dcaa36f128c63)) + +* Remove broad exception catch from RecordManager.remove_listener (#517) ([`e125239`](https://github.com/python-zeroconf/python-zeroconf/commit/e12523933819087d2a087b8388e79b24af058a58)) + +* Small cleanups to RecordManager.add_listener (#516) ([`f80a051`](https://github.com/python-zeroconf/python-zeroconf/commit/f80a0515cf73b1e304d0615f8cee91ae38ac1ae8)) + +* Move RecordUpdateListener management into RecordManager (#514) ([`6cc3adb`](https://github.com/python-zeroconf/python-zeroconf/commit/6cc3adb020115ef9626caf61bb5f7550a2da8b4c)) + +* Update changelog (#513) ([`3d6c682`](https://github.com/python-zeroconf/python-zeroconf/commit/3d6c68278713a2ca66e27938feedcc451a078369)) + +* Break out record updating into RecordManager (#512) ([`9a766a2`](https://github.com/python-zeroconf/python-zeroconf/commit/9a766a2a96abd0f105056839b5c30f2ede31ea2e)) + +* Remove unneeded wait in the Engine thread (#511) + +- It is no longer necessary to wait since the socketpair + was added in #243 which will cause the select to unblock + when a new socket is added or removed. 
([`70b455b`](https://github.com/python-zeroconf/python-zeroconf/commit/70b455ba53ce43e9280c02612e8a89665abd57f6)) + +* Stop monkey patching send in the TTL test (#510) ([`954ca3f`](https://github.com/python-zeroconf/python-zeroconf/commit/954ca3fb498bdc7cd5a6a168c40ad5b6b2476e71)) + +* Stop monkey patching send in the PTR optimization test (#509) ([`db866f7`](https://github.com/python-zeroconf/python-zeroconf/commit/db866f7d032ed031e6aa5e14fba24b3dafeafa8d)) + +* Extract code for handling queries into QueryHandler (#507) ([`1cfcc56`](https://github.com/python-zeroconf/python-zeroconf/commit/1cfcc5636a845924eb683ad4acf4d9a36ef85fb7)) + +* Update changelog for zeroconf.asyncio -> zeroconf.aio (#506) ([`26b7005`](https://github.com/python-zeroconf/python-zeroconf/commit/26b70050ffe7dee4fb34428f285be377d1d8f210)) + +* Rename zeroconf.asyncio to zeroconf.aio (#503) + +- The asyncio name could shadow system asyncio in some cases. If + zeroconf is in sys.path, this would result in loading zeroconf.asyncio + when system asyncio was intended. + +- An `zeroconf.asyncio` shim module has been added that imports `zeroconf.aio` + that was available in 0.31 to provide backwards compatibility in 0.32. + This module will be removed in 0.33 to fix the underlying problem + detailed in #502 ([`bfca3b4`](https://github.com/python-zeroconf/python-zeroconf/commit/bfca3b46fd9a395f387bd90b68c523a3ca84bde4)) + +* Update changelog, move breaking changes to the top of the list (#501) ([`9b480bc`](https://github.com/python-zeroconf/python-zeroconf/commit/9b480bc1abb2c2702f60796f2edae76ce03ca5d4)) + +* Set the TC bit for query packets where the known answers span multiple packets (#494) ([`f04a2eb`](https://github.com/python-zeroconf/python-zeroconf/commit/f04a2eb43745eba7c43c9c56179ed1fceb992bd8)) + +* Ensure packets are properly seperated when exceeding maximum size (#498) + +- Ensure that questions that exceed the max packet size are + moved to the next packet. 
This fixes DNSQuestions being + sent in multiple packets in violation of: + https://datatracker.ietf.org/doc/html/rfc6762#section-7.2 + +- Ensure only one resource record is sent when a record + exceeds _MAX_MSG_TYPICAL + https://datatracker.ietf.org/doc/html/rfc6762#section-17 ([`e2908c6`](https://github.com/python-zeroconf/python-zeroconf/commit/e2908c6c89802ba7a0ea51ac351da40bce3f1cb6)) + +* Make a base class for DNSIncoming and DNSOutgoing (#497) ([`38e4b42`](https://github.com/python-zeroconf/python-zeroconf/commit/38e4b42b847e700db52bc51973210efc485d8c23)) + +* Update internal version check to match docs (3.6+) (#491) ([`20f8b3d`](https://github.com/python-zeroconf/python-zeroconf/commit/20f8b3d6fb8d117b0c3c794c4075a00e117e3f31)) + +* Remove unused __ne__ code from Python 2 era (#492) ([`f0c02a0`](https://github.com/python-zeroconf/python-zeroconf/commit/f0c02a02c1a2d7c914c62479bad4957b06471661)) + +* Lint before testing in the CI (#488) ([`69880ae`](https://github.com/python-zeroconf/python-zeroconf/commit/69880ae6ca4d4f0a7d476b0271b89adea92b9389)) + +* Add AsyncServiceBrowser example (#487) ([`ef9334f`](https://github.com/python-zeroconf/python-zeroconf/commit/ef9334f1279d029752186bc6f4a1ebff6229bf5b)) + +* Move threading daemon property into ServiceBrowser class (#486) ([`275765a`](https://github.com/python-zeroconf/python-zeroconf/commit/275765a4fd3b477b79163c04f6411709e14506b9)) + +* Enable test_integration_with_listener_class test on PyPy (#485) ([`49db96d`](https://github.com/python-zeroconf/python-zeroconf/commit/49db96dae466a602662f4fde1537f62a8c8d3110)) + +* RecordUpdateListener now uses update_records instead of update_record (#419) ([`0a69aa0`](https://github.com/python-zeroconf/python-zeroconf/commit/0a69aa0d37e13cb2c65ceb5cc3ab0fd7e9d34b22)) + +* AsyncServiceBrowser must recheck for handlers to call when holding condition (#483) + +- There was a short race condition window where the AsyncServiceBrowser + could add to _handlers_to_call in the 
Engine thread, have the + condition notify_all called, but since the AsyncServiceBrowser was + not yet holding the condition it would not know to stop waiting + and process the handlers to call. ([`9606936`](https://github.com/python-zeroconf/python-zeroconf/commit/960693628006e23fd13fcaefef915ca0c84401b9)) + +* Relocate ServiceBrowser wait time calculation to seperate function (#484) + +- Eliminate the need to duplicate code between the ServiceBrowser + and AsyncServiceBrowser to calculate the wait time. ([`9c06ce1`](https://github.com/python-zeroconf/python-zeroconf/commit/9c06ce15db31ebffe3a556896393d48cb786b5d9)) + +* Switch from using an asyncio.Event to asyncio.Condition for waiting (#482) ([`393910b`](https://github.com/python-zeroconf/python-zeroconf/commit/393910b67ac667a660ee9351cc8f94310937f654)) + +* ServiceBrowser must recheck for handlers to call when holding condition (#477) ([`8da00ca`](https://github.com/python-zeroconf/python-zeroconf/commit/8da00caf31e007153e10a8038a0a484edea03c2f)) + +* Provide a helper function to convert milliseconds to seconds (#481) ([`849e9bc`](https://github.com/python-zeroconf/python-zeroconf/commit/849e9bc792c6cc77b879b4761195192bea1720ce)) + +* Fix AsyncServiceInfo.async_request not waiting long enough (#480) + +- The call to async_wait should have been in milliseconds, but + the time was being passed in seconds which resulted in waiting + 1000x shorter ([`b0c0cdc`](https://github.com/python-zeroconf/python-zeroconf/commit/b0c0cdc6779dc095cf03ebd92652af69800b7bca)) + +* Add support for updating multiple records at once to ServiceInfo (#474) + +- Adds `update_records` method to `ServiceInfo` ([`ed53f62`](https://github.com/python-zeroconf/python-zeroconf/commit/ed53f6283265eb8fb506d4af8fb31bd4eaa7292b)) + +* Narrow exception catch in DNSAddress.__repr__ to only expected exceptions (#473) ([`b853413`](https://github.com/python-zeroconf/python-zeroconf/commit/b8534130ec31a6be191fcc60615ab2fd02fd8d7a)) + +* Add test 
coverage to ensure ServiceInfo rejects expired records (#468) ([`d0f5a60`](https://github.com/python-zeroconf/python-zeroconf/commit/d0f5a60275ccf810407055c63ca9080fa6654443)) + +* Reduce branching in service_type_name (#472) ([`00af5ad`](https://github.com/python-zeroconf/python-zeroconf/commit/00af5adc4be76afd23135d37653119f45c57a531)) + +* Fix flakey test_update_record (#470) ([`1eaeef2`](https://github.com/python-zeroconf/python-zeroconf/commit/1eaeef2d6f07efba67e91699529f8361226233ce)) + +* Reduce branching in Zeroconf.handle_response (#467) + +- Adds `add_records` and `remove_records` to `DNSCache` to + permit multiple records to be added or removed in one call + +- This change is not enough to remove the too-many-branches + pylint disable, however when combined with #419 it should + no longer be needed ([`8a9ae29`](https://github.com/python-zeroconf/python-zeroconf/commit/8a9ae29b6f6643f3625938ac44df66dcc556de46)) + +* Ensure PTR questions asked in uppercase are answered (#465) ([`7a50402`](https://github.com/python-zeroconf/python-zeroconf/commit/7a5040247cbaad6bed3fc1204820dfc31ed9b0ae)) + +* Clear cache between ServiceTypesQuery tests (#466) + +- Ensures the test relies on the ZeroconfServiceTypes.find making + the correct calls instead of the cache from the previous call ([`c3365e1`](https://github.com/python-zeroconf/python-zeroconf/commit/c3365e1fd060cebc63cc42443260bd785077c246)) + +* Break apart Zeroconf.handle_query to reduce branching (#462) ([`c1ed987`](https://github.com/python-zeroconf/python-zeroconf/commit/c1ed987ede34b0049e6466e673b1629d7cd0cd6a)) + +* Support for context managers in Zeroconf and AsyncZeroconf (#284) + +Co-authored-by: J. 
Nick Koston ([`4c4b529`](https://github.com/python-zeroconf/python-zeroconf/commit/4c4b529c841f015108a7489bd8f3b92a5e57e827)) + +* Use constant for service type enumeration (#461) ([`558cec3`](https://github.com/python-zeroconf/python-zeroconf/commit/558cec3687ac7e7f494ab7aa4ce574c1e784b81f)) + +* Reduce branching in Zeroconf.handle_response (#459) ([`ceb0def`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb0def1b43f2e55bb17e33d13d4efdaa055221c)) + +* Reduce branching in Zeroconf.handle_query (#460) ([`5e24da0`](https://github.com/python-zeroconf/python-zeroconf/commit/5e24da08bc463bf79b27eb3768ec01755804f403)) + +* Enable pylint (#438) ([`6fafdee`](https://github.com/python-zeroconf/python-zeroconf/commit/6fafdee241571d68937e29ee0a2b1bd5ef0038d9)) + +* Trap OSError directly in Zeroconf.send instead of checking isinstance (#453) + +- Fixes: Instance of 'Exception' has no 'errno' member (no-member) ([`9510808`](https://github.com/python-zeroconf/python-zeroconf/commit/9510808cfd334b0b2f6381da8214225c4cfbf6a0)) + +* Disable protected-access on the ServiceBrowser usage of _handlers_lock (#452) + +- This will be fixed in https://github.com/jstasiak/python-zeroconf/pull/419 ([`69c4cf6`](https://github.com/python-zeroconf/python-zeroconf/commit/69c4cf69bbc34474e70eac3ad0fe905be7ab4eb4)) + +* Mark functions with too many branches in need of refactoring (#455) ([`5fce89d`](https://github.com/python-zeroconf/python-zeroconf/commit/5fce89db2707b163231aec216e4c4fc310527e4c)) + +* Disable pylint no-self-use check on abstract methods (#451) ([`7544cdf`](https://github.com/python-zeroconf/python-zeroconf/commit/7544cdf956c4eeb4b688729432ba87278f606b7c)) + +* Use unique name in test_async_service_browser test (#450) ([`f26a92b`](https://github.com/python-zeroconf/python-zeroconf/commit/f26a92bc2abe61f5a2b5acd76991f81d07452201)) + +* Disable no-member check for WSAEINVAL false positive (#454) 
([`ef0cf8e`](https://github.com/python-zeroconf/python-zeroconf/commit/ef0cf8e393a8ffdccb3cd2094a8764f707f518c1)) + +* Mark methods used by asyncio without self use (#447) ([`7e03f83`](https://github.com/python-zeroconf/python-zeroconf/commit/7e03f836dd7a4ee938bfff21cd150e863f608b5e)) + +* Extract _get_queue from _AsyncSender (#444) ([`18851ed`](https://github.com/python-zeroconf/python-zeroconf/commit/18851ed4c0f605996798472e1a68dded16d41ff6)) + +* Add missing update_service method to ZeroconfServiceTypes (#449) ([`ffc6cbb`](https://github.com/python-zeroconf/python-zeroconf/commit/ffc6cbb94d7401a70ebd6f747ed6c5e56e528bb0)) + +* Fix redefining argument with the local name 'record' in ServiceInfo.update_record (#448) ([`929ba12`](https://github.com/python-zeroconf/python-zeroconf/commit/929ba12d046496782491d96160e6cb8d0d04cfe5)) + +* Remove unneeded-not in new_socket (#445) ([`424c002`](https://github.com/python-zeroconf/python-zeroconf/commit/424c00257083f1d091a52ff0c966b306eea70efb)) + +* Disable broad except checks in places we still catch broad exceptions (#443) ([`6002c9c`](https://github.com/python-zeroconf/python-zeroconf/commit/6002c9c88a9a49814f86070c07925f798a61461a)) + +* Merge _TYPE_CNAME and _TYPE_PTR comparison in DNSIncoming.read_others (#442) ([`41be4f4`](https://github.com/python-zeroconf/python-zeroconf/commit/41be4f4db0501adb9fbaa6b353fbcb36a45e6e21)) + +* Convert unnecessary use of a comprehension to a list (#441) ([`a70370a`](https://github.com/python-zeroconf/python-zeroconf/commit/a70370a0f653df911cc6f641522cec0fcc8471a3)) + +* Remove unused now argument from ServiceInfo._process_record (#440) ([`594da70`](https://github.com/python-zeroconf/python-zeroconf/commit/594da709273c2e0a53fee2f9ad7fcec607ad0868)) + +* Disable pylint too-many-branches for functions that need refactoring (#439) ([`4bcb698`](https://github.com/python-zeroconf/python-zeroconf/commit/4bcb698bda0ec7266d5e454b5e81a07eb64be32a)) + +* Cleanup unused variables (#437) 
([`8412eb7`](https://github.com/python-zeroconf/python-zeroconf/commit/8412eb791dd5ad1c287c1d7cc24c5db75a5291b7)) + +* Cleanup unnecessary else after returns (#436) ([`1d3f986`](https://github.com/python-zeroconf/python-zeroconf/commit/1d3f986e00e18682c209cecbdea2481f4ca987b5)) + +* Update changelog for latest changes (#435) ([`6737e13`](https://github.com/python-zeroconf/python-zeroconf/commit/6737e13d8e6227b96d5cc0e776c62889b7dc4fd3)) + +* Add zeroconf.asyncio to the docs (#434) ([`5460cae`](https://github.com/python-zeroconf/python-zeroconf/commit/5460caef83b5cdb9c5d637741ed95dea6b328f08)) + +* Fix warning when generating sphinx docs (#432) + +- `docstring of zeroconf.ServiceInfo:5: WARNING: Unknown target name: "type".` ([`e5a0c9a`](https://github.com/python-zeroconf/python-zeroconf/commit/e5a0c9a45df93a668f3611ddf5c41a1800cb4556)) + +* Implement an AsyncServiceBrowser to compliment the sync ServiceBrowser (#429) ([`415a7b7`](https://github.com/python-zeroconf/python-zeroconf/commit/415a7b762030e9d236bef71f39156686a0b277f9)) + +* Seperate non-thread specific code from ServiceBrowser into _ServiceBrowserBase (#428) ([`e7b2bb5`](https://github.com/python-zeroconf/python-zeroconf/commit/e7b2bb5e351f04f4f1e14ef5a20ed2111f8097c4)) + +* Remove is_type_unique as it is unused (#426) ([`e68e337`](https://github.com/python-zeroconf/python-zeroconf/commit/e68e337cd482e06a422b2d2e2e6ae12ce1673ce5)) + +* Avoid checking the registry when answering requests for _services._dns-sd._udp.local. (#425) + +- _services._dns-sd._udp.local. 
is a special case and should never + be in the registry ([`47e266e`](https://github.com/python-zeroconf/python-zeroconf/commit/47e266eb66be36b355f1738cd4d2f7369712b7b3)) + +* Remove unused argument from ServiceInfo.dns_addresses (#423) + +- This should always return all addresses since its _CLASS_UNIQUE ([`fc97e5c`](https://github.com/python-zeroconf/python-zeroconf/commit/fc97e5c3ad35da789373a1898c00efe0f13a3b5f)) + +* A methods to generate DNSRecords from ServiceInfo (#422) ([`41de419`](https://github.com/python-zeroconf/python-zeroconf/commit/41de419453c0679c5a04ec248339783afbeb0e4f)) + +* Seperate logic for consuming records in ServiceInfo (#421) ([`8bca030`](https://github.com/python-zeroconf/python-zeroconf/commit/8bca0305deae0db8ced7e213be3aaee975985c56)) + +* Seperate query generation for ServiceBrowser (#420) ([`58cfcf0`](https://github.com/python-zeroconf/python-zeroconf/commit/58cfcf0c902b5e27937f118bf4f7a855db635301)) + +* Add async_request example with browse (#415) ([`7f08826`](https://github.com/python-zeroconf/python-zeroconf/commit/7f08826c03b7997758ff0236834bf6f1a091c558)) + +* Add async_register_service/async_unregister_service example (#414) ([`71cfbcb`](https://github.com/python-zeroconf/python-zeroconf/commit/71cfbcb85bdd5948f1b96a871b10e9e35ab76c3b)) + +* Update changelog for 0.32.0 (#411) ([`bb83edf`](https://github.com/python-zeroconf/python-zeroconf/commit/bb83edfbca339fb6ec20b821d79b171220f5e675)) + +* Add async_get_service_info to AsyncZeroconf and async_request to AsyncServiceInfo (#408) ([`0fa049c`](https://github.com/python-zeroconf/python-zeroconf/commit/0fa049c2e0f5e9f18830583a8df2736630c891e2)) + +* Add async_wait function to AsyncZeroconf (#410) ([`53306e1`](https://github.com/python-zeroconf/python-zeroconf/commit/53306e1b99d9133590d47081994ee77cef468828)) + +* Add support for registering notify listeners (#409) + +- Notify listeners will be used by AsyncZeroconf to set + asyncio.Event objects when new data is received + +- 
Registering a notify listener: + notify_listener = YourNotifyListener() + Use zeroconf.add_notify_listener(notify_listener) + +- Unregistering a notify listener: + Use zeroconf.remove_notify_listener(notify_listener) + +- Notify listeners must inherit from the NotifyListener + class ([`745087b`](https://github.com/python-zeroconf/python-zeroconf/commit/745087b234dd5ff65b4b041a7221d58030a69cdd)) + +* Remove unreachable code in ServiceInfo.get_name (#407) ([`ff31f38`](https://github.com/python-zeroconf/python-zeroconf/commit/ff31f386273fbe9fd0b466bbe5f724c815745215)) + +* Allow passing in a sync Zeroconf instance to AsyncZeroconf (#406) + +- Uses the same pattern as ZeroconfServiceTypes.find ([`2da6198`](https://github.com/python-zeroconf/python-zeroconf/commit/2da6198b2e60a598580637e80b3bd579c1f845a5)) + +* Use a dedicated thread for sending outgoing packets with asyncio (#404) + +- Sends now go into a queue and are processed by the thread FIFO + +- Avoids overwhelming the executor when registering multiple + services in parallel ([`1e7b46c`](https://github.com/python-zeroconf/python-zeroconf/commit/1e7b46c36f6e0735b44d3edd9740891a2dc0c761)) + +* Seperate query generation for Zeroconf (#403) + +- Will be used to send the query in asyncio ([`e753078`](https://github.com/python-zeroconf/python-zeroconf/commit/e753078f0345fa28ffceb8de69542c8549d2994c)) + +* Seperate query generation in ServiceInfo (#401) ([`bddf69c`](https://github.com/python-zeroconf/python-zeroconf/commit/bddf69c0839eda966376987a8c4a1fbe3d865529)) + +* Remove unreachable code in ServiceInfo (part 2) (#402) + +- self.server is never None ([`4ae27be`](https://github.com/python-zeroconf/python-zeroconf/commit/4ae27beba29c6e9ac1782f40eadda584b4722af7)) + +* Remove unreachable code in ServiceInfo (#400) + +- self.server is never None ([`dd63835`](https://github.com/python-zeroconf/python-zeroconf/commit/dd6383589b161e828def0ed029519a645e434512)) + +* Update changelog with latest changes (#394) 
([`a6010a9`](https://github.com/python-zeroconf/python-zeroconf/commit/a6010a94b626a9a1585cc47417c08516020729d7)) + +* Add test coverage for multiple AAAA records (#391) ([`acf174d`](https://github.com/python-zeroconf/python-zeroconf/commit/acf174db93ee60f1a80d501eb691d9cb434a90b7)) + +* Enable IPv6 in the CI (#393) ([`ec2fafd`](https://github.com/python-zeroconf/python-zeroconf/commit/ec2fafd904cd2d341a3815fcf6d34508dcddda5a)) + +* Fix IPv6 setup under MacOS when binding to "" (#392) + +- Setting IP_MULTICAST_TTL and IP_MULTICAST_LOOP does not work under + MacOS when the bind address is "" ([`d67d5f4`](https://github.com/python-zeroconf/python-zeroconf/commit/d67d5f41effff4c01735de0ae64ed25a5dbe7567)) + +* Update changelog for 0.32.0 (Unreleased) (#390) ([`33a3a6a`](https://github.com/python-zeroconf/python-zeroconf/commit/33a3a6ae42ef8c4ea0f606ad2a02df3f6bc13752)) + +* Ensure ZeroconfServiceTypes.find always cancels the ServiceBrowser (#389) ([`8f4d2e8`](https://github.com/python-zeroconf/python-zeroconf/commit/8f4d2e858a5efadeb33120322c1169f3ce7d6e0c)) + +* Fix flapping test: test_update_record (#388) ([`ba8d8e3`](https://github.com/python-zeroconf/python-zeroconf/commit/ba8d8e3e658c71e0d603db3f4c5bdfe8e508710a)) + +* Simplify DNSPointer processing in ServiceBrowser (#386) ([`709bd9a`](https://github.com/python-zeroconf/python-zeroconf/commit/709bd9abae63cf566220693501cd37cf74391ccf)) + +* Ensure listeners do not miss initial packets if Engine starts too quickly (#387) ([`62a02d7`](https://github.com/python-zeroconf/python-zeroconf/commit/62a02d774fd874340fa3043bd3bf260a77ffe3d8)) + +* Update changelog with latest commits (#384) ([`69d9357`](https://github.com/python-zeroconf/python-zeroconf/commit/69d9357b3dae7a99d302bf4ad71d4ed45cbe3e42)) + +* Ensure the cache is checked for name conflict after final service query with asyncio (#382) + +- The check was not happening after the last query 
([`5057f97`](https://github.com/python-zeroconf/python-zeroconf/commit/5057f97b9b724c041d2bee65972fe3637bf04f0b)) + +* Fix multiple unclosed instances in tests (#383) ([`69a79b9`](https://github.com/python-zeroconf/python-zeroconf/commit/69a79b9fd48a24d311520e228c78b2aae52d1dd5)) + +* Update changelog with latest merges (#381) ([`2b502bc`](https://github.com/python-zeroconf/python-zeroconf/commit/2b502bc2e21efa2f840c42ed79f850b276a8c103)) + +* Complete ServiceInfo request as soon as all questions are answered (#380) + +- Closes a small race condition where there were no questions + to ask because the cache was populated in between checks ([`3afa5c1`](https://github.com/python-zeroconf/python-zeroconf/commit/3afa5c13f2be956505428c5b01f6ce507845131a)) + +* Coalesce browser questions scheduled at the same time (#379) + +- With multiple types, the ServiceBrowser questions can be + chatty because it would generate a question packet for + each type. If multiple types are due to be requested, + try to combine the questions into a single outgoing + packet(s) ([`60c1895`](https://github.com/python-zeroconf/python-zeroconf/commit/60c1895e67a6147ab8c6ba7d21d4fe5adec3e590)) + +* Bump version to 0.31.0 to match released version (#378) ([`23442d2`](https://github.com/python-zeroconf/python-zeroconf/commit/23442d2e5a0336a64646cb70f2ce389746744ce0)) + +* Update changelog with latest merges (#377) ([`5535ea8`](https://github.com/python-zeroconf/python-zeroconf/commit/5535ea8c365557681721fdafdcabfc342c75daf5)) + +* Ensure duplicate packets do not trigger duplicate updates (#376) + +- If TXT or SRV records update was already processed and then + recieved again, it was possible for a second update to be + called back in the ServiceBrowser ([`b158b1c`](https://github.com/python-zeroconf/python-zeroconf/commit/b158b1cff31620d5cf27969e475d788332f4b38c)) + +* Only trigger a ServiceStateChange.Updated event when an ip address is added (#375) 
([`5133742`](https://github.com/python-zeroconf/python-zeroconf/commit/51337425c9be08d59d496c6783d07d5e4e2382d4)) + +* Fix RFC6762 Section 10.2 paragraph 2 compliance (#374) ([`03f2eb6`](https://github.com/python-zeroconf/python-zeroconf/commit/03f2eb688859a78807305771d04b216e20e72064)) + +* Reduce length of ServiceBrowser thread name with many types (#373) + +- Before + +"zeroconf-ServiceBrowser__ssh._tcp.local.-_enphase-envoy._tcp.local.-_hap._udp.local." +"-_nut._tcp.local.-_Volumio._tcp.local.-_kizbox._tcp.local.-_home-assistant._tcp.local." +"-_viziocast._tcp.local.-_dvl-deviceapi._tcp.local.-_ipp._tcp.local.-_touch-able._tcp.local." +"-_hap._tcp.local.-_system-bridge._udp.local.-_dkapi._tcp.local.-_airplay._tcp.local." +"-_elg._tcp.local.-_miio._udp.local.-_wled._tcp.local.-_esphomelib._tcp.local." +"-_ipps._tcp.local.-_fbx-api._tcp.local.-_xbmc-jsonrpc-h._tcp.local.-_powerview._tcp.local." +"-_spotify-connect._tcp.local.-_leap._tcp.local.-_api._udp.local.-_plugwise._tcp.local." +"-_googlecast._tcp.local.-_printer._tcp.local.-_axis-video._tcp.local.-_http._tcp.local." 
+"-_mediaremotetv._tcp.local.-_homekit._tcp.local.-_bond._tcp.local.-_daap._tcp.local._243" + +- After + +"zeroconf-ServiceBrowser-_miio._udp-_mediaremotetv._tcp-_dvl-deviceapi._tcp-_ipp._tcp" +"-_dkapi._tcp-_hap._udp-_xbmc-jsonrpc-h._tcp-_hap._tcp-_googlecast._tcp-_airplay._tcp" +"-_viziocast._tcp-_api._udp-_kizbox._tcp-_spotify-connect._tcp-_home-assistant._tcp" +"-_bond._tcp-_powerview._tcp-_daap._tcp-_http._tcp-_leap._tcp-_elg._tcp-_homekit._tcp" +"-_ipps._tcp-_plugwise._tcp-_ssh._tcp-_esphomelib._tcp-_Volumio._tcp-_fbx-api._tcp" +"-_wled._tcp-_touch-able._tcp-_enphase-envoy._tcp-_axis-video._tcp-_printer._tcp" +"-_system-bridge._udp-_nut._tcp-244" ([`5d4aa28`](https://github.com/python-zeroconf/python-zeroconf/commit/5d4aa2800d1196274cfdd0bf3e631f49ab5b78bd)) + +* Update changelog for 0.32.0 (unreleased) (#372) ([`82fb26f`](https://github.com/python-zeroconf/python-zeroconf/commit/82fb26f14518a8e59f886b8d7b0708a68725bf48)) + +* Remove Callable quoting (#371) + +- The current minimum supported cpython is 3.6+ which does not need + the quoting ([`7f45bef`](https://github.com/python-zeroconf/python-zeroconf/commit/7f45bef8db444b0436c5f80b4f4b31b2f1d7ec2f)) + +* Abstract check to see if a record matches a type the ServiceBrowser wants (#369) ([`4819ef8`](https://github.com/python-zeroconf/python-zeroconf/commit/4819ef8c97ddbbadcd6e7cf1b5fee36f573bde45)) + +* Reduce complexity of ServiceBrowser enqueue_callback (#368) + +- The handler key was by name, however ServiceBrowser can have multiple + types which meant the check to see if a state change was an add + remove, or update was overly complex. 
Reduce the complexity by + making the key (name, type_) ([`4657a77`](https://github.com/python-zeroconf/python-zeroconf/commit/4657a773690a34c897c80894a10ac33b6edadf8b)) + +* Fix empty answers being added in ServiceInfo.request (#367) ([`5a4c1e4`](https://github.com/python-zeroconf/python-zeroconf/commit/5a4c1e46510956276de117d86bee9d2ccb602802)) + +* Ensure ServiceInfo populates all AAAA records (#366) + +- Use get_all_by_details to ensure all records are loaded + into addresses. + +- Only load A/AAAA records from cache once in load_from_cache + if there is a SRV record present + +- Move duplicate code that checked if the ServiceInfo was complete + into its own function ([`bae3a9b`](https://github.com/python-zeroconf/python-zeroconf/commit/bae3a9b97672581e77255c4937b815173c8547b4)) + +* Remove black python 3.5 exception block (#365) ([`6d29e6c`](https://github.com/python-zeroconf/python-zeroconf/commit/6d29e6c93bdcf6cf31fcfa133258257704945dfc)) + +* Small cleanup of ServiceInfo.update_record (#364) + +- Return as record is not viable (None or expired) + +- Switch checks to isinstance since its needed by mypy anyways + +- Prepares for supporting multiple AAAA records (via https://github.com/jstasiak/python-zeroconf/pull/361) ([`1b8b291`](https://github.com/python-zeroconf/python-zeroconf/commit/1b8b2917e7e70e3996e9a96204dd5df3dfb39072)) + +* Add new cache function get_all_by_details (#363) + +- When working with IPv6, multiple AAAA records can exist + for a given host. get_by_details would only return the + latest record in the cache. 
+ +- Fix a case where the cache list can change during + iteration ([`d8c3240`](https://github.com/python-zeroconf/python-zeroconf/commit/d8c32401ada4f430cd75617324b6d8ecd1dbe1f2)) + +* Small cleanups to asyncio tests (#362) ([`7e960b7`](https://github.com/python-zeroconf/python-zeroconf/commit/7e960b78cac8008beca9c5451c6d465e2674a050)) + +* Improve test coverage for name conflicts (#357) ([`c0674e9`](https://github.com/python-zeroconf/python-zeroconf/commit/c0674e97aee4f61212389337340fc8ff4472eb25)) + +* Return task objects created by AsyncZeroconf (#360) ([`8c1c394`](https://github.com/python-zeroconf/python-zeroconf/commit/8c1c394e9b4aa01e08a2c3e240396b533792be55)) + +* Separate cache loading from I/O in ServiceInfo (#356) + +Provides a load_from_cache method on ServiceInfo that does no I/O + +- When a ServiceBrowser is running for a type there is no need + to make queries on the network since the entries will already + be in the cache. When discovering many devices making queries + that will almost certainly fail for offline devices delays the + startup of online devices. + +- The DNSEntry and ServiceInfo classes were matching on the name + instead of the key (lowercase name). These classes now treat dns + names the same reguardless of case. + + https://datatracker.ietf.org/doc/html/rfc6762#section-16 + > The simple rules for case-insensitivity in Unicast DNS [RFC1034] + > [RFC1035] also apply in Multicast DNS; that is to say, in name + > comparisons, the lowercase letters "a" to "z" (0x61 to 0x7A) match + > their uppercase equivalents "A" to "Z" (0x41 to 0x5A). Hence, if a + > querier issues a query for an address record with the name + > "myprinter.local.", then a responder having an address record with + > the name "MyPrinter.local." should issue a response. 
([`87ba2a3`](https://github.com/python-zeroconf/python-zeroconf/commit/87ba2a3960576cfcf4207ea74a711b2c0cc584a7)) + +* Provide an asyncio class for service registration (#347) + +* Provide an AIO wrapper for service registration + +- When using zeroconf with async code, service registration can cause the + executor to overload when registering multiple services since each one + will have to wait a bit between sending the broadcast. An aio subclass + is now available as aio.AsyncZeroconf that implements the following + + - async_register_service + - async_unregister_service + - async_update_service + - async_close + + I/O is currently run in the executor to provide backwards compat with + existing use cases. + + These functions avoid overloading the executor by waiting in the event + loop instead of the executor threads. ([`a41d7b8`](https://github.com/python-zeroconf/python-zeroconf/commit/a41d7b8aa5572f3faf29eb087cc18a1343bbcdfa)) + +* Eliminate the reaper thread (#349) + +- Cache is now purged between reads when the interval is reached + +- Reduce locking since we are already making a copy of the readers + and not reading under the lock + +- Simplify shutdown process ([`7816278`](https://github.com/python-zeroconf/python-zeroconf/commit/781627864efbb3c8285e1b75144d688083414cf3)) + +* Return early when already closed (#350) + +- Reduce indentation with a return early guard in close ([`523aefb`](https://github.com/python-zeroconf/python-zeroconf/commit/523aefb0b0c477489e4e1e4ab763ce56c57295b7)) + +* Skip socket creation if add_multicast_member fails (windows) (#341) + +Co-authored-by: Timothee 'TTimo' Besset ([`beccad1`](https://github.com/python-zeroconf/python-zeroconf/commit/beccad1f0b41730f541b2e90ea2eaa2496de5044)) + +* Simplify cache iteration (#340) + +- Remove the need to trap runtime error +- Only copy the names of the keys when iterating the cache +- Fixes RuntimeError: list changed size during iterating entries_from_name +- Cache services +- The Repear 
thread is no longer aware of the cache internals ([`fe94810`](https://github.com/python-zeroconf/python-zeroconf/commit/fe948105cc0923336ffa6d93cbe7d45470612a36)) + + +## v0.29.0 (2021-03-25) + +### Unknown + +* Release version 0.29.0 ([`203ec2e`](https://github.com/python-zeroconf/python-zeroconf/commit/203ec2e26e6f0f676e7d88b4a1b0c80ad74659f1)) + +* Fill a missing changelog entry ([`53cb804`](https://github.com/python-zeroconf/python-zeroconf/commit/53cb8044bfb4256f570d438817fd37acc8b78511)) + +* Make mypy configuration more lenient + +We want to be able to call untyped modules. ([`f871b90`](https://github.com/python-zeroconf/python-zeroconf/commit/f871b90d25c0f788590ceb14237b08a6b5e6eeeb)) + +* Silence a flaky test on PyPy ([`bc6ef8c`](https://github.com/python-zeroconf/python-zeroconf/commit/bc6ef8c65b22d982798104d5bdf11b78746a8ddd)) + +* Silence a mypy false-positive ([`6482da0`](https://github.com/python-zeroconf/python-zeroconf/commit/6482da05344e6ae8c4da440da4a704a20c344bb6)) + +* Switch from Travis CI/Coveralls to GH Actions/Codecov + +Travis CI free tier is going away and Codecov is my go-to code coverage +service now. + +Closes GH-332. ([`bd80d20`](https://github.com/python-zeroconf/python-zeroconf/commit/bd80d20682c0af5e15a4b7102dcfe814cdba3a01)) + +* Drop Python 3.5 compatibilty, it reached its end of life ([`ab67a7a`](https://github.com/python-zeroconf/python-zeroconf/commit/ab67a7aecd63042178061f0d1a76f9a7f6e1559a)) + +* Use a single socket for InterfaceChoice.Default + +When using multiple sockets with multi-cast, the outgoing +socket's responses could be read back on the incoming socket, +which leads to duplicate processing and could fill up the +incoming buffer before it could be processed. + +This behavior manifested with error similar to +`OSError: [Errno 105] No buffer space available` + +By using a single socket with InterfaceChoice.Default +we avoid this case. 
([`6beefbb`](https://github.com/python-zeroconf/python-zeroconf/commit/6beefbbe76a0e261394b308c8cc68545be653019)) + +* Simplify read_name + +(venv) root@ha-dev:~/python-zeroconf# python3 -m timeit -s 'result=""' -u usec 'result = "".join((result, "thisisaname" + "."))' +20000 loops, best of 5: 16.4 usec per loop +(venv) root@ha-dev:~/python-zeroconf# python3 -m timeit -s 'result=""' -u usec 'result += "thisisaname" + "."' +2000000 loops, best of 5: 0.105 usec per loop ([`5e268fa`](https://github.com/python-zeroconf/python-zeroconf/commit/5e268faeaa99f0a513c7bbeda8f447f4eb36a747)) + +* Fix link to readme md --> rst (#324) ([`c5a675d`](https://github.com/python-zeroconf/python-zeroconf/commit/c5a675d22788aa905a4e47feb1d4c30f30416356)) + + +## v0.28.8 (2021-01-04) + +### Unknown + +* Release version 0.28.8 ([`1d726b5`](https://github.com/python-zeroconf/python-zeroconf/commit/1d726b551a49e945b134df6e29b352697030c5a9)) + +* Ensure the name cache is rolled back when the packet reaches maximum size + +If the packet was too large, it would be rolled back at the end of write_record. +We need to remove the names that were added to the name cache (self.names) +as well to avoid a case were we would create a pointer to a name that was +rolled back. + +The size of the packet was incorrect at the end after the inserts because +insert_short would increase self.size even though it was already accounted +before. To resolve this insert_short_at_start was added which does not +increase self.size. This did not cause an actual bug, however it sure +made debugging this problem far more difficult. + +Additionally the size now inserted and then replaced when the actual +size is known because it made debugging quite difficult since the size +did not previously agree with the data. 
([`86b4e11`](https://github.com/python-zeroconf/python-zeroconf/commit/86b4e11434d44e2f9a42354109a10f601c44d66a)) + + +## v0.28.7 (2020-12-13) + +### Unknown + +* Release version 0.28.7 ([`8f7effd`](https://github.com/python-zeroconf/python-zeroconf/commit/8f7effd2f89c542162d0e5ac257c561501690d16)) + +* Refactor to move service registration into a registry + +This permits removing the broad exception catch that +was expanded to avoid a crash in when adding or +removing a service ([`2708fef`](https://github.com/python-zeroconf/python-zeroconf/commit/2708fef6052f7e6e6eb36a157438b316e6d38b21)) + +* Prevent crash when a service is added or removed during handle_response + +Services are now modified under a lock. The service processing +is now done in a try block to ensure RuntimeError is caught +which prevents the zeroconf engine from unexpectedly +terminating. ([`4136858`](https://github.com/python-zeroconf/python-zeroconf/commit/41368588e5fcc6ec9596f306e39e2eaac2a9ec18)) + +* Restore IPv6 addresses output + +Before this change, script `examples/browser.py` printed IPv4 only, even with `--v6` argument. +With this change, `examples/browser.py` prints both IPv4 + IPv6 by default, and IPv6 only with `--v6-only` argument. + +I took the idea from the fork +https://github.com/ad3angel1s/python-zeroconf/blob/master/examples/browser.py ([`4da1612`](https://github.com/python-zeroconf/python-zeroconf/commit/4da1612b728acbcf2ab0c4bee09891c46f387bfb)) + + +## v0.28.6 (2020-10-13) + +### Unknown + +* Release version 0.28.6 ([`4744427`](https://github.com/python-zeroconf/python-zeroconf/commit/474442750d5d529436a118fda98a0b5f4680dc4d)) + +* Merge strict and allow_underscores (#309) + +Those really serve the same purpose -- are we receiving data (and want +to be flexible) or registering services (and want to be strict). 
([`6a0c5dd`](https://github.com/python-zeroconf/python-zeroconf/commit/6a0c5dd4e84c30264747847e8f1045ece2a14288)) + +* Loosen validation to ensure get_service_info can handle production devices (#307) + +Validation of names was too strict and rejected devices that are otherwise +functional. A partial list of devices that unexpectedly triggered +a BadTypeInNameException: + + Bose Soundtouch + Yeelights + Rachio Sprinklers + iDevices ([`6ab0cd0`](https://github.com/python-zeroconf/python-zeroconf/commit/6ab0cd0a0446f158a1d8a64a3bc548cf9e103179)) + + +## v0.28.5 (2020-09-11) + +### Unknown + +* Release version 0.28.5 ([`eda1b3d`](https://github.com/python-zeroconf/python-zeroconf/commit/eda1b3dd17329c40a59b628b4bbca15c42af43b7)) + +* Fix AttributeError: module 'unittest' has no attribute 'mock' (#302) + +We only had module-level unittest import before now, but code accessing +mock through unittest.mock was working because we have a test-level +import from unittest.mock which causes unittest to gain the mock +attribute and if the test was run before other tests (those using +unittest.mock.patch) all was good. If the test was not run before them, +though, they'd fail. + +Closes GH-295. ([`2db7fff`](https://github.com/python-zeroconf/python-zeroconf/commit/2db7fff033937a929cdfee1fc7c93c594872799e)) + +* Ignore duplicate messages (#299) + +When watching packet captures, I noticed that zeroconf was processing +incoming data 3x on a my Home Assistant OS install because there are +three interfaces. + +We can skip processing duplicate packets in order to reduce the overhead +of decoding data we have already processed. 
+ +Before + +Idle cpu ~8.3% + +recvfrom 4 times + + 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 + 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 + 267 recvfrom(8, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 + 267 recvfrom(8, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 + +sendto 8 times + + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 
66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 
66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 
66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 + +After + +Idle cpu ~4.1% + +recvfrom 4 times (no change): + + 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 + 267 recvfrom(9, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 + 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 + 267 recvfrom(9, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 + +sendto 2 times (reduced by 4x): + + 267 sendto(9, "\0\0\204\0\0\0\0\2\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\t_services\7_dns-sd\4_udp\300!\0\f\0\1\0\0\21\224\0\2\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300p\0\1\200\1\0\0\0x\0\4\300\250\325\232", 372, 0, {sa_family=AF_INET, 
sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 372 + 267 sendto(9, "\0\0\204\0\0\0\0\2\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\t_services\7_dns-sd\4_udp\300!\0\f\0\1\0\0\21\224\0\2\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300p\0\1\200\1\0\0\0x\0\4\300\250\325\232", 372, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 372 + +With debug logging on for ~5 minutes + + bash-5.0# grep 'Received from' home-assistant.log |wc + 11458 499196 19706165 + bash-5.0# grep 'Ignoring' home-assistant.log |wc + 9357 210562 9299687 ([`f321932`](https://github.com/python-zeroconf/python-zeroconf/commit/f3219326e65f4410d45ace05f88082354a2f7525)) + +* Test with the development version of Python 3.9 (#300) + +There've been reports of test failures on Python 3.9, let's verify this. +Allowing failures for now until it goes stable. ([`1f81e0b`](https://github.com/python-zeroconf/python-zeroconf/commit/1f81e0bcad1cae735ba532758d167368925c8ede)) + + +## v0.28.4 (2020-09-06) + +### Unknown + +* Release version 0.28.4 ([`fb876d6`](https://github.com/python-zeroconf/python-zeroconf/commit/fb876d6013979cdaa8c0ddebe81e7520e9ee8cc9)) + +* Add ServiceListener to __all__ for Zeroconf module (#298) + +It's part of the public API. ([`0265a9d`](https://github.com/python-zeroconf/python-zeroconf/commit/0265a9d57630a4a19bcd3638a6bb3f4b18eba01b)) + +* Avoid copying the entires cache and reduce frequency of Reaper + +The cache reaper was running at least every 10 seconds, making +a copy of the cache, and iterated all the entries to +check if they were expired so they could be removed. 
+ +In practice the reaper was actually running much more frequently +because it used self.zc.wait which would unblock any time +a record was updated, a listener was added, or when a +listener was removed. + +This change ensures the reaper frequency is only every 10s, and +will first attempt to iterate the cache before falling back to +making a copy. + +Previously it made sense to expire the cache more frequently +because we had places were we frequently had to enumerate +all the cache entries. With #247 and #232 we no longer +have to account for this concern. + +On a mostly idle RPi running HomeAssistant and a busy +network the total time spent reaping the cache was +more than the total time spent processing the mDNS traffic. + +Top 10 functions, idle RPi (before) + + %Own %Total OwnTime TotalTime Function (filename:line) + 0.00% 0.00% 2.69s 2.69s handle_read (zeroconf/__init__.py:1367) <== Incoming mDNS + 0.00% 0.00% 1.51s 2.98s run (zeroconf/__init__.py:1431) <== Reaper + 0.00% 0.00% 1.42s 1.42s is_expired (zeroconf/__init__.py:502) <== Reaper + 0.00% 0.00% 1.12s 1.12s entries (zeroconf/__init__.py:1274) <== Reaper + 0.00% 0.00% 0.620s 0.620s do_execute (sqlalchemy/engine/default.py:593) + 0.00% 0.00% 0.620s 0.620s read_utf (zeroconf/__init__.py:837) + 0.00% 0.00% 0.610s 0.610s do_commit (sqlalchemy/engine/default.py:546) + 0.00% 0.00% 0.540s 1.16s read_name (zeroconf/__init__.py:853) + 0.00% 0.00% 0.380s 0.380s do_close (sqlalchemy/engine/default.py:549) + 0.00% 0.00% 0.340s 0.340s write (asyncio/selector_events.py:908) + +After this change, the Reaper code paths do not show up in the top +10 function sample. 
+ + %Own %Total OwnTime TotalTime Function (filename:line) + 4.00% 4.00% 2.72s 2.72s handle_read (zeroconf/__init__.py:1378) <== Incoming mDNS + 4.00% 4.00% 1.81s 1.81s read_utf (zeroconf/__init__.py:837) + 1.00% 5.00% 1.68s 3.51s read_name (zeroconf/__init__.py:853) + 0.00% 0.00% 1.32s 1.32s do_execute (sqlalchemy/engine/default.py:593) + 0.00% 0.00% 0.960s 0.960s readinto (socket.py:669) + 0.00% 0.00% 0.950s 0.950s create_connection (urllib3/util/connection.py:74) + 0.00% 0.00% 0.910s 0.910s do_commit (sqlalchemy/engine/default.py:546) + 1.00% 1.00% 0.880s 0.880s write (asyncio/selector_events.py:908) + 0.00% 0.00% 0.700s 0.810s __eq__ (zeroconf/__init__.py:606) + 2.00% 2.00% 0.670s 0.670s unpack (zeroconf/__init__.py:737) ([`1e4aaea`](https://github.com/python-zeroconf/python-zeroconf/commit/1e4aaeaa10c306b9447dacefa03b89ce1e9d7493)) + +* Add an author in the last changelog entry ([`9e27d12`](https://github.com/python-zeroconf/python-zeroconf/commit/9e27d126d75c73466584c417ab35c1d6cf47ca8b)) + + +## v0.28.3 (2020-08-31) + +### Unknown + +* Release version 0.28.3 ([`0e49aec`](https://github.com/python-zeroconf/python-zeroconf/commit/0e49aeca6497ede18a3f0c71ea69f2343934ba19)) + +* Reduce the time window that the handlers lock is held + +Only hold the lock if we have an update. ([`5a359bb`](https://github.com/python-zeroconf/python-zeroconf/commit/5a359bb0931fbda8444e30d07a50e59cf4ccca8e)) + +* Reformat using the latest black (20.8b1) ([`57d89d8`](https://github.com/python-zeroconf/python-zeroconf/commit/57d89d85e52dea1f8cb7f6d4b02c0281d5ba0540)) + + +## v0.28.2 (2020-08-27) + +### Unknown + +* Release version 0.28.2 ([`f64768a`](https://github.com/python-zeroconf/python-zeroconf/commit/f64768a7253829f9d8f7796a6a5c8129b92f2aad)) + +* Increase test coverage for dns cache ([`3be96b0`](https://github.com/python-zeroconf/python-zeroconf/commit/3be96b014d61c94d71ae3aa23ba223eead4f4cb7)) + +* Don't ask already answered questions (#292) + +Fixes GH-288. 
+ +Co-authored-by: Erik ([`fca090d`](https://github.com/python-zeroconf/python-zeroconf/commit/fca090db06a0d481ad7f608c4fde3e936ad2f80e)) + +* Remove initial delay before querying for service info ([`0f73664`](https://github.com/python-zeroconf/python-zeroconf/commit/0f7366423fab8369700be086f3007c20897fde1f)) + + +## v0.28.1 (2020-08-17) + +### Unknown + +* Release version 0.28.1 ([`3c5d385`](https://github.com/python-zeroconf/python-zeroconf/commit/3c5d3856e286824611712de13aa0fcbe94e4313f)) + +* Ensure all listeners are cleaned up on ServiceBrowser cancelation (#290) + +When creating listeners for a ServiceBrowser with multiple types +they would not all be removed on cancelation. This led +to a build up of stale listeners when ServiceBrowsers were +frequently added and removed. ([`c9f3c91`](https://github.com/python-zeroconf/python-zeroconf/commit/c9f3c91da568fdbd26d571eed8a636a49e527b15)) + +* Gitignore some build artifacts ([`19e33a6`](https://github.com/python-zeroconf/python-zeroconf/commit/19e33a6829846008b50f408c77ac3e8e73176529)) + + +## v0.28.0 (2020-07-07) + +### Unknown + +* Release version 0.28.0 ([`0fdbf5e`](https://github.com/python-zeroconf/python-zeroconf/commit/0fdbf5e197a9f76e9e9c91a5e0908a0c66370dbd)) + +* Advertise Python 3.8 compatibility ([`02bcad9`](https://github.com/python-zeroconf/python-zeroconf/commit/02bcad902c516a5a2d2aa3302bca9871900da6e3)) + +* Fix an OS X edge case (#270, #188) + +This contains two major changes: + +* Listen on data from respond_sockets in addition to listen_socket +* Do not bind respond sockets to 0.0.0.0 or ::/0 + +The description of the original change by Emil: + +<<< +Without either of these changes, I get no replies at all when browsing for +services using the browser example. I'm on a corporate network, and when +connecting to a different network it works without these changes, so maybe +it's something about the network configuration in this particular network +that breaks the previous behavior. 
+ +Unfortunately, I have no idea how this affects other platforms, or what +the changes really mean. However, it works for me and it seems reasonable +to get replies back on the same socket where they are sent. +>>> + +The tests pass and it's been confirmed to a reasonable degree that this +doesn't break the previously working use cases. + +Additionally this removes a memory leak where data sent to some of the +respond sockets would not be ever read from them (#171). + +Co-authored-by: Emil Styrke ([`fc92b1e`](https://github.com/python-zeroconf/python-zeroconf/commit/fc92b1e2635868792aa7ebe937a9cfef2e2f0418)) + +* Stop using socket.if_nameindex (#282) + +This improves Windows compatibility ([`a7f9823`](https://github.com/python-zeroconf/python-zeroconf/commit/a7f9823cbed254b506a09cc514d86d9f5dc61ad3)) + +* Make Mypy happy (#281) + +Otherwise it'd complain: + + % make mypy + mypy examples/*.py zeroconf/*.py + zeroconf/__init__.py:2039: error: Returning Any from function declared to return "int" + Found 1 error in 1 file (checked 6 source files) + make: *** [mypy] Error 1 ([`4381784`](https://github.com/python-zeroconf/python-zeroconf/commit/4381784150e07625b4acd2034b253bf2ed320c5f)) + +* Use Adapter.index from ifaddr. 
(#280) + +Co-authored-by: PhilippSelenium ([`64056ab`](https://github.com/python-zeroconf/python-zeroconf/commit/64056ab4aa55eb11c185c9879462ba1f82c7e886)) + +* Exclude a problematic pep8-naming version ([`023e72d`](https://github.com/python-zeroconf/python-zeroconf/commit/023e72d821faed9513ee0ef3a22a00231d87389e)) + +* Log listen and respond sockets just in case ([`3b6906a`](https://github.com/python-zeroconf/python-zeroconf/commit/3b6906ab94f8d9ebeb1c97b6026ab7f9be226eab)) + +* Fix one log format string (we use a socket object here) ([`328abfc`](https://github.com/python-zeroconf/python-zeroconf/commit/328abfc54138e68e36a9f5381650bd6997701e73)) + +* Add support for passing text addresses to ServiceInfo + +Not sure if parsed_addresses is the best way to name the parameter, but +we already have a parsed_addresses property so for the sake of +consistency let's stick to that. ([`0a9aa8d`](https://github.com/python-zeroconf/python-zeroconf/commit/0a9aa8d31bffec5d7b7291b84fbc95222b10d189)) + +* Support Windows when using socket errno checks (#274) + +Windows reports errno.WSAEINVAL(10022) instead of errno.EINVAL(22). +This issue is triggered when a device has two IP's assigned under +windows. + +This fixes #189 ([`c31ae7f`](https://github.com/python-zeroconf/python-zeroconf/commit/c31ae7fd519df04f41939d3c60c2b88960737fd6)) + + +## v0.27.1 (2020-06-05) + +### Unknown + +* Release version 0.27.1 ([`0538abf`](https://github.com/python-zeroconf/python-zeroconf/commit/0538abf135f5502d94dd883475bcb2781ce5ddd2)) + +* Fix false warning (#273) + +When there is nothing to write, we don't need to warn about not making progress. 
([`10065b9`](https://github.com/python-zeroconf/python-zeroconf/commit/10065b976247ae9247cddaff8f3e9d7b331e66d7)) + +* Improve logging (mainly include sockets in some messages) (#271) ([`beff998`](https://github.com/python-zeroconf/python-zeroconf/commit/beff99897f0a5ece17e224a7ea9b12ebd420044f)) + +* Simplify DNSHinfo constructor, cpu and os are always text (#266) ([`d6593af`](https://github.com/python-zeroconf/python-zeroconf/commit/d6593af2a3811b262d70bbc75c2c91613de41b21)) + +* Improve ImportError message (wrong supported Python version) ([`8045191`](https://github.com/python-zeroconf/python-zeroconf/commit/8045191ae6300da47d38e5cd82957965139359d2)) + +* Remove old Python 2-specific code ([`6f876a7`](https://github.com/python-zeroconf/python-zeroconf/commit/6f876a7f14f0b172860005b0d6d959d82f7c1bbf)) + + +## v0.27.0 (2020-05-27) + +### Unknown + +* Release version 0.27.0 ([`0502f19`](https://github.com/python-zeroconf/python-zeroconf/commit/0502f1904b0a8b9134ea2a09333232b30b3b6897)) + +* Remove no longer needed typing dependency + +We don't support Python older than 3.5. ([`d881aba`](https://github.com/python-zeroconf/python-zeroconf/commit/d881abaf591f260ad019f4ff86e7f70a6f018a64)) + +* Add --find option to example/browser.py (#263, rebased #175) + +Co-authored-by: Perry Kundert ([`781ac83`](https://github.com/python-zeroconf/python-zeroconf/commit/781ac834da38708d95bfe6e5f5ec7dd0f31efc54)) + +* Restore missing warnings import ([`178cec7`](https://github.com/python-zeroconf/python-zeroconf/commit/178cec75bd9a065b150b3542dfdb40682f6745b6)) + +* Warn on every call to missing update_service() listener method + +This is in order to provide visibility to the library users that this +method exists - without it the client code may be missing data. 
([`488ee1e`](https://github.com/python-zeroconf/python-zeroconf/commit/488ee1e85762dc5856d8e132da54762e5e712c5a)) + +* Separately send large mDNS responses to comply with RFC 6762 (#248) + +This fixes issue #245 + +Split up large multi-response packets into separate packets instead of relying on IP Fragmentation. IP Fragmentation of mDNS packets causes ChromeCast Audios to +crash their mDNS responder processes and RFC 6762 +(https://tools.ietf.org/html/rfc6762) section 17 states some +requirements for Multicast DNS Message Size, and the fourth paragraph reads: + +"A Multicast DNS packet larger than the interface MTU, which is sent +using fragments, MUST NOT contain more than one resource record." + +This change makes this implementation conform with this MUST NOT clause. ([`87a0fe2`](https://github.com/python-zeroconf/python-zeroconf/commit/87a0fe27a7be9d96af08f8a007f37a16105c64a0)) + +* Remove deprecated ServiceInfo address parameter/property (#260) ([`ab72aa8`](https://github.com/python-zeroconf/python-zeroconf/commit/ab72aa8e5a6a83e50d24d7fb187e8fa8a549a847)) + + +## v0.26.3 (2020-05-26) + +### Unknown + +* Release version 0.26.3 ([`fbcefca`](https://github.com/python-zeroconf/python-zeroconf/commit/fbcefca592632304579c1b3f9c7bd3dd342e1618)) + +* Don't call callbacks when holding _handlers_lock (#258) + +Closes #255 + +Background: +#239 adds the lock _handlers_lock: + +python-zeroconf/zeroconf/__init__.py + + self._handlers_lock = threading.Lock() # ensure we process a full message in one go + +Which is used in the engine thread: + + def handle_response(self, msg: DNSIncoming) -> None: + """Deal with incoming response packets. 
All answers + are held in the cache, and listeners are notified.""" + + with self._handlers_lock: + + +And also by the service browser when issuing the state change callbacks: + + if len(self._handlers_to_call) > 0 and not self.zc.done: + with self.zc._handlers_lock: + handler = self._handlers_to_call.popitem(False) + self._service_state_changed.fire( + zeroconf=self.zc, service_type=self.type, name=handler[0], state_change=handler[1] + ) + +Both pychromecast and Home Assistant calls Zeroconf.get_service_info from the service callbacks which means the lock may be held for several seconds which will starve the engine thread. ([`fe86566`](https://github.com/python-zeroconf/python-zeroconf/commit/fe865667e4610d57067a8f710f4d818eaa5e14dc)) + +* Give threads unique names (#257) ([`54d116f`](https://github.com/python-zeroconf/python-zeroconf/commit/54d116fd69a66062f91be04d84ceaebcfb13cc43)) + +* Use equality comparison instead of identity comparison for ints + +Integers aren't guaranteed to have the same identity even though they +may be equal. ([`445d7f5`](https://github.com/python-zeroconf/python-zeroconf/commit/445d7f5dbe38947bd0bd1e3a5b8d649c1819c21f)) + +* Merge 0.26.2 release commit + +I accidentally only pushed 0.26.2 tag (commit ffb42e5836bd) without +pushing the commit to master and now I merged aa9de4de7202 so this is +the best I can do without force-pushing to master. Tag 0.26.2 will +continue to point to that dangling commit. ([`1c4d3fc`](https://github.com/python-zeroconf/python-zeroconf/commit/1c4d3fcbf34b09364e52a773783dc9c924a7b17a)) + +* Improve readability of logged incoming data (#254) ([`aa9de4d`](https://github.com/python-zeroconf/python-zeroconf/commit/aa9de4de7202b3ab0a60f14532d227f63d7d981b)) + +* Add support for multiple types to ServiceBrowsers + +As each ServiceBrowser runs in its own thread there +is a scale problem when listening for many types. + +ServiceBrowser can now accept a list of types +in addition to a single type. 
([`a6ad100`](https://github.com/python-zeroconf/python-zeroconf/commit/a6ad100a60e8434cef6b411208eef98f68d594d3)) + +* Fix race condition where a listener gets +a message before the lock is created. ([`24a0619`](https://github.com/python-zeroconf/python-zeroconf/commit/24a06191ea35469948d12124a07429207b3c1b3b)) + +* Fix flake8 E741 in setup.py (#252) ([`4b1d953`](https://github.com/python-zeroconf/python-zeroconf/commit/4b1d953979287e08f914857867da1000634ca3af)) + + +## v0.26.1 (2020-05-06) + +### Unknown + +* Release version 0.26.1 ([`4c359e2`](https://github.com/python-zeroconf/python-zeroconf/commit/4c359e2e7cdf104efca90ffd9912ea7c7792e3bf)) + +* Remove unwanted pylint directives + +Those are results of a bad conflict resolution I did when merging [1]. + +[1] 552a030eb592 ("Call UpdateService on SRV & A/AAAA updates as well as TXT (#239)") ([`0dd6fe4`](https://github.com/python-zeroconf/python-zeroconf/commit/0dd6fe44ca3895375ba447fed5f138042ab12ebf)) + +* Avoid iterating the entire cache when an A/AAAA address has not changed (#247) + +Iterating the cache is an expensive operation +when there is 100s of devices generating zeroconf +traffic as there can be 1000s of entries in the +cache. 
([`0540342`](https://github.com/python-zeroconf/python-zeroconf/commit/0540342bacd859f38f6d2a3743a7959cd3ae4d02)) + +* Update .gitignore for Visual Studio config files (#244) ([`16431b6`](https://github.com/python-zeroconf/python-zeroconf/commit/16431b6cb51f561a4c5d2897e662b254ca4243ec)) + + +## v0.26.0 (2020-04-26) + +### Unknown + +* Release version 0.26.0 ([`36941ae`](https://github.com/python-zeroconf/python-zeroconf/commit/36941aeb72711f7954d40f0abeab4802174636df)) + +* Call UpdateService on SRV & A/AAAA updates as well as TXT (#239) + +Fix https://github.com/jstasiak/python-zeroconf/issues/235 + +Contains: + +* Add lock around handlers list +* Reverse DNSCache order to ensure newest records take precedence + + When there are multiple records in the cache, the behaviour was + inconsistent. Whilst the DNSCache.get() method returned the newest, + any function which iterated over the entire cache suffered from + a last write winds issue. This change makes this behaviour consistent + and allows the removal of an (incorrect) wait from one of the unit tests. 
([`552a030`](https://github.com/python-zeroconf/python-zeroconf/commit/552a030eb592a0c07feaa7a01ece1464da4b1d0b)) + + +## v0.25.1 (2020-04-14) + +### Unknown + +* Release version 0.25.1 ([`f8fe400`](https://github.com/python-zeroconf/python-zeroconf/commit/f8fe400e4be833728f015a3d6396bfc3f7c185c0)) + +* Update Engine to immediately notify its worker thread (#243) ([`976e3dc`](https://github.com/python-zeroconf/python-zeroconf/commit/976e3dcf9d6d897b063ab6f0b7831bcfa6ac1814)) + +* Remove unstable IPv6 tests from Travis (#241) ([`cf0382b`](https://github.com/python-zeroconf/python-zeroconf/commit/cf0382ba771bcc22284fd719c80a26eaa05ba5cd)) + +* Switch to pytest for test running (#240) + +Nose is dead for all intents and purposes (last release in 2015) and +pytest provide a very valuable feature of printing relevant extra +information in case of assertion failure (from[1]): + + ================================= FAILURES ================================= + _______________________________ test_answer ________________________________ + + def test_answer(): + > assert func(3) == 5 + E assert 4 == 5 + E + where 4 = func(3) + + test_sample.py:6: AssertionError + ========================= short test summary info ========================== + FAILED test_sample.py::test_answer - assert 4 == 5 + ============================ 1 failed in 0.12s ============================= + +This should be helpful in debugging tests intermittently failing on +PyPy. + +Several TestCase.assertEqual() calls have been replaced by plain +assertions now that that method no longer provides anything we can't get +without it. Few assertions have been modified to not explicitly provide +extra information in case of failure – pytest will provide this +automatically. + +Dev dependencies are forced to be the latest versions to make sure +we don't fail because of outdated ones on Travis. 
+ +[1] https://docs.pytest.org/en/latest/getting-started.html#create-your-first-test ([`f071f3d`](https://github.com/python-zeroconf/python-zeroconf/commit/f071f3d49d82ab212b86f889532200c94b36aea6)) + + +## v0.25.0 (2020-04-03) + +### Unknown + +* Release version 0.25.0 ([`0cbced8`](https://github.com/python-zeroconf/python-zeroconf/commit/0cbced809989283893e02914e251a94739a41062)) + +* Improve ServiceInfo documentation ([`e839c40`](https://github.com/python-zeroconf/python-zeroconf/commit/e839c40081ba15e228d447969b725ee42f1ef2ad)) + +* Remove uniqueness assertions + +The assertions, added in [1] and modified in [2] introduced a +regression. When browsing in the presence of devices advertising SRV +records not marked as unique there would be an undesired crash (from [3]): + + Exception in thread zeroconf-ServiceBrowser__hap._tcp.local.: + Traceback (most recent call last): + File "/usr/lib/python3.7/threading.py", line 917, in _bootstrap_inner + self.run() + File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1504, in run + handler(self.zc) + File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1444, in + zeroconf=zeroconf, service_type=self.type, name=name, state_change=state_change + File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1322, in fire + h(**kwargs) + File "browser.py", line 20, in on_service_state_change + info = zeroconf.get_service_info(service_type, name) + File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 2191, in get_service_info + if info.request(self, timeout): + File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1762, in request + out.add_answer_at_time(zc.cache.get_by_details(self.name, _TYPE_SRV, _CLASS_IN), now) + File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 907, in 
add_answer_at_time + assert record.unique + AssertionError + +The intention is to bring those assertions back in a way that only +enforces uniqueness when sending records, not when receiving them. + +[1] bef8f593ae82 ("Ensure all TXT, SRV, A records are unique") +[2] 5e4f496778d9 ("Refactor out unique assertion") +[3] https://github.com/jstasiak/python-zeroconf/issues/236 ([`a79015e`](https://github.com/python-zeroconf/python-zeroconf/commit/a79015e7c4bdc843d97bd5c82ef8ed4eeae01a34)) + +* Rationalize handling of values in TXT records + +* Do not interpret received values; use None if a property has no value +* When encoding values, use either raw bytes or UTF-8 ([`8e3adf8`](https://github.com/python-zeroconf/python-zeroconf/commit/8e3adf8300a6f2b0bc0dcc4cde54d8890e0727e9)) + + +## v0.24.5 (2020-03-08) + +### Unknown + +* Release version 0.24.5 ([`aba2858`](https://github.com/python-zeroconf/python-zeroconf/commit/aba28583f5431f584587770b6c149e4a607a987e)) + +* Resolve memory leak in DNSCache + +When all the records for a given name were removed from the cache, the +name itself that contain the list was never removed. This left an empty list +in memory for every device that was no longer broadcasting on the +network. ([`eac53f4`](https://github.com/python-zeroconf/python-zeroconf/commit/eac53f45bddb8d3d559b1d4672a926b746435771)) + +* Optimize handle_response cache check + +The handle_response loop would encounter a unique record +it would search the cache in order to remove keys that +matched the DNSEntry for the record. + +Since the cache is stored as a list of records with the key as the record name, + we can avoid searching the entire cache each time and on +search for the DNSEntry of the record. In practice this means +with 5000 entries and records in the cache we now only need to search +4 or 5. 
+ +When looping over the cache entries for the name, we now check the expire time +first as its cheaper than calling DNSEntry.__eq__ + +Test environment: + + Home Assistant running on home networking with a /22 + and a significant amount of broadcast traffic + + Testing was done with py-spy v0.3.3 + (https://github.com/benfred/py-spy/releases) + + # py-spy top --pid + +Before: +``` +Collecting samples from '/usr/local/bin/python3 -m homeassistant --config /config' (python v3.7.6) +Total Samples 10200 +GIL: 0.00%, Active: 0.00%, Threads: 35 + + %Own %Total OwnTime TotalTime Function (filename:line) + 0.00% 0.00% 18.13s 18.13s _worker (concurrent/futures/thread.py:78) + 0.00% 0.00% 2.51s 2.56s run (zeroconf/__init__.py:1221) + 0.00% 0.00% 0.420s 0.420s __eq__ (zeroconf/__init__.py:394) + 0.00% 0.00% 0.390s 0.390s handle_read (zeroconf/__init__.py:1260) + 0.00% 0.00% 0.240s 0.670s handle_response (zeroconf/__init__.py:2452) + 0.00% 0.00% 0.230s 0.230s __eq__ (zeroconf/__init__.py:606) + 0.00% 0.00% 0.200s 0.810s handle_response (zeroconf/__init__.py:2449) + 0.00% 0.00% 0.140s 0.150s __eq__ (zeroconf/__init__.py:632) + 0.00% 0.00% 0.130s 0.130s entries (zeroconf/__init__.py:1185) + 0.00% 0.00% 0.090s 0.090s notify (threading.py:352) + 0.00% 0.00% 0.080s 0.080s read_utf (zeroconf/__init__.py:818) + 0.00% 0.00% 0.080s 0.080s __eq__ (zeroconf/__init__.py:678) + 0.00% 0.00% 0.070s 0.080s __eq__ (zeroconf/__init__.py:533) + 0.00% 0.00% 0.060s 0.060s __eq__ (zeroconf/__init__.py:677) + 0.00% 0.00% 0.050s 0.050s get (zeroconf/__init__.py:1146) + 0.00% 0.00% 0.050s 0.050s do_commit (sqlalchemy/engine/default.py:541) + 0.00% 0.00% 0.040s 2.86s run (zeroconf/__init__.py:1226) +``` + +After +``` +Collecting samples from '/usr/local/bin/python3 -m homeassistant --config /config' (python v3.7.6) +Total Samples 10200 +GIL: 7.00%, Active: 61.00%, Threads: 35 + + %Own %Total OwnTime TotalTime Function (filename:line) + 47.00% 47.00% 24.84s 24.84s _worker 
(concurrent/futures/thread.py:78) + 5.00% 5.00% 2.97s 2.97s run (zeroconf/__init__.py:1226) + 1.00% 1.00% 0.390s 0.390s handle_read (zeroconf/__init__.py:1265) + 1.00% 1.00% 0.200s 0.200s read_utf (zeroconf/__init__.py:818) + 0.00% 0.00% 0.120s 0.120s unpack (zeroconf/__init__.py:723) + 0.00% 1.00% 0.120s 0.320s read_name (zeroconf/__init__.py:834) + 0.00% 0.00% 0.100s 0.240s update_record (zeroconf/__init__.py:2440) + 0.00% 0.00% 0.090s 0.090s notify (threading.py:352) + 0.00% 0.00% 0.070s 0.070s update_record (zeroconf/__init__.py:1469) + 0.00% 0.00% 0.060s 0.070s __eq__ (zeroconf/__init__.py:606) + 0.00% 0.00% 0.050s 0.050s acquire (logging/__init__.py:843) + 0.00% 0.00% 0.050s 0.050s unpack (zeroconf/__init__.py:722) + 0.00% 0.00% 0.050s 0.050s read_name (zeroconf/__init__.py:828) + 0.00% 0.00% 0.050s 0.050s is_expired (zeroconf/__init__.py:494) + 0.00% 0.00% 0.040s 0.040s emit (logging/__init__.py:1028) + 1.00% 1.00% 0.040s 0.040s __init__ (zeroconf/__init__.py:386) + 0.00% 0.00% 0.040s 0.040s __enter__ (threading.py:241) +``` ([`37fa0a0`](https://github.com/python-zeroconf/python-zeroconf/commit/37fa0a0d59a5b5d09295a462bf911e82d2d770ed)) + +* Support cooperating responders (#224) ([`1ca023f`](https://github.com/python-zeroconf/python-zeroconf/commit/1ca023fae4b586679446ceaf3e2e9955ea5bf180)) + +* Remove duplciate update messages sent to listeners + +The prior code used to send updates even when the new record was identical to the old. 
This resulted in duplicate update messages
([`bef8f59`](https://github.com/python-zeroconf/python-zeroconf/commit/bef8f593ae820eb8465934de91eb27468edf6444)) + + +## v0.24.4 (2019-12-30) + +### Unknown + +* Release version 0.24.4 ([`29432bf`](https://github.com/python-zeroconf/python-zeroconf/commit/29432bfffd057cf4da7636ba0c28c9d8a7ad4357)) + +* Clean up output of ttl remaining to be whole seconds only ([`ba1b78d`](https://github.com/python-zeroconf/python-zeroconf/commit/ba1b78dbdcc64f8d35c951e7ca53d2898e7d7900)) + +* Clean up format to cleanly separate [question]=ttl,answer ([`4b735dc`](https://github.com/python-zeroconf/python-zeroconf/commit/4b735dc5411f7b563f23b60b5c2aa806151cca1a)) + +* Update DNS entries so all subclasses of DNSRecord use to_string for display + +All records based on DNSRecord now properly use to_string in repr, some were +only dumping the answer without the question (inconsistent). ([`8ccad54`](https://github.com/python-zeroconf/python-zeroconf/commit/8ccad54dab4a0ab7f573996f6fc0c2f2bad7eafe)) + +* Fix resetting of TTL (#209) + +Fix resetting of TTL + +Previously the reset_ttl method changed the time created and the TTL value, but did not change the expiration time or stale times. As a result a record would expire even when this method had been called. ([`b47efd8`](https://github.com/python-zeroconf/python-zeroconf/commit/b47efd8eed0b5ed9d3b6bca8573a6ed1916c982a)) + + +## v0.24.3 (2019-12-23) + +### Unknown + +* Release version 0.24.3 ([`2316027`](https://github.com/python-zeroconf/python-zeroconf/commit/2316027e5e96d8f10fae7607da5b72a9bab819fc)) + +* Fix import-time TypeError on CPython 3.5.2 + +The error: TypeError: 'ellipsis' object is not iterable." + +Explanation can be found here: https://github.com/jstasiak/python-zeroconf/issues/208 + +Closes GH-208. 
([`f53e24b`](https://github.com/python-zeroconf/python-zeroconf/commit/f53e24bddb3a6cb242cace2a541ed507e823be33)) + + +## v0.24.2 (2019-12-17) + +### Unknown + +* Release version 0.24.2 ([`76bc675`](https://github.com/python-zeroconf/python-zeroconf/commit/76bc67532ad26f54c194e1e6537d2da4390f83e2)) + +* Provide and enforce type hints everywhere except for tests + +The tests' time will come too in the future, though, I think. I believe +nose has problems with running annotated tests right now so let's leave +it for later. + +DNSEntry.to_string renamed to entry_to_string because DNSRecord +subclasses DNSEntry and overrides to_string with a different signature, +so just to be explicit and obvious here I renamed it – I don't think any +client code will break because of this. + +I got rid of ServicePropertiesType in favor of generic Dict because +having to type all the properties got annoying when variance got +involved – maybe it'll be restored in the future but it seems like too +much hassle now. ([`f771587`](https://github.com/python-zeroconf/python-zeroconf/commit/f7715874c2242b95cf9815549344ea66ac107b6e)) + +* Fix get_expiration_time percent parameter annotation + +It takes integer percentage values at the moment so let's document that. ([`5986bf6`](https://github.com/python-zeroconf/python-zeroconf/commit/5986bf66e77e77f9e0b6ba43a4758ecb0da04ff6)) + +* Add support for AWDL interface on macOS + +The API is inspired by Apple's NetService.includesPeerToPeer +(see https://developer.apple.com/documentation/foundation/netservice/1414086-includespeertopeer) ([`fcafdc1`](https://github.com/python-zeroconf/python-zeroconf/commit/fcafdc1e285cc5c3c1f2c413ac9309d3426179f4)) + + +## v0.24.1 (2019-12-16) + +### Unknown + +* Release version 0.24.1 ([`53dd06c`](https://github.com/python-zeroconf/python-zeroconf/commit/53dd06c37f6205129e81f5c6b69e508a54f94d07)) + +* Bugfix: TXT record's name is never equal to Service Browser's type. 
+ +TXT record's name is never equal to Service Browser's type. We should +check whether TXT record's name ends with Service Browser's type. +Otherwise, we never get updates of TXT records. ([`2a597ee`](https://github.com/python-zeroconf/python-zeroconf/commit/2a597ee80906a27effd442d033de10b5129e6900)) + +* Bugfix: Flush outdated cache entries when incoming record is unique. + +According to RFC 6762 10.2. Announcements to Flush Outdated Cache Entries, +when the incoming record's cache-flush bit is set (record.unique == True +in this module), "Instead of merging this new record additively into the +cache in addition to any previous records with the same name, rrtype, and +rrclass, all old records with that name, rrtype, and rrclass that were +received more than one second ago are declared invalid, and marked to +expire from the cache in one second." ([`1d39b3e`](https://github.com/python-zeroconf/python-zeroconf/commit/1d39b3edd141093f9e579ab83377fe8f5ecb357d)) + +* Change order of equality check to favor cheaper checks first + +Comparing two strings is much cheaper than isinstance, so we should try +those first + +A performance test was run on a network with 170 devices running Zeroconf. +There was a ServiceBrowser running on a separate thread while a timer ran +on the main thread that forced a thread switch every 2 seconds (to include +the effect of thread switching in the measurements). Every minute, +a Zeroconf broadcast was made on the network. + +This was run for an hour on a Macbook Air from 2015 (Intel Core +i7-5650U) using Ubuntu 19.10 and Python 3.7, both before this commit and +after.
+ +These are the results of the performance tests: +Function Before count Before time Before time per count After count After time After time per count Time reduction +DNSEntry.__eq__ 528 0.001s 1.9μs 538 0.001s 1.9μs 1.9% +DNSPointer.__eq__ 24369256 (24.3M) 134.641s 5.5μs 25989573 (26.0M) 86.405s 3.3μs 39.8% +DNSText.__eq__ 52966716 (53.0M) 190.640s 3.6μs 53604915 (53.6M) 169.104s 3.2μs 12.4% +DNSService.__eq__ 52620538 (52.6M) 171.660s 3.3μs 56557448 (56.6M) 170.222s 3.0μs 7.8% ([`815ac77`](https://github.com/python-zeroconf/python-zeroconf/commit/815ac77e9146c37afd7c5389ed45adee9f1e2e36)) + +* Don't recalculate the expiration and stale time every update + +I have a network with 170 devices running Zeroconf. Every minute +a zeroconf request for broadcast is cast out. Then we were listening for +Zeroconf devices on that network. + +To get a more realistic test, the Zeroconf ServiceBrowser is run on +a separate thread from a main thread. On the main thread an I/O limited +call to QNetworkManager is made every 2 seconds, + +in order to include performance penalties due to thread switching. The +experiment was run on a MacBook Air 2015 (Intel Core i7-5650U) through +Ubuntu 19.10 and Python 3.7. + +This was left running for exactly one hour, both before and after this commit. + +Before this commit, there were 132107499 (132M) calls to the +get_expiration_time function, totalling 141.647s (just over 2 minutes). + +After this commit, there were 1661203 (1.6M) calls to the +get_expiration_time function, totalling 2.068s. + +This saved about 2 minutes of processing time out of the total 60 minutes, +on average 3.88% processing power on the tested CPU. It is expected to see +similar improvements on all CPU architectures. ([`2e9699c`](https://github.com/python-zeroconf/python-zeroconf/commit/2e9699c542f691fc605e4a1c03cbf496273a9835)) + +* Significantly improve the speed of the entries function of the cache + +Tested this with Python 3.6.8, Fedora 28.
This was done in a network with +a lot of discoverable devices. + +before: +Total time: 1.43086 s + +Line # Hits Time Per Hit % Time Line Contents +============================================================== + 1138 @profile + 1139 def entries(self): + 1140 """Returns a list of all entries""" + 1141 2063 3578.0 1.7 0.3 if not self.cache: + 1142 2 3.0 1.5 0.0 return [] + 1143 else: + 1144 # avoid size change during iteration by copying the cache + 1145 2061 22051.0 10.7 1.5 values = list(self.cache.values()) + 1146 2061 1405227.0 681.8 98.2 return reduce(lambda a, b: a + b, values) + +After: +Total time: 0.43725 s + +Line # Hits Time Per Hit % Time Line Contents +============================================================== + 1138 @profile + 1139 def entries(self): + 1140 """Returns a list of all entries""" + 1141 3651 10171.0 2.8 2.3 if not self.cache: + 1142 2 7.0 3.5 0.0 return [] + 1143 else: + 1144 # avoid size change during iteration by copying the cache + 1145 3649 67054.0 18.4 15.3 values = list(self.cache.values()) + 1146 3649 360018.0 98.7 82.3 return list(itertools.chain.from_iterable(values)) ([`157fc20`](https://github.com/python-zeroconf/python-zeroconf/commit/157fc2003318d785d07b362e1fd2ba3fe5d373f0)) + +* Fix the formatting of the IPv6 section in the readme ([`6ab7dbf`](https://github.com/python-zeroconf/python-zeroconf/commit/6ab7dbf27a2086e20f4486e693e2091d043af1db)) + + +## v0.24.0 (2019-11-19) + +### Unknown + +* Release version 0.24.0 ([`f03dc42`](https://github.com/python-zeroconf/python-zeroconf/commit/f03dc42d6234419053bda18ca6f2b90bec1b9257)) + +* Improve type hint coverage ([`c827f9f`](https://github.com/python-zeroconf/python-zeroconf/commit/c827f9fdc4c58433143ea8815029c3387b500ff5)) + +* Add py.typed marker (closes #199) + +This required changing to a proper package.
([`41b31cb`](https://github.com/python-zeroconf/python-zeroconf/commit/41b31cb338e8a8a7d1a548662db70d9014e8a352)) + +* Link to the documentation ([`3db9d82`](https://github.com/python-zeroconf/python-zeroconf/commit/3db9d82d888abe880bfdd2fb2c3fe3eddcb48ae9)) + +* Setup basic Sphinx documentation + +Closes #200 ([`1c33e5f`](https://github.com/python-zeroconf/python-zeroconf/commit/1c33e5f5b44732d446d629cc13000cff3527afef)) + +* ENOTCONN is not an error during shutdown + +When `python-zeroconf` is used in conjunction with `eventlet`, `select.select()` will return with an error code equal to `errno.ENOTCONN` instead of `errno.EBADF`. As a consequence, an exception is shown in the console during shutdown. I believe that it should not cause any harm to treat `errno.ENOTCONN` the same way as `errno.EBADF` to prevent this exception. ([`c86423a`](https://github.com/python-zeroconf/python-zeroconf/commit/c86423ab0223bab682614e18a6a09050dfc80087)) + +* Rework exposing IPv6 addresses on ServiceInfo + +* Return backward compatibility for ServiceInfo.addresses by making + it return V4 addresses only +* Add ServiceInfo.parsed_addresses for convenient access to addresses +* Raise TypeError if addresses are not provided as bytes (otherwise + an ugly assertion error is raised when sending) +* Add more IPv6 unit tests ([`98a1ce8`](https://github.com/python-zeroconf/python-zeroconf/commit/98a1ce8b99ddb03de9f6cccca49396fcf177e0d0)) + +* Finish AAAA records support + +The correct record type was missing in a few places. Also use +addresses_by_version(All) in preparation for switching addresses +to V4 by default. ([`aae7fd3`](https://github.com/python-zeroconf/python-zeroconf/commit/aae7fd3ba851d1894732c4270cef745127cc03da)) + +* Test with pypy3.6 + +Right now this is available as pypy3 in Travis CI. Running black on PyPy +needs to be disabled for now because of an issue[1] that's been patched +only recently and it's not available in Travis yet. 
+ +[1] https://bitbucket.org/pypy/pypy/issues/2985/pypy36-osreplace-pathlike-typeerror ([`fec839a`](https://github.com/python-zeroconf/python-zeroconf/commit/fec839ae4fdcb870066fff855809583dcf7d7a17)) + +* Stop specifying precise pypy3.5 version + +This allows us to test with the latest available one. ([`c2e8bde`](https://github.com/python-zeroconf/python-zeroconf/commit/c2e8bdebc6cec128d01197d53c3402278a4b62ed)) + +* Simplify Travis CI configuration regarding Python 3.7 + +Selecting xenial manually is no longer needed. ([`5359ea0`](https://github.com/python-zeroconf/python-zeroconf/commit/5359ea0a0b4cdca0854ae97c5d11036633102c67)) + +* Test with Python 3.8 ([`15118c8`](https://github.com/python-zeroconf/python-zeroconf/commit/15118c837a148a37edd29a20294e598ecf09c3cf)) + +* Make AAAA records work (closes #52) (#191) + +This PR incorporates changes from the earlier PR #179 (thanks to Mikael Pahmp), adding tests and a few more fixes to make AAAA records work in practice. + +Note that changing addresses to contain IPv6 addresses may be considered a breaking change, for example, for consumers that unconditionally apply inet_aton to them. I'm introducing a new function to be able to retrieve only addresses from one family. ([`5bb9531`](https://github.com/python-zeroconf/python-zeroconf/commit/5bb9531be48f6f1e119643677c36d9e714204a8b)) + +* Improve static typing coverage ([`e5323d8`](https://github.com/python-zeroconf/python-zeroconf/commit/e5323d8c9795c59019173b8d202a50a49c415039)) + +* Add additional recommended records to PTR responses (#184) + +RFC6763 indicates a server should include the SRV/TXT/A/AAAA records +when responding to a PTR record request. This optimization ensures +the client doesn't have to then query for these additional records.
+ +It has been observed that when multiple Windows 10 machines are monitoring +for the same service, this unoptimized response to the PTR record +request can cause extremely high CPU usage in both the DHCP Client +& Device Association service (I suspect due to all clients having to +then sending/receiving the additional queries/responses). ([`ea64265`](https://github.com/python-zeroconf/python-zeroconf/commit/ea6426547f79c32c6d5d3bcc2d0a261bf503197a)) + +* Rename IpVersion to IPVersion + +A follow up to 3d5787b8c5a92304b70c04f48dc7d5cec8d9aac8. ([`ceb602c`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb602c0d1bc1d3a269fd233b072a9b929076438)) + +* First stab at supporting listening on IPv6 interfaces + +This change adds basic support for listening on IPv6 interfaces. +Some limitations exist for non-POSIX platforms, pending fixes in +Python and in the ifaddr library. Also dual V4-V6 sockets may not +work on all BSD platforms. As a result, V4-only is used by default. + +Unfortunately, Travis does not seem to support IPv6, so the tests +are disabled on it, which also leads to coverage decrease. ([`3d5787b`](https://github.com/python-zeroconf/python-zeroconf/commit/3d5787b8c5a92304b70c04f48dc7d5cec8d9aac8)) + + +## v0.23.0 (2019-06-04) + +### Unknown + +* Release version 0.23.0 ([`7bd0436`](https://github.com/python-zeroconf/python-zeroconf/commit/7bd04363c7ff0f583a17cc2fac42f9a9c1724769)) + +* Add support for multiple addresses when publishing a service (#170) + +This is a rebased and fixed version of PR #27, which also adds compatibility shim for ServiceInfo.address and does a proper deprecation for it. + +* Present all addresses that are available. + +* Add support for publishing multiple addresses. + +* Add test for backwards compatibility. 
+ +* Provide proper deprecation of the "address" argument and field + +* Raise deprecation warnings when address is used +* Add a compatibility property to avoid breaking existing code + (based on suggestion by Bas Stottelaar in PR #27) +* Make addresses keyword-only, so that address can be eventually + removed and replaced with it without breaking consumers +* Raise TypeError instead of an assertion on conflicting address + and addresses + +* Disable black on ServiceInfo.__init__ until black is fixed + +Due to https://github.com/python/black/issues/759 black produces +code that is invalid Python 3.5 syntax even with --target-version py35. +This patch disables reformatting for this call (it doesn't seem to be +possible per line) until it's fixed. ([`c787610`](https://github.com/python-zeroconf/python-zeroconf/commit/c7876108150cd251786db4ab52dadd1b2283d262)) + +* Makefile: be specific which files to check with black (#169) + +Otherwise black tries to check the "env" directory, which fails. ([`6b85a33`](https://github.com/python-zeroconf/python-zeroconf/commit/6b85a333de21fa36187f081c3c115c8af40d7055)) + +* Run black --check as part of CI to enforce code style ([`12477c9`](https://github.com/python-zeroconf/python-zeroconf/commit/12477c954e7f051d10152f9ab970e28fd4222b30)) + +* Refactor the CI script a bit to make adding black check easier ([`69ad22c`](https://github.com/python-zeroconf/python-zeroconf/commit/69ad22cf852a12622f78aa2f4e7cf20c2d395db2)) + +* Reformat the code using Black + +We could use some style consistency in the project and Black looks like +the best tool for the job. + +Two flake8 errors are being silenced from now on: + +* E203 whitespace before : +* W503 line break before binary operator + +Both are to satisfy Black-formatted code (and W503 is somewhat against +the latest PEP8 recommendations regarding line breaks and binary +operators in new code).
([`beb596c`](https://github.com/python-zeroconf/python-zeroconf/commit/beb596c345b0764bdfe1a828cfa744bcc560cf32)) + +* Add support for MyListener call getting updates to service TXT records (2nd attempt) (#166) + +Add support for MyListener call getting updates to service TXT records + +At the moment, the implementation supports notification to the ServiceListener class for additions and removals of service, but for service updates to the TXT record, the client must poll the ServiceInfo class. This draft PR provides a mechanism to have a callback on the ServiceListener class be invoked when the TXT record changes. ([`d4e06bc`](https://github.com/python-zeroconf/python-zeroconf/commit/d4e06bc54098bfa7a863bcc11bb9e2035738c8f5)) + +* Remove Python 3.4 from the Python compatibility section + +I forgot to do this in 4a02d0489da80e8b9e8d012bb7451cd172c753ca. ([`e1c2b00`](https://github.com/python-zeroconf/python-zeroconf/commit/e1c2b00c772a1538a6682c45884bbe89c8efba60)) + +* Drop Python 3.4 support (it's dead now) + +See https://devguide.python.org/#status-of-python-branches ([`4a02d04`](https://github.com/python-zeroconf/python-zeroconf/commit/4a02d0489da80e8b9e8d012bb7451cd172c753ca)) + + +## v0.22.0 (2019-04-27) + +### Unknown + +* Prepare release 0.22.0 ([`db1dcf6`](https://github.com/python-zeroconf/python-zeroconf/commit/db1dcf682e453766b53773d70c0091b81a87a192)) + +* Add arguments to set TTLs via ServiceInfo ([`ecc021b`](https://github.com/python-zeroconf/python-zeroconf/commit/ecc021b7a3cec863eed5a3f71a1f28e3026c25b0)) + +* Use recommended TTLs with overrides via ServiceInfo ([`a7aedb5`](https://github.com/python-zeroconf/python-zeroconf/commit/a7aedb58649f557a5e372fc776f98457ce84eb39)) + +* ttl: modify default used to respond to _services queries ([`f25989d`](https://github.com/python-zeroconf/python-zeroconf/commit/f25989d8cdae8f77e19eba70f236dd8103b33e8f)) + +* Fix service removal packets not being sent on shutdown 
([`57310e1`](https://github.com/python-zeroconf/python-zeroconf/commit/57310e185a4f924dd257edd64f866da685a786c6)) + +* Adjust query intervals to match RFC 6762 (#159) + +* Limit query backoff time to one hour as-per rfc6762 section 5.2 +* tests: monkey patch backoff limit to focus testing on TTL expiry +* tests: speed up integration test +* tests: add test of query backoff interval and limit +* Set initial query interval to 1 second as-per rfc6762 sec 5.2 +* Add comments around timing constants +* tests: fix linting errors +* tests: fix float assignment to integer var + + +Sets the repeated query backoff limit to one hour as opposed to 20 seconds, reducing unnecessary network traffic +Adds a test for the behaviour of the backoff procedure +Sets the first repeated query to happen after one second as opposed to 500ms ([`bee8abd`](https://github.com/python-zeroconf/python-zeroconf/commit/bee8abdba49e2275d203e3b0b4a3afac330ec4ea)) + +* Turn on and address mypy check_untyped_defs ([`4218d75`](https://github.com/python-zeroconf/python-zeroconf/commit/4218d757994467ee710b0cad034ea1fb6035d3ea)) + +* Turn on and address mypy warn-return-any ([`006e614`](https://github.com/python-zeroconf/python-zeroconf/commit/006e614315c12e5232e6168ce0bacf0dc056ba8a)) + +* Turn on and address mypy no-implicit-optional ([`071c6ed`](https://github.com/python-zeroconf/python-zeroconf/commit/071c6edb924b6bc9b67859dc9860cfe09cc98d07)) + +* Add reminder to enable disallow_untyped_calls for mypy ([`24bb44f`](https://github.com/python-zeroconf/python-zeroconf/commit/24bb44f858cd325d7ff2892c53dc1dd9f26ed768)) + +* Enable some more mypy warnings ([`183a846`](https://github.com/python-zeroconf/python-zeroconf/commit/183a84636a9d4fec6306d065a4f855fec95086e4)) + +* Run mypy on test_zeroconf.py too + +This will reveal issues with current type hints as demonstrated by a +commit/issue to be submitted later, as well as prevent some others +from cropping up meanwhile. 
([`74391d5`](https://github.com/python-zeroconf/python-zeroconf/commit/74391d5c124bf6f899059db93bbf7e99b96d8aad)) + +* Move mypy config to setup.cfg + +Removes need for a separate file, better to have more in one place. ([`2973931`](https://github.com/python-zeroconf/python-zeroconf/commit/29739319ccf71f48c06bc1b74cd193f17fb6b272)) + +* Don't bother with a universal wheel as we're Python >= 3 only ([`9c0f1ab`](https://github.com/python-zeroconf/python-zeroconf/commit/9c0f1ab03b90f87ff1d58278a0b9b77c16195185)) + +* Add unit tests for default ServiceInfo properties. ([`a12c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/a12c3b2a3b4300849e0a4dcdd4df5386286b88d3)) + +* Modify ServiceInfo's __init__ properties' default value. + +This commit modifies the default value of the argument properties of +ServiceInfo’s __init__() to byte array (properties=b’’). This enables +to instantiate it without setting the properties argument. As it is, +and because properties is not mandatory, if a user does not specify +the argument, an exception (AssertionError) is thrown: + +Traceback (most recent call last): + File "src/zeroconf-test.py", line 72, in + zeroconf.register_service(service) + File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 1864, in register_service + self.send(out) + File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 2091, in send + packet = out.packet() + File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 1026, in packet + overrun_answers += self.write_record(answer, time_) + File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 998, in write_record + record.write(self) + File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 579, in write + out.write_string(self.text) + File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 903, in write_string + assert isinstance(value, bytes) +AssertionError + +The argument can be either a dictionary or a byte array. 
The function +_set_properties() will always create a byte array with the user's +properties. Changing the default value to a byte array, avoids the +conversion to byte array and avoids the exception. ([`9321007`](https://github.com/python-zeroconf/python-zeroconf/commit/93210079259bd0973e3b54a90dff971e14abf595)) + +* Fix some spelling errors ([`88fb0e3`](https://github.com/python-zeroconf/python-zeroconf/commit/88fb0e34f902498f6ceb583ce6fa9346745a14ca)) + +* Require flake8 >= 3.6.0, drop pycodestyle restriction + +Fixes current build breakage related to flake8 dependencies. + +The breakage: + +$ make flake8 +flake8 --max-line-length=110 examples *.py +Traceback (most recent call last): + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2329, in resolve + return functools.reduce(getattr, self.attrs, module) +AttributeError: module 'pycodestyle' has no attribute 'break_after_binary_operator' +During handling of the above exception, another exception occurred: +Traceback (most recent call last): + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 182, in load_plugin + self._load(verify_requirements) + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 154, in _load + self._plugin = resolve() + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2331, in resolve + raise ImportError(str(exc)) +ImportError: module 'pycodestyle' has no attribute 'break_after_binary_operator' +During handling of the above exception, another exception occurred: +Traceback (most recent call last): + File "/home/travis/virtualenv/python3.5.6/bin/flake8", line 11, in + sys.exit(main()) + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/cli.py", line 16, in main + app.run(argv) + File 
"/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 412, in run + self._run(argv) + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 399, in _run + self.initialize(argv) + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 381, in initialize + self.find_plugins() + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 197, in find_plugins + self.check_plugins.load_plugins() + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 434, in load_plugins + plugins = list(self.manager.map(load_plugin)) + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 319, in map + yield func(self.plugins[name], *args, **kwargs) + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 432, in load_plugin + return plugin.load_plugin() + File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 189, in load_plugin + raise failed_to_load +flake8.exceptions.FailedToLoadPlugin: Flake8 failed to load plugin "pycodestyle.break_after_binary_operator" due to module 'pycodestyle' has no attribute 'break_after_binary_operator'. ([`73b3620`](https://github.com/python-zeroconf/python-zeroconf/commit/73b3620908cb5e2f54231692c17f6bbb8a42d09d)) + +* Drop flake8-blind-except + +Obsoleted by pycodestyle 2.1's E722. 
([`e3b7e40`](https://github.com/python-zeroconf/python-zeroconf/commit/e3b7e40af52d05264794e2e4d37dfdb1c5d3814a)) + +* Test with PyPy 3.5 5.10.1 ([`51a6f70`](https://github.com/python-zeroconf/python-zeroconf/commit/51a6f7081bd5590ca5ea5418b39172714b7ef1fe)) + +* Fix a changelog typo ([`e08db28`](https://github.com/python-zeroconf/python-zeroconf/commit/e08db282edd8459e35d17ae4e7278106056a0c94)) + + +## v0.21.3 (2018-09-21) + +### Unknown + +* Prepare release 0.21.3 ([`059530d`](https://github.com/python-zeroconf/python-zeroconf/commit/059530d075fe1575ebbab535be67ac7d5ae7caed)) + +* Actually allow underscores in incoming service names + +This was meant to be released earlier, but I failed to merge part of my +patch. + +Fixes: ff4a262adc69 ("Allow underscores in incoming service names") +Closes #102 ([`ae3bd51`](https://github.com/python-zeroconf/python-zeroconf/commit/ae3bd517d84aae631db1cc294caf22541a7f4bd5)) + + +## v0.21.2 (2018-09-20) + +### Unknown + +* Prepare release 0.21.2 ([`af33c83`](https://github.com/python-zeroconf/python-zeroconf/commit/af33c83e72d6fa4171342f78d15b2f28038f1318)) + +* Fix typing-related TypeError + +Older typing versions don't allow what we did[1]. We don't really need +to be that precise here anyway. + +The error: + + $ python + Python 3.5.2 (default, Nov 23 2017, 16:37:01) + [GCC 5.4.0 20160609] on linux + Type "help", "copyright", "credits" or "license" for more information. 
+ >>> import zeroconf + Traceback (most recent call last): + File "", line 1, in + File "/scraper/venv/lib/python3.5/site-packages/zeroconf.py", line 320, in + OptionalExcInfo = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] + File "/usr/lib/python3.5/typing.py", line 649, in __getitem__ + return Union[arg, type(None)] + File "/usr/lib/python3.5/typing.py", line 552, in __getitem__ + dict(self.__dict__), parameters, _root=True) + File "/usr/lib/python3.5/typing.py", line 512, in __new__ + for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): + File "/usr/lib/python3.5/typing.py", line 512, in + for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): + File "/usr/lib/python3.5/typing.py", line 1077, in __subclasscheck__ + if super().__subclasscheck__(cls): + File "/usr/lib/python3.5/abc.py", line 225, in __subclasscheck__ + for scls in cls.__subclasses__(): + TypeError: descriptor '__subclasses__' of 'type' object needs an argument + +Closes #141 +Fixes: 1f33c4f8a805 ("Introduce some static type analysis to the codebase") + +[1] https://github.com/python/typing/issues/266 ([`627c22e`](https://github.com/python-zeroconf/python-zeroconf/commit/627c22e19166c123244567410adc390ed368eca7)) + + +## v0.21.1 (2018-09-17) + +### Unknown + +* Prepare release 0.21.1 ([`1684a46`](https://github.com/python-zeroconf/python-zeroconf/commit/1684a46d57a437fc8cc7b5887d51440424c6ded5)) + +* Bringing back compatibility with python 3.4 (#140) + +The latest release of zeroconf in PyPI (0.21.0) breaks compatibility with python 3.4 due to an unstated dependency on the typing package. 
([`919191c`](https://github.com/python-zeroconf/python-zeroconf/commit/919191ca266d8d589ad33cc6dd2c197f75092634)) + + +## v0.21.0 (2018-09-16) + +### Unknown + +* Prepare release 0.21.0 ([`b03cee3`](https://github.com/python-zeroconf/python-zeroconf/commit/b03cee348973469e9ebfce6e9b0e0a367c146401)) + +* Allow underscores in incoming service names + +There are real world cases of services broadcasting names with +underscores in them so tough luck, let's accept those to be compatible. +Registering service names with underscores in them continues to be +disallowed. + +Closes https://github.com/jstasiak/python-zeroconf/issues/102 ([`ff4a262`](https://github.com/python-zeroconf/python-zeroconf/commit/ff4a262adc6926905c71e2952b3159b84a974d02)) + +* Don't mention unsupported Python versions ([`208ec1b`](https://github.com/python-zeroconf/python-zeroconf/commit/208ec1ba58a6ebf7160a760feffe62cf366137e5)) + +* using ifaddr instead of netifaces as ifaddr is a pure python lib ([`7c0500e`](https://github.com/python-zeroconf/python-zeroconf/commit/7c0500ee19869ce0e85e58a26b8fdb0868e0b142)) + +* Show that we actually support Python 3.7 + +We can't just add Python 3.7 like earlier versions because Travis +doesn't support it at the moment[1]. + +[1] https://github.com/travis-ci/travis-ci/issues/9815 ([`418b4b8`](https://github.com/python-zeroconf/python-zeroconf/commit/418b4b814e6483a20a5cac2178a2cd815d5b91c0)) + +* Introduce some static type analysis to the codebase + +The main purpose of this change is to make the code easier to read and +explore. Preventing some classes of bugs is a bonus. 
+ +On top of just adding type hints and enabling mypy to verify them the +following changes were needed: +* casts in places where we know what we're doing but mypy can't know it +* RecordUpdateListener interfaces extracted out of ServiceBrowser and + ServiceInfo classes so that we have a common name we can use in places + where we only need an instance of one of those classes to call to call + update_record() on it. This way we can keep mypy happy +* assert isinstance(...) blocks to provide hints for mypy as to what + concrete types we're dealing with +* some local type mixing removed (previously we'd first assign a value + of one type to a variable and then overwrite with another type) +* explicit "return None" in case of function that returns optionally - + mypy enforces explicit return in this case ([`1f33c4f`](https://github.com/python-zeroconf/python-zeroconf/commit/1f33c4f8a8050cdfb051c0da7ebe80a9ff24cf25)) + +* Fix a logging call + +The format string expects three parameters, one of them was accidentally +passed to the log_warning_once() method instead. + +Fixes: aa1f48433cbd ("Improve test coverage, and fix issues found") ([`23fdcce`](https://github.com/python-zeroconf/python-zeroconf/commit/23fdcce35fa020d09267e6fa57cf21cfb744a2c4)) + +* Fix UTF-8 multibyte name compression ([`e11700f`](https://github.com/python-zeroconf/python-zeroconf/commit/e11700ff9ea9eb429c701dfb73c4cf2c45994015)) + +* Remove some legacy cruft + +The latest versions of flake8 and flake8-import-order can be used just +fine now (they've been ok for some time). + +Since with google style flake8-import-order would generate more issues +than with the cryptography style I decided to switch and fix one thing +it complained about. + +We switch to pycodestyle instead of pinned pep8 version as that pep8 +version can't be installed with latest flake8 and the name of the +package has been changed to pycodestyle. 
We still pin the version though +as there's a bad interaction between the latest pycodestyle and the +latest flake8. ([`6fe8132`](https://github.com/python-zeroconf/python-zeroconf/commit/6fe813212f46576cf305c17ee815536a83128fce)) + +* Fix UnboundLocalError for count after loop + +This code throws an `UnboundLocalError` as `count` doesn't exist in the `else` branch of the for loop. ([`42c8662`](https://github.com/python-zeroconf/python-zeroconf/commit/42c866298725a8e9667bf1230be845e856cb382a)) + +* examples: Add an example of resolving a known service by service name + +To use: +* `avahi-publish-service -s 'My Service Name' _test._tcp 0` +* `./examples/resolver.py` should print a `ServiceInfo` +* Kill the `avahi-publish-service` process +* `./examples/resolver.py` should print `None` + +Signed-off-by: Simon McVittie ([`703d971`](https://github.com/python-zeroconf/python-zeroconf/commit/703d97150de1c74b7c1a62b59c1ff7081dec8256)) + +* Handle Interface Quirk to make it work on WSL (Windows Subsystem for Linux) ([`374f45b`](https://github.com/python-zeroconf/python-zeroconf/commit/374f45b783caf35b26f464130fbd1ff62591af2e)) + +* Make some variables PEP 8-compatible + +Previously pep8-naming would complain about those: + +test_zeroconf.py:221:10: N806 variable 'numQuestions' in function should be lowercase + (numQuestions, numAnswers, numAuthorities, ([`49fc106`](https://github.com/python-zeroconf/python-zeroconf/commit/49fc1067245b2d3a7bcc1e7611f36ba8d9a36598)) + +* Fix flake8 (#131) + +* flake8 and therefore Travis should be happy now + +* attempt to fix flake8 + +* happy flake8 ([`53bc65a`](https://github.com/python-zeroconf/python-zeroconf/commit/53bc65af14ed979a5234bfa03c1295a2b27f6e40)) + +* implementing unicast support (#124) + +* implementing unicast support + +* use multicast=False for outgoing dns requests in unicast mode ([`826c961`](https://github.com/python-zeroconf/python-zeroconf/commit/826c9619797e4cf1f2c39b95ed1c93faed7eee2a)) + +* Remove unwanted
whitespace ([`d0d1cfb`](https://github.com/python-zeroconf/python-zeroconf/commit/d0d1cfbb31f0ea6bd08b0c8ffa97ba3d7604bccc)) + +* Fix TTL handling for published service, align default TTL with RFC6762 (#113) + +Honor TTL passed in service registration +Set default TTL to 120 s as recommended by RFC6762 ([`14e3ad5`](https://github.com/python-zeroconf/python-zeroconf/commit/14e3ad5f15f5a0f5235ad7dbb22924b4b5ae1c77)) + +* add import error for Python <= 3.3 (#123) ([`fe62ba3`](https://github.com/python-zeroconf/python-zeroconf/commit/fe62ba31a8ab05a948ed6036dc319b1a1fa14e66)) + + +## v0.20.0 (2018-02-21) + +### Unknown + +* Release version 0.20.0 ([`0622570`](https://github.com/python-zeroconf/python-zeroconf/commit/0622570645116b0c45ee03d38b7b308be2026bd4)) + +* Add some missing release information ([`5978bdb`](https://github.com/python-zeroconf/python-zeroconf/commit/5978bdbdab017d06ea496ea6d7c66c672751b255)) + +* Drop support for Python 2 and 3.3 + +This simplifies the code slightly, reduces the number of dependencies +and otherwise speeds up the CI process. If someone *really* needs to use +really old Python they have the option of using older versions of the +package. ([`f22f421`](https://github.com/python-zeroconf/python-zeroconf/commit/f22f421e4e6bf1ca7671b1eb540ba09fbf1e04b1)) + +* Add license and readme file to source tarball (#108) + +Closes #97 ([`6ad04a5`](https://github.com/python-zeroconf/python-zeroconf/commit/6ad04a5d7f6d63c1f48b5948b6ade0e56cafe258)) + +* Allow the usage of newer netifaces in development + +We're being consistent with c5e1f65c19b2f63a09b6517f322d600911fa1e13 +here. 
([`7123f8e`](https://github.com/python-zeroconf/python-zeroconf/commit/7123f8ed7dfd9277245748271d8870f18299b035)) + +* Correct broken __eq__ in child classes to DNSRecord ([`4d6dd73`](https://github.com/python-zeroconf/python-zeroconf/commit/4d6dd73a8313b81bbfef8b074d6fe4878bce4f74)) + +* Refresh ServiceBrowser entries already when 'stale' +Updated integration testcase to test for this. ([`37c5211`](https://github.com/python-zeroconf/python-zeroconf/commit/37c5211980548ab701bba725feeb5395ed1af0a7)) + +* Add new records first in cache entry instead of last (#110) + +* Add new records first in cache entry instead of last + +* Added DNSCache unit test ([`8101b55`](https://github.com/python-zeroconf/python-zeroconf/commit/8101b557199c4d3d001c75a717eafa4d5544142f)) + + +## v0.19.1 (2017-06-13) + +### Unknown + +* Use more recent PyPy3 on Travis CI + +The default PyPy3 is really old (implements Python 3.2) and some +packages won't cooperate with it anymore. ([`d0e4712`](https://github.com/python-zeroconf/python-zeroconf/commit/d0e4712eaa696ff13470b719cb6842260a3ada11)) + +* Release version 0.19.1 ([`1541191`](https://github.com/python-zeroconf/python-zeroconf/commit/1541191090a92ef23b8e3747933c95f7233aa2de)) + +* Allow newer netifaces releases + +The bug that was concerning us[1] is fixed now. 
+ +[1] https://bitbucket.org/al45tair/netifaces/issues/39/netmask-is-always-255255255255 ([`c5e1f65`](https://github.com/python-zeroconf/python-zeroconf/commit/c5e1f65c19b2f63a09b6517f322d600911fa1e13)) + + +## v0.19.0 (2017-03-21) + +### Unknown + +* Release version 0.19.0 ([`ecadb8c`](https://github.com/python-zeroconf/python-zeroconf/commit/ecadb8c30cd8e75da5b6d3e0e93d024f013dbfa2)) + +* Fix a whitespace issue flake8 doesn't like ([`87aa4e5`](https://github.com/python-zeroconf/python-zeroconf/commit/87aa4e587221e982902233ed2c8990ed27a2290f)) + +* Remove outdated example ([`d8686b5`](https://github.com/python-zeroconf/python-zeroconf/commit/d8686b5642d66b2c9ecc6f40b92e1a1a28279f79)) + +* Remove outdated comment ([`5aa6e85`](https://github.com/python-zeroconf/python-zeroconf/commit/5aa6e8546438d76b3fba5e91f9e4d4e3a3901757)) + +* Work around netifaces Windows netmask bug ([`6231d6d`](https://github.com/python-zeroconf/python-zeroconf/commit/6231d6d48d89240d95de9644570baf1b07ab04b0)) + + +## v0.18.0 (2017-02-03) + +### Unknown + +* Release version 0.18.0 ([`48b1949`](https://github.com/python-zeroconf/python-zeroconf/commit/48b19498724825237d3002ee7681b6296c625b12)) + +* Add a missing changelog entry ([`5343510`](https://github.com/python-zeroconf/python-zeroconf/commit/53435104d5fb29847ac561f58e16cb48dd97b9f8)) + +* Handle select errors when closing Zeroconf + +Based on a pull request by someposer[1] (code adapted to work on +Python 3). + +Fixes two pychromecast issues[2][3]. 
+ +[1] https://github.com/jstasiak/python-zeroconf/pull/88 +[2] https://github.com/balloob/pychromecast/issues/59 +[3] https://github.com/balloob/pychromecast/issues/120 ([`6e229f2`](https://github.com/python-zeroconf/python-zeroconf/commit/6e229f2714c8aff6555dfee2bdff34bda980a0c3)) + +* Explicitly support Python 3.6 ([`0a5ea31`](https://github.com/python-zeroconf/python-zeroconf/commit/0a5ea31543941033bcb4b2cb76fa7e125cb33550)) + +* Pin flake8 because flake8-import-order is pinned ([`9f0d8fe`](https://github.com/python-zeroconf/python-zeroconf/commit/9f0d8fe87dedece1365149911ce9587482fe1501)) + +* Drop Python 2.6 support, no excuse to use 2.6 these days ([`56ea542`](https://github.com/python-zeroconf/python-zeroconf/commit/56ea54245eeab9d544d96c38d136f9f47eedcda4)) + + +## v0.17.7 (2017-02-01) + +### Unknown + +* Prepare the 0.17.7 release ([`376e011`](https://github.com/python-zeroconf/python-zeroconf/commit/376e011ad60c051f27632c77e6d50b64cf1defec)) + +* Merge pull request #77 from stephenrauch/fix-instance-name-with-dot + +Allow dots in service instance name ([`9035c6a`](https://github.com/python-zeroconf/python-zeroconf/commit/9035c6a246b6856b5087b1bba9a9f3ce5873fcda)) + +* Allow dots in service instance name ([`e46af83`](https://github.com/python-zeroconf/python-zeroconf/commit/e46af83d35b4430d4577481b371d569797427858)) + +* Merge pull request #75 from stephenrauch/Fix-name-change + +Fix for #29 ([`136dce9`](https://github.com/python-zeroconf/python-zeroconf/commit/136dce985fd66c81159d48b5f40e44349d1070ef)) + +* Fix/Implement duplicate name change (Issue 29) ([`788a48f`](https://github.com/python-zeroconf/python-zeroconf/commit/788a48f78466e048bdfc3028618bc4eaf807ef5b)) + +* some docs, cleanup and a couple of small test cases ([`b629ffb`](https://github.com/python-zeroconf/python-zeroconf/commit/b629ffb9c860a30366fa83b71487b546d6edd15b)) + +* Merge pull request #73 from stephenrauch/simplify-and-fix-pr-70 + +Simplify and fix PR 70 
([`6b67c0d`](https://github.com/python-zeroconf/python-zeroconf/commit/6b67c0d562866e63b81d1ec1c7f540c56244ade1)) + +* Simplify and fix PR 70 ([`2006cdd`](https://github.com/python-zeroconf/python-zeroconf/commit/2006cddf99377f43b528fbafea7d98be9d6282f0)) + +* Merge pull request #72 from stephenrauch/Catch-and-log-sendto-exceptions + +Catch and log sendto() exceptions ([`c3f563f`](https://github.com/python-zeroconf/python-zeroconf/commit/c3f563f6d108d46732a380b7912f8f5c23d5e548)) + +* Catch and log sendto() exceptions ([`0924310`](https://github.com/python-zeroconf/python-zeroconf/commit/0924310415b79f0fa2523494d8a60803ec295e09)) + +* Merge pull request #71 from stephenrauch/improved-test-coverage + +Improve test coverage, and fix issues found ([`254c207`](https://github.com/python-zeroconf/python-zeroconf/commit/254c2077f727d5e130aab2aaec111d58c134bd79)) + +* Improve test coverage, and fix issues found ([`aa1f484`](https://github.com/python-zeroconf/python-zeroconf/commit/aa1f48433cbd4dbf52565ec0c2635e5d52a37086)) + +* Merge pull request #70 from stephenrauch/Limit-size-of-packet + +Limit the size of the packet that can be built ([`208e221`](https://github.com/python-zeroconf/python-zeroconf/commit/208e2219a1268e637e3cf02e1838cb94a6de2f31)) + +* Limit the size of the packet that can be built ([`8355c85`](https://github.com/python-zeroconf/python-zeroconf/commit/8355c8556929fcdb777705c97fc99de6012367b4)) + +* Merge pull request #69 from stephenrauch/name-compression + +Help for oversized packets ([`5d9f40d`](https://github.com/python-zeroconf/python-zeroconf/commit/5d9f40de1a8549633cb5592fafc34d34df172965)) + +* Implement Name Compression ([`59877eb`](https://github.com/python-zeroconf/python-zeroconf/commit/59877ebb1b20ccd2747a0601e30329162ddcba4c)) + +* Drop oversized packets in send() ([`035605a`](https://github.com/python-zeroconf/python-zeroconf/commit/035605ab000fc8a8af94b4b9e1be9b81880b6bca)) + +* Add exception handler for oversized packets 
([`af19c12`](https://github.com/python-zeroconf/python-zeroconf/commit/af19c12ec2286ee49e789a11599551dc43391383)) + +* Add QuietLogger mixin ([`0b77872`](https://github.com/python-zeroconf/python-zeroconf/commit/0b77872f7bb06ba6949c69bbfb70e8ae21f8ff9b)) + +* Improve service name validation error messages ([`fad66ca`](https://github.com/python-zeroconf/python-zeroconf/commit/fad66ca696530d39d8d5ae598e1724077eba8a5e)) + +* Merge pull request #68 from stephenrauch/Handle-dnsincoming-exceptions + +Handle DNSIncoming exceptions ([`6c0a32d`](https://github.com/python-zeroconf/python-zeroconf/commit/6c0a32d6e4bd7be0b7573b95a5325b19dfd509d2)) + +* Make all test cases localhost only ([`080d0c0`](https://github.com/python-zeroconf/python-zeroconf/commit/080d0c09f1e58d4f8c430dac513948e5919e3f3b)) + +* Handle DNS Incoming Exception + +This fixes a regression from removal of some overly broad exception +handling in 0.17.6. This change adds an explicit handler for +DNSIncoming(). Will also log at warn level the first time it sees a +particular parsing exception. 
([`061a2aa`](https://github.com/python-zeroconf/python-zeroconf/commit/061a2aa3c6e8a7c954a313c8a7d396f26f544c2b)) + + +## v0.17.6 (2016-07-08) + +### Testing + +* test: added test for DNS-SD subtype discovery ([`914241b`](https://github.com/python-zeroconf/python-zeroconf/commit/914241b92c3097669e1e8c1a380f6c2f23a14cf8)) + +### Unknown + +* Fix readme to valid reStructuredText, ([`94570b7`](https://github.com/python-zeroconf/python-zeroconf/commit/94570b730aaab606db820b9c4d48b1c313fdaa98)) + +* Prepare release 0.17.6 ([`e168a6f`](https://github.com/python-zeroconf/python-zeroconf/commit/e168a6fa5486d92114fb02d4c40b36f8298a022f)) + +* Merge pull request #61 from stephenrauch/add-python3.5 + +Add python 3.5 to Travis ([`617d9fd`](https://github.com/python-zeroconf/python-zeroconf/commit/617d9fd0db5bef350eaebd13cfcc73803900ad24)) + +* Add python 3.5 to Travis ([`6198e89`](https://github.com/python-zeroconf/python-zeroconf/commit/6198e8909b968430ddac9261f4dd9c508d96db65)) + +* Merge pull request #60 from stephenrauch/delay_ServiceBrowser_connect + +Delay connecting ServiceBrowser() until it is running ([`56d9ac1`](https://github.com/python-zeroconf/python-zeroconf/commit/56d9ac13381a3ae205cb2b9339981a50f0a2eb62)) + +* Delay connecting ServiceBrowser() until it is running ([`6d1370c`](https://github.com/python-zeroconf/python-zeroconf/commit/6d1370cc2aa6d2c125aa924342e224b6b92ef8d9)) + +* Merge pull request #57 from herczy/master + +resolve issue #56: service browser initialization race ([`0225a18`](https://github.com/python-zeroconf/python-zeroconf/commit/0225a18957a26855720d7ab002f3983cb9d76e0e)) + +* resolve issue #56: service browser initialization race ([`1567016`](https://github.com/python-zeroconf/python-zeroconf/commit/15670161c597bc035c0e9411d0bb830b9520589f)) + +* Merge pull request #58 from strahlex/subtype-test + +added test for DNS-SD subtype discovery 
([`4a569fe`](https://github.com/python-zeroconf/python-zeroconf/commit/4a569fe389d2fb5fd4b4f294ae9ebc0e38164e4a)) + +* Merge pull request #53 from stephenrauch/validate_service_names + +Validate service names ([`76a5e99`](https://github.com/python-zeroconf/python-zeroconf/commit/76a5e99f2e772a9462d0f4b3ab4c80f1b0a3b542)) + +* Service Name Validation + +This change validates service, instance and subtype names against +rfc6763. + +Also adds test code for subtypes and provides a fix for issue 37. ([`88fa059`](https://github.com/python-zeroconf/python-zeroconf/commit/88fa0595cd880b6d82ac8580512461e64eb32d6b)) + +* Test Case and fixes for DNSHInfo (#49) + +* Fix ability for a cache lookup to match properly + +When querying for a service type, the response is processed. During the +processing, an info lookup is performed. If the info is not found in +the cache, then a query is sent. Trouble is that the info requested is +present in the same packet that triggered the lookup, and a query is not +necessary. But two problems caused the cache lookup to fail. + +1) The info was not yet in the cache. The call back was fired before +all answers in the packet were cached. + +2) The test for a cache hit did not work, because the cache hit test +uses a DNSEntry as the comparison object. But some of the objects in +the cache are descendents of DNSEntry and have their own __eq__() +defined which accesses fields only present on the descendent. Thus the +test can NEVER work since the descendent's __eq__() will be used. + +Also continuing the theme of some other recent pull requests, add three +_GLOBAL_DONE tests to avoid doing work after the attempted stop, and +thus avoid generating (harmless, but annoying) exceptions during +shutdown + +* Remove unnecessary packet send in ServiceInfo.request() + +When performing an info query via request(), a listener is started, and +a packet is formed. As the packet is formed, known answers are taken +from the cache and placed into the packet. 
Then the packet is sent. +The packet is self received (via multicast loopback, I assume). At that +point the listener is fired and the answers in the packet are propagated +back to the object that started the request. This is a really long way +around the barn. + +The PR queries the cache directly in request() and then calls +update_record(). If all of the information is in the cache, then no +packet is formed or sent or received. This approach was taken because, +for whatever reason, the reception of the packets on windows via the +loopback was proving to be unreliable. The method has the side benefit +of being a whole lot faster. + +This PR also incorporates the joins() from PR #30. In addition it moves +the two joins() in close() to their own thread because they can take +quite a while to execute. + +* Fix locking race condition in Engine.run() + +This fixes a race condition in which the receive engine was waiting +against its condition variable under a different lock than the one it +used to determine if it needed to wait. This was causing the code to +sometimes take 5 seconds to do anything useful. + +When fixing the race condition, decided to also fix the other +correctness issues in the loop which was likely causing the errors that +led to the inclusion of the 'except Exception' catch all. This in turn +allowed the use of EBADF error due to closing the socket during exit to +be used to get out of the select in a timely manner. + +Finally, this allowed reorganizing the shutdown code to shutdown from +the front to the back. That is to say, shutdown the recv socket first, +which then allows a clean join with the engine thread. After the engine +thread exits most everything else is inert as all callbacks have been +unwound. + +* Remove a now invalid test case + +With the restructure of shutdown, Listener() now needs to throw EBADF on +a closed socket to allow a timely and graceful shutdown. 
+ +* Shutdown the service listeners in an organized fashion + +Also adds names to the various threads to make debugging easier. + +* Improve test coverage + +Add more needed shutdown cleanup found via additional test coverage. + +Force timeout calculation from milli to seconds to use floating point. + +* init ServiceInfo._properties + +* Add query support and test case for _services._dns-sd._udp.local. + +* pep8 cleanup + +* Add testcase and fixes for HInfo Record Generation + +The DNSHInfo packet generation code was broken. There was no test case for that +functionality, and adding a test case showed four issues. Two of which were +relative to PY3 string, one of which was a typoed reference to an attribute, +and finally the two fields present in the HInfo record were using the wrong +encoding, which is what necessitated the change from write_string() to +write_character_string(). ([`6b39c70`](https://github.com/python-zeroconf/python-zeroconf/commit/6b39c70fa1ed7cfac89e02e2b3764a9038b87267)) + +* Merge pull request #48 from stephenrauch/Find-Service-Types + +Find service types ([`1dfc40f`](https://github.com/python-zeroconf/python-zeroconf/commit/1dfc40f4da145a55d60a952df90301ee0e5d65c4)) + +* Add query support and test case for _services._dns-sd._udp.local. ([`cfbb157`](https://github.com/python-zeroconf/python-zeroconf/commit/cfbb1572e44c4d8af1b50cb62abc0d426fc8e3ea)) + +* Merge pull request #45 from stephenrauch/master + +Multiple fixes to speed up querys and remove exceptions at shutdown ([`183cd81`](https://github.com/python-zeroconf/python-zeroconf/commit/183cd81d9274bf28c642314df2f9e32f1f60020b)) + +* init ServiceInfo._properties ([`d909942`](https://github.com/python-zeroconf/python-zeroconf/commit/d909942e2c9479819e9113ffb3a354b1d99d6814)) + +* Improve test coverage + +Add more needed shutdown cleanup found via additional test coverage. + +Force timeout calculation from milli to seconds to use floating point. 
([`75232cc`](https://github.com/python-zeroconf/python-zeroconf/commit/75232ccf28a820ee723db072951078eba31145a5)) + +* Shutdown the service listeners in an organized fashion + +Also adds names to the various threads to make debugging easier. ([`ad3c248`](https://github.com/python-zeroconf/python-zeroconf/commit/ad3c248e4b67d5d2e9a4448a56b4e4648284ecd4)) + +* Remove a now invalid test case + +With the restructure of shutdown, Listener() now needs to throw EBADF on +a closed socket to allow a timely and graceful shutdown. ([`7bbee59`](https://github.com/python-zeroconf/python-zeroconf/commit/7bbee590e553a1ff0e4dde3b1fdcf614b7e1ecd5)) + +* Fix locking race condition in Engine.run() + +This fixes a race condition in which the receive engine was waiting +against its condition variable under a different lock than the one it +used to determine if it needed to wait. This was causing the code to +sometimes take 5 seconds to do anything useful. + +When fixing the race condition, decided to also fix the other +correctness issues in the loop which was likely causing the errors that +led to the inclusion of the 'except Exception' catch all. This in turn +allowed the use of EBADF error due to closing the socket during exit to +be used to get out of the select in a timely manner. + +Finally, this allowed reorganizing the shutdown code to shutdown from +the front to the back. That is to say, shutdown the recv socket first, +which then allows a clean join with the engine thread. After the engine +thread exits most everything else is inert as all callbacks have been +unwound. ([`8a110f5`](https://github.com/python-zeroconf/python-zeroconf/commit/8a110f58b02825100f5bdb56c119495ae42ae54c)) + +* Remove unnecessary packet send in ServiceInfo.request() + +When performing an info query via request(), a listener is started, and +a packet is formed. As the packet is formed, known answers are taken +from the cache and placed into the packet. Then the packet is sent. 
+The packet is self received (via multicast loopback, I assume). At that +point the listener is fired and the answers in the packet are propagated +back to the object that started the request. This is a really long way +around the barn. + +The PR queries the cache directly in request() and then calls +update_record(). If all of the information is in the cache, then no +packet is formed or sent or received. This approach was taken because, +for whatever reason, the reception of the packets on windows via the +loopback was proving to be unreliable. The method has the side benefit +of being a whole lot faster. + +This PR also incorporates the joins() from PR #30. In addition it moves +the two joins() in close() to their own thread because they can take +quite a while to execute. ([`c49145c`](https://github.com/python-zeroconf/python-zeroconf/commit/c49145c35de09b2631d8a2b4751d787a6b4dc904)) + +* Fix ability for a cache lookup to match properly + +When querying for a service type, the response is processed. During the +processing, an info lookup is performed. If the info is not found in +the cache, then a query is sent. Trouble is that the info requested is +present in the same packet that triggered the lookup, and a query is not +necessary. But two problems caused the cache lookup to fail. + +1) The info was not yet in the cache. The call back was fired before +all answers in the packet were cached. + +2) The test for a cache hit did not work, because the cache hit test +uses a DNSEntry as the comparison object. But some of the objects in +the cache are descendents of DNSEntry and have their own __eq__() +defined which accesses fields only present on the descendent. Thus the +test can NEVER work since the descendent's __eq__() will be used. 
+ +Also continuing the theme of some other recent pull requests, add three +_GLOBAL_DONE tests to avoid doing work after the attempted stop, and +thus avoid generating (harmless, but annoying) exceptions during +shutdown ([`d8562fd`](https://github.com/python-zeroconf/python-zeroconf/commit/d8562fd3546d6cd27b1ba9e95105ea534649a43e)) + + +## v0.17.5 (2016-03-14) + +### Unknown + +* Prepare release 0.17.5 ([`f33b8f9`](https://github.com/python-zeroconf/python-zeroconf/commit/f33b8f9c182245b14b9b73a86aefedcee4520eb5)) + +* resolve issue #38: size change during iteration ([`fd9d531`](https://github.com/python-zeroconf/python-zeroconf/commit/fd9d531f294e7fa5b9b934f192b061f56eaf1d37)) + +* Installation on system with ASCII encoding + +The default open function in python2 made a best effort to open text files of any encoding. +After 3.0 the encoding has to be set correctly and it defaults to the user preferences. ([`6007537`](https://github.com/python-zeroconf/python-zeroconf/commit/60075379d57664f94fa41a96dea7c7c64489ef3d)) + +* Revert "Switch from netifaces to psutil" + +psutil doesn't seem to work on pypy3: + + Traceback (most recent call last): + File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/failure.py", line 39, in runTest + raise self.exc_val.with_traceback(self.tb) + File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/loader.py", line 414, in loadTestsFromName + addr.filename, addr.module) + File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/importer.py", line 47, in importFromPath + return self.importFromDir(dir_path, fqname) + File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/importer.py", line 94, in importFromDir + mod = load_module(part_fqname, fh, filename, desc) + File "/home/travis/build/jstasiak/python-zeroconf/test_zeroconf.py", line 17, in + import zeroconf as r + File "/home/travis/build/jstasiak/python-zeroconf/zeroconf.py", line 35, in + import psutil + File 
"/home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/__init__.py", line 62, in + from . import _pslinux as _psplatform + File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/_pslinux.py", line 23, in + from . import _psutil_linux as cext + ImportError: unable to load extension module + '/home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/_psutil_linux.pypy3-24.so': + /home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/_psutil_linux.pypy3-24.so: undefined symbol: PyModule_GetState + +Additionally netifaces turns out to be possible to install on Python 3, +therefore making it necessary to investigate the original issue. + +This reverts commit dd907f2eed3768a3c1e3889af84b5dbeb700a1e7. ([`6349d19`](https://github.com/python-zeroconf/python-zeroconf/commit/6349d197b442209331a0ff8676541967f7142991)) + +* fix issue #23 race-condition on ServiceBrowser startup ([`30bd44f`](https://github.com/python-zeroconf/python-zeroconf/commit/30bd44f04f94a9b26622a7213dd9950ae57df21c)) + +* Switch from netifaces to psutil + +netifaces installation on Python 3.x is broken and there doesn't seem to +be any plan to release a working version on PyPI, instead of using its +fork I decided to use another package providing the required +information. + +This closes https://github.com/jstasiak/python-zeroconf/issues/31 + +[1] https://bitbucket.org/al45tair/netifaces/issues/13/0104-install-is-broken-on-python-3x ([`dd907f2`](https://github.com/python-zeroconf/python-zeroconf/commit/dd907f2eed3768a3c1e3889af84b5dbeb700a1e7)) + +* Fix multicast TTL and LOOP options on OpenBSD + +IP_MULTICAST_TTL and IP_MULTICAST_LOOP socket options on OpenBSD don't +accept int, only unsigned char. Otherwise you will get an error: +[Errno 22] Invalid argument. 
([`0f46a06`](https://github.com/python-zeroconf/python-zeroconf/commit/0f46a0609931e6dc299c0473312e434e84abe7b0)) + + +## v0.17.4 (2015-09-22) + +### Unknown + +* Prepare release 0.17.4 ([`0b9093d`](https://github.com/python-zeroconf/python-zeroconf/commit/0b9093de863928d7f13092aaf2be1f0a33f4ead2)) + +* Support kernel versions <3.9 + +added catch of OSError +added catch of socket.error for python2 ([`023426e`](https://github.com/python-zeroconf/python-zeroconf/commit/023426e0f8982640f46bca3dfcd3abeee2cb832f)) + +* Make it explicit who says what in the readme ([`ddb1048`](https://github.com/python-zeroconf/python-zeroconf/commit/ddb10485ef17aec3f37ef70dcb37af167271bfe1)) + + +## v0.17.3 (2015-08-19) + +### Unknown + +* Make the package's status explicit ([`f29c0f4`](https://github.com/python-zeroconf/python-zeroconf/commit/f29c0f475be76f70ecbb1586deb4618180dd1969)) + +* Prepare release 0.17.3 ([`9c3a81a`](https://github.com/python-zeroconf/python-zeroconf/commit/9c3a81af84c3450459795e5fc5142300f9680804)) + +* Add a DNSText __repr__ test -# 0.39.5 +The test helps making sure the situation fixed by +e8299c0527c965f83c1326b18e484652a9eb829c doesn't happen again. 
([`c7567d6`](https://github.com/python-zeroconf/python-zeroconf/commit/c7567d6b065d7460e2022b8cde5dd0b52a3828a7)) - - This is a stub version to initialize python-semantic-release +* Fix DNSText repr Python 3 issue - This version will not be published +Prevents following exception: +``` + File "/Users/paulus/dev/python/netdisco/lib/python3.4/site-packages/zeroconf.py", line 412, in __repr__ + return self.to_string(self.text[:7] + "...") +TypeError: can't concat bytes to str +``` ([`e8299c0`](https://github.com/python-zeroconf/python-zeroconf/commit/e8299c0527c965f83c1326b18e484652a9eb829c)) -# 0.39.4 - - Fix IP changes being missed by ServiceInfo (\#1102) @bdraco +## v0.17.2 (2015-07-12) -# 0.39.3 +### Unknown - - Fix port changes not being seen by ServiceInfo (\#1100) @bdraco +* Release version 0.17.2 ([`d1ee5ce`](https://github.com/python-zeroconf/python-zeroconf/commit/d1ee5ce7558060ea8d92f804172f67f960f814bb)) -# 0.39.2 +* Fix a typo, meant strictly lesser than 0.6 :< ([`dadbbfc`](https://github.com/python-zeroconf/python-zeroconf/commit/dadbbfc9e1787561981807d3e008433a107c1e5e)) - - Performance improvements for parsing incoming packet data (\#1095) - (\#1097) @bdraco +* Restrict flake8-import-order version -# 0.39.1 +There seems to be a bug in 0.6.x, see +https://github.com/public/flake8-import-order/issues/42 ([`4435a2a`](https://github.com/python-zeroconf/python-zeroconf/commit/4435a2a4ae1c0b0877785f1a5047f65bb80a14bd)) - - Performance improvements for constructing outgoing packet data - (\#1090) @bdraco +* Use enum-compat instead of enum34 directly -# 0.39.0 +This is in order for the package's installation to work on Python 3.4+, +solves the same issue as +https://github.com/jstasiak/python-zeroconf/pull/22. 
([`ba89455`](https://github.com/python-zeroconf/python-zeroconf/commit/ba894559f43fa6955989b92533c06fd8e8b92c74)) -Technically backwards incompatible: - - Switch to using async\_timeout for timeouts (\#1081) @bdraco - - Significantly reduces the number of asyncio tasks that are created - when using ServiceInfo or - AsyncServiceInfo +## v0.17.1 (2015-04-10) -# 0.38.7 +### Unknown - - Performance improvements for parsing incoming packet data (\#1076) - @bdraco +* Restrict pep8 version as something depends on it ([`4dbd04b`](https://github.com/python-zeroconf/python-zeroconf/commit/4dbd04b807813384108ff8e4cb5291c2560eed6b)) -# 0.38.6 +* Bump version to 0.17.1 ([`0b8936b`](https://github.com/python-zeroconf/python-zeroconf/commit/0b8936b94011c0783c7d0469b9ebae76cd4d1976)) - - Performance improvements for fetching ServiceInfo (\#1068) @bdraco +* Fix some typos in the readme ([`7c64ebf`](https://github.com/python-zeroconf/python-zeroconf/commit/7c64ebf6129fb6c0c533a1fed618c9d5926d5100)) -# 0.38.5 +* Update README.rst ([`44fa62a`](https://github.com/python-zeroconf/python-zeroconf/commit/44fa62a738335781ecdd789ad636f82e6542ecd2)) - - Fix ServiceBrowsers not getting ServiceStateChange.Removed callbacks - on PTR record expire (\#1064) @bdraco - - ServiceBrowsers were only getting a - ServiceStateChange.Removed callback - when the record was sent with a TTL of 0. ServiceBrowsers now - correctly get a - ServiceStateChange.Removed callback - when the record expires as well. +* Update README.rst ([`a22484a`](https://github.com/python-zeroconf/python-zeroconf/commit/a22484af90c7c4cbdee849d2b75efab2772c3592)) - - Fix missing minimum version of python 3.7 (\#1060) @stevencrader +* Getting an EADDRNOTAVAIL error when adding an address to the multicast group on windows. 
([`93d34f9`](https://github.com/python-zeroconf/python-zeroconf/commit/93d34f925cd8913ff6836f9393cdce15679e4794)) -# 0.38.4 - - Fix IP Address updates when hostname is uppercase (\#1057) @bdraco - - ServiceBrowsers would not callback updates when the ip address - changed if the hostname contained uppercase characters +## v0.17.0 (2015-04-10) -# 0.38.3 +### Unknown -Version bump only, no changes from 0.38.2 +* Do 0.17.0 release ([`a6d75b3`](https://github.com/python-zeroconf/python-zeroconf/commit/a6d75b3d63a0c13c63473910b832e6db12635e79)) -# 0.38.2 +* Advertise pypy3 support ([`4783611`](https://github.com/python-zeroconf/python-zeroconf/commit/4783611de72ac11bdbfea9e4324e58746a91e70a)) - - Make decode errors more helpful in finding the source of the bad - data (\#1052) @bdraco +* Handle recent flake8 change ([`0009b5e`](https://github.com/python-zeroconf/python-zeroconf/commit/0009b5ea2bca77f395eb2bacc69d0dcfa5dd37dc)) -# 0.38.1 +* Describe recent changes ([`5c32a27`](https://github.com/python-zeroconf/python-zeroconf/commit/5c32a27a6ae0cccf7af25961cd98560a5173b065)) - - Improve performance of query scheduler (\#1043) @bdraco - - Avoid linear type searches in ServiceBrowsers (\#1044) @bdraco +* Add pypy3 build ([`a298785`](https://github.com/python-zeroconf/python-zeroconf/commit/a298785cf63d26b184495f972c619d31515a1468)) -# 0.38.0 +* Restore old listener interface (and example) for now ([`c748294`](https://github.com/python-zeroconf/python-zeroconf/commit/c748294fdc6f3bf527f62d4c0cb76ace32890128)) - - Handle Service types that end with another service type (\#1041) - @apworks1 +* Fix test breakage ([`b5fb3e8`](https://github.com/python-zeroconf/python-zeroconf/commit/b5fb3e86a688f6161c1292ccdffeec9f455c1fbd)) -Backwards incompatible: +* Prepare for new release ([`275a22b`](https://github.com/python-zeroconf/python-zeroconf/commit/275a22b997331d499526293b98faff11ca6edea5)) - - Dropped Python 3.6 support (\#1009) @bdraco +* Move self test example out of main module 
([`ac5a63e`](https://github.com/python-zeroconf/python-zeroconf/commit/ac5a63ece96fbf9d64e41e7a4867cc1d8b2f6b96)) -# 0.37.0 +* Fix using binary strings as property values -Technically backwards incompatible: +Previously it'd fall through and set the value to False ([`b443027`](https://github.com/python-zeroconf/python-zeroconf/commit/b4430274ba8355ceaadc2d89a84752f1ac1485e7)) - - Adding a listener that does not inherit from RecordUpdateListener - now logs an error (\#1034) @bdraco +* Reformat a bit ([`2190818`](https://github.com/python-zeroconf/python-zeroconf/commit/219081860d28e49b1ae71a78e1a0da459689ab9c)) - - The NotRunningException exception is now thrown when Zeroconf is not - running (\#1033) @bdraco - - Before this change the consumer would get a timeout or an - EventLoopBlocked exception when calling - ServiceInfo.\*request when the - instance had already been shutdown or had failed to startup. +* Make examples' output quiet by default ([`08e0dc2`](https://github.com/python-zeroconf/python-zeroconf/commit/08e0dc2c7c1551ffa9a1e7297112b0f46b7ccc4e)) - - The EventLoopBlocked exception is now thrown when a coroutine times - out (\#1032) @bdraco - - Previously - concurrent.futures.TimeoutError would - have been raised instead. This is never expected to happen during - normal operation.
+* Change ServiceBrowser interface experimentally ([`d162e54`](https://github.com/python-zeroconf/python-zeroconf/commit/d162e54c6aad175505028aa7beb8a1a0cb7a231d)) -# 0.36.13 +* Handle exceptions better ([`7cad7a4`](https://github.com/python-zeroconf/python-zeroconf/commit/7cad7a43179e3f547796b125e3ed8169ef3f4157)) - - Unavailable interfaces are now skipped during socket bind (\#1028) - @bdraco +* Add some debug logging ([`451c072`](https://github.com/python-zeroconf/python-zeroconf/commit/451c0729e2490ac6283010ddcbbcc723d86e6765)) - - Downgraded incoming corrupt packet logging to debug (\#1029) @bdraco - - Warning about network traffic we have no control over is confusing - to users as they think there is something wrong with zeroconf +* Make the code nicer -# 0.36.12 +This includes: - - Prevented service lookups from deadlocking if time abruptly moves - backwards (\#1006) @bdraco - - The typical reason time moves backwards is via an ntp update +* rearranging code to make it more readable +* catching KeyError instead of all exceptions and making it obvious what + can possibly raise there +* renaming things ([`df88670`](https://github.com/python-zeroconf/python-zeroconf/commit/df88670963e8c3a1f11a6af026b484ff4343d271)) -# 0.36.11 +* Remove redundant parentheses ([`3775c47`](https://github.com/python-zeroconf/python-zeroconf/commit/3775c47d8cf3c941603fa393265b86d05f61b915)) -No functional changes from 0.36.10. This release corrects an error in -the README.rst file that prevented the build from uploading to PyPI +* Make examples nicer and make them show all logs ([`193ee64`](https://github.com/python-zeroconf/python-zeroconf/commit/193ee64d6212ff9a814b76b13f9ef46676025dc3)) -# 0.36.10 +* Remove duplicates from all interfaces list - - scope\_id is now stripped from IPv6 addresses if given (\#1020) - @StevenLooman - - cpython 3.9 allows a suffix %scope\_id in IPv6Address. 
This caused - an error with the existing code if it was not stripped +It has been mentioned in GH #12 that the list of all machine's network +interfaces can contain duplicates; it shouldn't break anything but +there's no need to open multiple sockets in such case. ([`af5e363`](https://github.com/python-zeroconf/python-zeroconf/commit/af5e363e7fcb392081dc98915defd93c5002c3fc)) - - Optimized decoding labels from incoming packets (\#1019) @bdraco +* Don't fail when the netmask is unknown ([`463428f`](https://github.com/python-zeroconf/python-zeroconf/commit/463428ff8550a4f0e12b60e6f6a35efedca31271)) -# 0.36.9 +* Skip host only network interfaces - - Ensure ServiceInfo orders newest addresses first (\#1012) @bdraco - - This change effectively restored the behavior before 1s cache flush - expire behavior described in rfc6762 section 10.2 was added for - callers that rely on this. +On Ubuntu Linux treating such interface (network mask 255.255.255.255) +would result in: -# 0.36.8 +* EADDRINUSE "Address already in use" when trying to add multicast group + membership using IP_ADD_MEMBERSHIP +* success when setting the interface as outgoing multicast interface + using IP_MULTICAST_IF +* EINVAL "Invalid argument" when trying to send multicast datagram using + socket with that interface set as the multicast outgoing interface ([`b5e9e94`](https://github.com/python-zeroconf/python-zeroconf/commit/b5e9e944e6f3c990862b3b03831bb988579ed340)) - - Fixed ServiceBrowser infinite loop when zeroconf is closed before it - is canceled (\#1008) @bdraco +* Configure logging during the tests ([`0208228`](https://github.com/python-zeroconf/python-zeroconf/commit/0208228d8c760f3672954f5434c2ea54d7fd4196)) -# 0.36.7 +* Use all network interfaces by default ([`193cf47`](https://github.com/python-zeroconf/python-zeroconf/commit/193cf47a1144afc9158f0075a886c1f754d96f18)) - - Improved performance of responding to queries (\#994) (\#996) - (\#997) @bdraco - - Improved log message when receiving an 
 invalid or corrupt packet - (\#998) @bdraco +* Ignore EADDRINUSE when appropriate -# 0.36.6 +On some systems it's necessary to do so ([`0f7c64f`](https://github.com/python-zeroconf/python-zeroconf/commit/0f7c64f8cdacae34c227edd5da4f445ece12da89)) - - Improved performance of sending outgoing packets (\#990) @bdraco +* Export Error and InterfaceChoice ([`500a76b`](https://github.com/python-zeroconf/python-zeroconf/commit/500a76bb1332fe34b45e681c767baddfbece4916)) -# 0.36.5 +* Fix ServiceInfo repr and text on Python 3 - - Reduced memory usage for incoming and outgoing packets (\#987) - @bdraco +Closes #1 ([`f3fd4cd`](https://github.com/python-zeroconf/python-zeroconf/commit/f3fd4cd69e9707221d8bd5ee6b3bb86b0985f604)) -# 0.36.4 +* Add preliminary support for multiple net interfaces ([`442a599`](https://github.com/python-zeroconf/python-zeroconf/commit/442a59967f7b0f2d5c2ef512874ad2ab13dedae4)) - - Improved performance of constructing outgoing packets (\#978) - (\#979) @bdraco - - Deferred parsing of incoming packets when it can be avoided (\#983) - @bdraco +* Rationalize error handling when sending data ([`a0ee3d6`](https://github.com/python-zeroconf/python-zeroconf/commit/a0ee3d62db7b5350a21091e37824e187ebf99348)) -# 0.36.3 +* Make Zeroconf.socket private ([`78449ef`](https://github.com/python-zeroconf/python-zeroconf/commit/78449ef1e07dc68b63bb68038cb66f22e083fdfe)) - - Improved performance of parsing incoming packets (\#975) @bdraco +* Refactor Condition usage to use context manager interface ([`8d32fa4`](https://github.com/python-zeroconf/python-zeroconf/commit/8d32fa4b12e1b52d72a7ba9588437c4c787e0ffd)) -# 0.36.2 +* Use six for Python 2/3 compatibility ([`f0c3979`](https://github.com/python-zeroconf/python-zeroconf/commit/f0c39797869175cf88d76c75d39835abb2052f88)) - - Include NSEC records for non-existent types when responding with - addresses (\#972) (\#971) @bdraco Implements RFC6762 sec 6.2 - () +* Use six for Python 2/3 compatibility 
([`54ed4b7`](https://github.com/python-zeroconf/python-zeroconf/commit/54ed4b79bb8de9523b5a5b74a79b01c8aa2291a7)) -# 0.36.1 - - - Skip goodbye packets for addresses when there is another service - registered with the same name (\#968) @bdraco - - If a ServiceInfo that used the same server name as another - ServiceInfo was unregistered, goodbye packets would be sent for the - addresses and would cause the other service to be seen as offline. - - - Fixed equality and hash for dns records with the unique bit (\#969) - @bdraco - - These records should have the same hash and equality since the - unique bit (cache flush bit) is not considered when adding or - removing the records from the cache. - -# 0.36.0 - -Technically backwards incompatible: - - - Fill incomplete IPv6 tuples to avoid WinError on windows (\#965) - @lokesh2019 - - Fixed \#932 - -# 0.35.1 +* Refactor version detection in the setup script - - Only reschedule types if the send next time changes (\#958) @bdraco - - When the PTR response was seen again, the timer was being canceled - and rescheduled even if the timer was for the same time. While this - did not cause any breakage, it is quite inefficient. +This doesn't depend on zeroconf module being importable when setup is +ran ([`1c2205d`](https://github.com/python-zeroconf/python-zeroconf/commit/1c2205d5c9b364a825d51acd03add4de91cb645a)) - - Cache DNS record and question hashes (\#960) @bdraco - - The hash was being recalculated every time the object was being used - in a set or dict. Since the hashes are effectively immutable, we - only calculate them once now. 
+* Drop "zero dependencies" feature ([`d8c1ec8`](https://github.com/python-zeroconf/python-zeroconf/commit/d8c1ec8ee13191e8ec4412770994f0676ace442c)) -# 0.35.0 +* Stop dropping multicast group membership - - Reduced chance of accidental synchronization of ServiceInfo requests - (\#955) @bdraco - - Sort aggregated responses to increase chance of name compression - (\#954) @bdraco - -Technically backwards incompatible: - - - Send unicast replies on the same socket the query was received - (\#952) @bdraco - - When replying to a QU question, we do not know if the sending host - is reachable from all of the sending sockets. We now avoid this - problem by replying via the receiving socket. This was the existing - behavior when InterfaceChoice.Default - is set. - - This change extends the unicast relay behavior to used with - InterfaceChoice.Default to apply when - InterfaceChoice.All or interfaces are - explicitly passed when instantiating a - Zeroconf instance. - - Fixes \#951 - -# 0.34.3 - - - Fix sending immediate multicast responses (\#949) @bdraco - -# 0.34.2 - - - Coalesce aggregated multicast answers (\#945) @bdraco - - When the random delay is shorter than the last scheduled response, - answers are now added to the same outgoing time group. 
- - This reduces traffic when we already know we will be sending a group - of answers inside the random delay window described in - datatracker.ietf.org/doc/html/rfc6762\#section-6.3 - - - Ensure ServiceInfo requests can be answered inside the default - timeout with network protection (\#946) @bdraco - - Adjust the time windows to ensure responses that have triggered the - protection against against excessive packet flooding due to software - bugs or malicious attack described in RFC6762 section 6 can respond - in under 1350ms to ensure ServiceInfo can ask two questions within - the default timeout of 3000ms - -# 0.34.1 - - - Ensure multicast aggregation sends responses within 620ms (\#942) - @bdraco - - Responses that trigger the protection against against excessive - packet flooding due to software bugs or malicious attack described - in RFC6762 section 6 could cause the multicast aggregation response - to be delayed longer than 620ms (The maximum random delay of 120ms - and 500ms additional for aggregation). - - Only responses that trigger the protection are delayed longer than - 620ms - -# 0.34.0 - - - Implemented Multicast Response Aggregation (\#940) @bdraco - - Responses are now aggregated when possible per rules in RFC6762 - section 6.4 - - Responses that trigger the protection against against excessive - packet flooding due to software bugs or malicious attack described - in RFC6762 section 6 are delayed instead of discarding as it was - causing responders that implement Passive Observation Of Failures - (POOF) to evict the records. - - Probe responses are now always sent immediately as there were cases - where they would fail to be answered in time to defend a name. 
- -# 0.33.4 - - - Ensure zeroconf can be loaded when the system disables IPv6 (\#933) - @che0 - -# 0.33.3 - - - Added support for forward dns compression pointers (\#934) @bdraco - - Provide sockname when logging a protocol error (\#935) @bdraco - -# 0.33.2 - - - Handle duplicate goodbye answers in the same packet (\#928) @bdraco - - Solves an exception being thrown when we tried to remove the known - answer from the cache when the second goodbye answer in the same - packet was processed - - Fixed \#926 - - - Skip ipv6 interfaces that return ENODEV (\#930) @bdraco - -# 0.33.1 - - - Version number change only with less restrictive directory - permissions - - Fixed \#923 - -# 0.33.0 - -This release eliminates all threading locks as all non-threadsafe -operations now happen in the event loop. - - - Let connection\_lost close the underlying socket (\#918) @bdraco - - The socket was closed during shutdown before asyncio's - connection\_lost handler had a chance to close it which resulted in - a traceback on windows. - - Fixed \#917 - -Technically backwards incompatible: - - - Removed duplicate unregister\_all\_services code (\#910) @bdraco - - Calling Zeroconf.close from same asyncio event loop zeroconf is - running in will now skip unregister\_all\_services and log a warning - as this a blocking operation and is not async safe and never has - been. - - Use AsyncZeroconf instead, or for legacy code call - async\_unregister\_all\_services before Zeroconf.close - -# 0.32.1 - - - Increased timeout in ServiceInfo.request to handle loaded systems - (\#895) @bdraco - - It can take a few seconds for a loaded system to run the - async\_request coroutine when the - event loop is busy, or the system is CPU bound (example being Home - Assistant startup). We now add an additional - \_LOADED\_SYSTEM\_TIMEOUT (10s) to - the run\_coroutine\_threadsafe calls - to ensure the coroutine has the total amount of time to run up to - its internal timeout (default of 3000ms). 
- - Ten seconds is a bit large of a timeout; however, it is only used in - cases where we wrap other timeouts. We now expect the only instance - the run\_coroutine\_threadsafe result - timeout will happen in a production circumstance is when someone is - running a ServiceInfo.request() in a - thread and another thread calls - Zeroconf.close() at just the right - moment that the future is never completed unless the system is so - loaded that it is nearly unresponsive. - - The timeout for - run\_coroutine\_threadsafe is the - maximum time a thread can cleanly shut down when zeroconf is closed - out in another thread, which should always be longer than the - underlying thread operation. - -# 0.32.0 - -This release offers 100% line and branch coverage. - - - Made ServiceInfo first question QU (\#852) @bdraco - - We want an immediate response when requesting with ServiceInfo by - asking a QU question; most responders will not delay the response - and respond right away to our question. This also improves - compatibility with split networks as we may not have been able to - see the response otherwise. If the responder has not multicast the - record recently, it may still choose to do so in addition to - responding via unicast - - Reduces traffic when there are multiple zeroconf instances running - on the network running ServiceBrowsers - - If we don't get an answer on the first try, we ask a QM question in - the event, we can't receive a unicast response for some reason - - This change puts ServiceInfo inline with ServiceBrowser which also - asks the first question as QU since ServiceInfo is commonly called - from ServiceBrowser callbacks - - - Limited duplicate packet suppression to 1s intervals (\#841) @bdraco - - Only suppress duplicate packets that happen within the same second. - Legitimate queriers will retry the question if they are suppressed. 
- The limit was reduced to one second to be in line with rfc6762 - - - Made multipacket known answer suppression per interface (\#836) - @bdraco - - The suppression was happening per instance of Zeroconf instead of - per interface. Since the same network can be seen on multiple - interfaces (usually and wifi and ethernet), this would confuse the - multi-packet known answer supression since it was not expecting to - get the same data more than once - - - New ServiceBrowsers now request QU in the first outgoing when - unspecified (\#812) @bdraco - - When we - start a ServiceBrowser and zeroconf has just started up, the known - answer list will be small. By asking a QU question first, it is - likely that we have a large known answer list by the time we ask the - QM question a second later (current default which is likely too low - but would be a breaking change to increase). This reduces the amount - of traffic on the network, and has the secondary advantage that most - responders will answer a QU question without the typical delay - answering QM questions. - - - IPv6 link-local addresses are now qualified with scope\_id (\#343) - @ibygrave - - When a service is advertised on an IPv6 address where the scope is - link local, i.e. fe80::/64 (see RFC 4007) the resolved IPv6 address - must be extended with the scope\_id that identifies through the "%" - symbol the local interface to be used when routing to that address. - A new API parsed\_scoped\_addresses() - is provided to return qualified addresses to avoid breaking - compatibility on the existing parsed\_addresses(). - - - Network adapters that are disconnected are now skipped (\#327) - @ZLJasonG - - - Fixed listeners missing initial packets if Engine starts too quickly - (\#387) @bdraco - - When manually creating a zeroconf.Engine object, it is no longer - started automatically. It must manually be started by calling - .start() on the created object. 
- - The Engine thread is now started after all the listeners have been - added to avoid a race condition where packets could be missed at - startup. - - - Fixed answering matching PTR queries with the ANY query (\#618) - @bdraco - - - Fixed lookup of uppercase names in the registry (\#597) @bdraco - - If the ServiceInfo was registered with an uppercase name and the - query was for a lowercase name, it would not be found and - vice-versa. - - - Fixed unicast responses from any source port (\#598) @bdraco - - Unicast responses were only being sent if the source port was 53, - this prevented responses when testing with dig: - - > dig -p 5353 @224.0.0.251 media-12.local - - The above query will now see a response - - - Fixed queries for AAAA records not being answered (\#616) @bdraco - - - Removed second level caching from ServiceBrowsers (\#737) @bdraco - - The ServiceBrowser had its own cache of the last time it saw a - service that was reimplementing the DNSCache and presenting a source - of truth problem that lead to unexpected queries when the two - disagreed. - - - Fixed server cache not being case-insensitive (\#731) @bdraco - - If the server name had uppercase chars and any of the matching - records were lowercase, and the server would not be found - - - Fixed cache handling of records with different TTLs (\#729) @bdraco - - There should only be one unique record in the cache at a time as - having multiple unique records will different TTLs in the cache can - result in unexpected behavior since some functions returned all - matching records and some fetched from the right side of the list to - return the newest record. Instead we now store the records in a dict - to ensure that the newest record always replaces the same unique - record, and we never have a source of truth problem determining the - TTL of a record from the cache. 
- - - Fixed ServiceInfo with multiple A records (\#725) @bdraco - - If there were multiple A records for the host, ServiceInfo would - always return the last one that was in the incoming packet, which - was usually not the one that was wanted. - - - Fixed stale unique records expiring too quickly (\#706) @bdraco - - Records now expire 1s in the future instead of instant removal. - - tools.ietf.org/html/rfc6762\#section-10.2 Queriers receiving a - Multicast DNS response with a TTL of zero SHOULD NOT immediately - delete the record from the cache, but instead record a TTL of 1 and - then delete the record one second later. In the case of multiple - Multicast DNS responders on the network described in Section 6.6 - above, if one of the responders shuts down and incorrectly sends - goodbye packets for its records, it gives the other cooperating - responders one second to send out their own response to "rescue" the - records before they expire and are deleted. - - - Fixed exception when unregistering a service multiple times (\#679) - @bdraco - - - Added an AsyncZeroconfServiceTypes to mirror ZeroconfServiceTypes to - zeroconf.asyncio (\#658) @bdraco - - - Fixed interface\_index\_to\_ip6\_address not skiping ipv4 adapters - (\#651) @bdraco - - - Added async\_unregister\_all\_services to AsyncZeroconf (\#649) - @bdraco - - - Fixed services not being removed from the registry when calling - unregister\_all\_services (\#644) @bdraco - - There was a race condition where a query could be answered for a - service in the registry, while goodbye packets which could result in - a fresh record being broadcast after the goodbye if a query came in - at just the right time. 
To avoid this, we now remove the services - from the registry right after we generate the goodbye packet - - - Fixed zeroconf exception on load when the system disables IPv6 - (\#624) @bdraco - - - Fixed the QU bit missing from for probe queries (\#609) @bdraco - - The bit should be set per - datatracker.ietf.org/doc/html/rfc6762\#section-8.1 - - - Fixed the TC bit missing for query packets where the known answers - span multiple packets (\#494) @bdraco - - - Fixed packets not being properly separated when exceeding maximum - size (\#498) @bdraco - - Ensure that questions that exceed the max packet size are moved to - the next packet. This fixes DNSQuestions being sent in multiple - packets in violation of: - datatracker.ietf.org/doc/html/rfc6762\#section-7.2 - - Ensure only one resource record is sent when a record exceeds - \_MAX\_MSG\_TYPICAL - datatracker.ietf.org/doc/html/rfc6762\#section-17 - - - Fixed PTR questions asked in uppercase not being answered (\#465) - @bdraco - - - Added Support for context managers in Zeroconf and AsyncZeroconf - (\#284) @shenek - - - Implemented an AsyncServiceBrowser to compliment the sync - ServiceBrowser (\#429) @bdraco - - - Added async\_get\_service\_info to AsyncZeroconf and async\_request - to AsyncServiceInfo (\#408) @bdraco - - - Implemented allowing passing in a sync Zeroconf instance to - AsyncZeroconf (\#406) @bdraco - - - Fixed IPv6 setup under MacOS when binding to "" (\#392) @bdraco - - - Fixed ZeroconfServiceTypes.find not always cancels the - ServiceBrowser (\#389) @bdraco - - There was a short window where the ServiceBrowser thread could be - left running after Zeroconf is closed because the .join() was never - waited for when a new Zeroconf object was created - - - Fixed duplicate packets triggering duplicate updates (\#376) @bdraco - - If TXT or SRV records update was already processed and then received - again, it was possible for a second update to be called back in the - ServiceBrowser - - - Fixed 
ServiceStateChange.Updated event happening for IPs that - already existed (\#375) @bdraco - - - Fixed RFC6762 Section 10.2 paragraph 2 compliance (\#374) @bdraco - - - Reduced length of ServiceBrowser thread name with many types (\#373) - @bdraco - - - Fixed empty answers being added in ServiceInfo.request (\#367) - @bdraco - - - Fixed ServiceInfo not populating all AAAA records (\#366) @bdraco - - Use get\_all\_by\_details to ensure all records are loaded into - addresses. - - Only load A/AAAA records from the cache once in load\_from\_cache if - there is a SRV record present - - Move duplicate code that checked if the ServiceInfo was complete - into its own function - - - Fixed a case where the cache list can change during iteration - (\#363) @bdraco - - - Return task objects created by AsyncZeroconf (\#360) @nocarryr - -Traffic Reduction: - - - Added support for handling QU questions (\#621) @bdraco - - Implements RFC 6762 sec 5.4: Questions Requesting Unicast Responses - datatracker.ietf.org/doc/html/rfc6762\#section-5.4 - - - Implemented protect the network against excessive packet flooding - (\#619) @bdraco - - - Additionals are now suppressed when they are already in the answers - section (\#617) @bdraco - - - Additionals are no longer included when the answer is suppressed by - known-answer suppression (\#614) @bdraco - - - Implemented multi-packet known answer supression (\#687) @bdraco - - Implements datatracker.ietf.org/doc/html/rfc6762\#section-7.2 - - - Implemented efficient bucketing of queries with known answers - (\#698) @bdraco - - - Implemented duplicate question suppression (\#770) @bdraco - - - -Technically backwards incompatible: - - - Update internal version check to match docs (3.6+) (\#491) @bdraco - - Python version earlier then 3.6 were likely broken with zeroconf - already, however, the version is now explicitly checked. 
- - - Update python compatibility as PyPy3 7.2 is required (\#523) @bdraco - -Backwards incompatible: - - - Drop oversize packets before processing them (\#826) @bdraco - - Oversized packets can quickly overwhelm the system and deny service - to legitimate queriers. In practice, this is usually due to broken - mDNS implementations rather than malicious actors. - - - Guard against excessive ServiceBrowser queries from PTR records - significantly lowerthan recommended (\#824) @bdraco - - We now enforce a minimum TTL for PTR records to avoid - ServiceBrowsers generating excessive queries refresh queries. Apple - uses a 15s minimum TTL, however, we do not have the same level of - rate limit and safeguards, so we use 1/4 of the recommended value. - - - RecordUpdateListener now uses async\_update\_records instead of - update\_record (\#419, \#726) @bdraco - - This allows the listener to receive all the records that have been - updated in a single transaction such as a packet or cache expiry. - - update\_record has been deprecated in favor of - async\_update\_records A compatibility shim exists to ensure classes - that use RecordUpdateListener as a base class continue to have - update\_record called, however, they should be updated as soon as - possible. - - A new method async\_update\_records\_complete is now called on each - listener when all listeners have completed processing updates and - the cache has been updated. This allows ServiceBrowsers to delay - calling handlers until they are sure the cache has been updated as - its a common pattern to call for ServiceInfo when a ServiceBrowser - handler fires. - - The async\_ prefix was chosen to make it clear that these functions - run in the eventloop and should never do blocking I/O. Before 0.32+ - these functions ran in a select() loop and should not have been - doing any blocking I/O, but it was not clear to implementors that - I/O would block the loop. 
- - - Pass both the new and old records to async\_update\_records (\#792) - @bdraco - - Pass the old\_record (cached) as the value and the new\_record - (wire) to async\_update\_records instead of forcing each consumer to - check the cache since we will always have the old\_record when - generating the async\_update\_records call. This avoids the overhead - of multiple cache lookups for each listener. - -# 0.31.0 - - - Separated cache loading from I/O in ServiceInfo and fixed cache - lookup (\#356), thanks to J. Nick Koston. - - The ServiceInfo class gained a load\_from\_cache() method to only - fetch information from Zeroconf cache (if it exists) with no IO - performed. Additionally this should reduce IO in cases where cache - lookups were previously incorrectly failing. - -# 0.30.0 - - - Some nice refactoring work including removal of the Reaper thread, - thanks to J. Nick Koston. - - Fixed a Windows-specific The requested address is not valid in its - context regression, thanks to Timothee ‘TTimo’ Besset and J. Nick - Koston. - - Provided an asyncio-compatible service registration layer (in the - zeroconf.asyncio module), thanks to J. Nick Koston. - -# 0.29.0 - - - A single socket is used for listening on responding when - InterfaceChoice.Default is chosen. - Thanks to J. Nick Koston. - -Backwards incompatible: - - - Dropped Python 3.5 support - -# 0.28.8 - - - Fixed the packet generation when multiple packets are necessary, - previously invalid packets were generated sometimes. Patch thanks to - J. Nick Koston. - -# 0.28.7 - - - Fixed the IPv6 address rendering in the browser example, thanks to - Alexey Vazhnov. - - Fixed a crash happening when a service is added or removed during - handle\_response and improved exception handling, thanks to J. Nick - Koston. - -# 0.28.6 - - - Loosened service name validation when receiving from the network - this lets us handle some real world devices previously causing - errors, thanks to J. Nick Koston. 
- -# 0.28.5 - - - Enabled ignoring duplicated messages which decreases CPU usage, - thanks to J. Nick Koston. - - Fixed spurious AttributeError: module 'unittest' has no attribute - 'mock' in tests. - -# 0.28.4 - - - Improved cache reaper performance significantly, thanks to J. Nick - Koston. - - Added ServiceListener to \_\_all\_\_ as it's part of the public API, - thanks to Justin Nesselrotte. +It'll be taken care of by socket being closed ([`f6425d1`](https://github.com/python-zeroconf/python-zeroconf/commit/f6425d1d727edfa124264bcabeffd77397809965)) -# 0.28.3 +* Remove dead code ([`88f5a51`](https://github.com/python-zeroconf/python-zeroconf/commit/88f5a5193ba2ab0eefc99481ccc6a1b911d8dbea)) - - Reduced a time an internal lock is held which should eliminate - deadlocks in high-traffic networks, thanks to J. Nick Koston. +* Stop using Zeroconf.group attribute ([`903cb78`](https://github.com/python-zeroconf/python-zeroconf/commit/903cb78d3ff7bc8762bf23910562b8f5042c2f85)) -# 0.28.2 +* Remove some unused methods ([`80e8e10`](https://github.com/python-zeroconf/python-zeroconf/commit/80e8e1008bc28c8ab9ca966b89109146112d0edd)) - - Stopped asking questions we already have answers for in cache, - thanks to Paul Daumlechner. - - Removed initial delay before querying for service info, thanks to - Erik Montnemery. +* Refactor exception handling here ([`4b8f68b`](https://github.com/python-zeroconf/python-zeroconf/commit/4b8f68b39230bb9cc3c202395b58cc822b8fe862)) -# 0.28.1 +* Update README.rst ([`8f18609`](https://github.com/python-zeroconf/python-zeroconf/commit/8f1860956ee9c86b7ba095fc1293919933e1c0ad)) - - Fixed a resource leak connected to using ServiceBrowser with - multiple types, thanks to - 10. Nick Koston. 
+* Release as 0.16.0 ([`4e54b67`](https://github.com/python-zeroconf/python-zeroconf/commit/4e54b6738a490dcc7d2f9e7e1040c5da53727155)) -# 0.28.0 +* Tune logging ([`05c3c02`](https://github.com/python-zeroconf/python-zeroconf/commit/05c3c02044d2b4bff946e00803d0ddb2619f0927)) - - Improved Windows support when using socket errno checks, thanks to - Sandy Patterson. - - Added support for passing text addresses to ServiceInfo. - - Improved logging (includes fixing an incorrect logging call) - - Improved Windows compatibility by using Adapter.index from ifaddr, - thanks to PhilippSelenium. - - Improved Windows compatibility by stopping using - socket.if\_nameindex. - - Fixed an OS X edge case which should also eliminate a memory leak, - thanks to Emil Styrke. +* Migrate from clazz to class_ ([`4a67e12`](https://github.com/python-zeroconf/python-zeroconf/commit/4a67e124cd8f8c4d19f8c6c4a455d075bb948362)) -Technically backwards incompatible: +* Migrate more camel case names to snake case ([`92e4713`](https://github.com/python-zeroconf/python-zeroconf/commit/92e47132dc761a9a722caec261ae53de1785838f)) - - `ifaddr` 0.1.7 or newer is required now. 
+* Switch to snake case and clean up import order -## 0.27.1 +Closes #2 ([`5429748`](https://github.com/python-zeroconf/python-zeroconf/commit/5429748190950a5daf7e9cf91de824dfbd06ee7a)) - - Improved the logging situation (includes fixing a false-positive - "packets() made no progress adding records", thanks to Greg Badros) +* Rationalize exception handling a bit and setup logging ([`ada563c`](https://github.com/python-zeroconf/python-zeroconf/commit/ada563c5a1f6d7c54f2ae5c495503079c395438f)) -## 0.27.0 +* Update README.rst ([`47ff62b`](https://github.com/python-zeroconf/python-zeroconf/commit/47ff62bae1fd69ffd953c82bd480e4770bfee97b)) - - Large multi-resource responses are now split into separate packets - which fixes a bad mdns-repeater/ChromeCast Audio interaction ending - with ChromeCast Audio crash (and possibly some others) and improves - RFC 6762 compliance, thanks to Greg Badros - - Added a warning presented when the listener passed to ServiceBrowser - lacks update\_service() callback - - Added support for finding all services available in the browser - example, thanks to Perry Kunder +* Update README.rst ([`b290965`](https://github.com/python-zeroconf/python-zeroconf/commit/b290965ecd589ca4feb1f88a4232d1ec2725dc44)) -Backwards incompatible: +* Create universal wheels ([`bf97c14`](https://github.com/python-zeroconf/python-zeroconf/commit/bf97c1459a9d91d6aa88d7bf34c5f8b4cd3cedc5)) - - Removed previously deprecated ServiceInfo address constructor - parameter and property -## 0.26.3 +## v0.15.1 (2014-07-10) - - Improved readability of logged incoming data, thanks to Erik - Montnemery - - Threads are given unique names now to aid debugging, thanks to Erik - Montnemery - - Fixed a regression where get\_service\_info() called within a - listener add\_service method would deadlock, timeout and incorrectly - return None, fix thanks to Erik Montnemery, but Matt Saxon and - Hmmbob were also involved in debugging it. 
+### Unknown -## 0.26.2 +* Bump version to 0.15.1 ([`9e81863`](https://github.com/python-zeroconf/python-zeroconf/commit/9e81863de37e2ab972d5a76a1dc2d5c517f83cc6)) - - Added support for multiple types to ServiceBrowser, thanks to J. - Nick Koston - - Fixed a race condition where a listener gets a message before the - lock is created, thanks to - 10. Nick Koston +* Update README.rst ([`161743e`](https://github.com/python-zeroconf/python-zeroconf/commit/161743ea387c961d3554488239f93df4b39be18c)) -## 0.26.1 +* Add coverage badge to the readme ([`8502a7e`](https://github.com/python-zeroconf/python-zeroconf/commit/8502a7e1c9770a42e44b4f1beb34c887212e7d48)) - - Fixed a performance regression introduced in 0.26.0, thanks to J. - Nick Koston (this is close in spirit to an optimization made in - 0.24.5 by the same author) +* Send coverage to coveralls ([`1d90a9f`](https://github.com/python-zeroconf/python-zeroconf/commit/1d90a9f91f87753a1ea649ce5da1bc6a7da4013d)) -## 0.26.0 +* Fix socket.error handling - - Fixed a regression where service update listener wasn't called on IP - address change (it's called on SRV/A/AAAA record changes now), - thanks to Matt Saxon +This closes #4 ([`475e80b`](https://github.com/python-zeroconf/python-zeroconf/commit/475e80b90e96364a183c63f09fa3858f34aa3646)) -Technically backwards incompatible: +* Add test_coverage make target ([`89531e6`](https://github.com/python-zeroconf/python-zeroconf/commit/89531e641f15b24a60f9fb2e9f71a7aa8450363a)) - - Service update hook is no longer called on service addition (service - added hook is still called), this is related to the fix above +* Add PyPI version badge to the readme ([`4c852d4`](https://github.com/python-zeroconf/python-zeroconf/commit/4c852d424d07925ae01c24a51ffc36ecae49b48d)) -## 0.25.1 +* Refactor integration test to use events ([`922eab0`](https://github.com/python-zeroconf/python-zeroconf/commit/922eab05596b72d141d459e83146a4cdb6c84389)) - - Eliminated 5s hangup when calling Zeroconf.close(), 
thanks to Erik - Montnemery +* Fix readme formatting ([`7b23734`](https://github.com/python-zeroconf/python-zeroconf/commit/7b23734356f85ccaa6ca66ffaeea8484a2d45d3d)) -## 0.25.0 +* Update README.rst ([`83fd618`](https://github.com/python-zeroconf/python-zeroconf/commit/83fd618328aff29892c71f9ba5b9ff983fe4a202)) - - Reverted uniqueness assertions when browsing, they caused a - regression +* Refactor browser example ([`8328aed`](https://github.com/python-zeroconf/python-zeroconf/commit/8328aed1444781b6fac854eb722ae0fef14a3cc4)) -Backwards incompatible: +* Update README.rst ([`49af263`](https://github.com/python-zeroconf/python-zeroconf/commit/49af26350390484bc6f4b66dab4f6b004040cd4a)) - - Rationalized handling of TXT records. Non-bytes values are converted - to str and encoded to bytes using UTF-8 now, None values mean - value-less attributes. When receiving TXT records no decoding is - performed now, keys are always bytes and values are either bytes or - None in value-less attributes. +* Bump version to 0.15 ([`77bcadd`](https://github.com/python-zeroconf/python-zeroconf/commit/77bcaddbd1964fb0b494e98ec3ae6d66ea42c509)) -## 0.24.5 +* Add myself to authors ([`b9f886b`](https://github.com/python-zeroconf/python-zeroconf/commit/b9f886bf2815c86c7004e123146293c48ea68f1e)) - - Fixed issues with shared records being used where they shouldn't be - (TXT, SRV, A records are unique now), thanks to Matt Saxon - - Stopped unnecessarily excluding host-only interfaces from - InterfaceChoice.all as they don't forbid multicast, thanks to - Andreas Oberritter - - Fixed repr() of IPv6 DNSAddress, thanks to Aldo Hoeben - - Removed duplicate update messages sent to listeners, thanks to Matt - Saxon - - Added support for cooperating responders, thanks to Matt Saxon - - Optimized handle\_response cache check, thanks to J. Nick Koston - - Fixed memory leak in DNSCache, thanks to J. 
Nick Koston +* Reuse one Zeroconf instance in browser example ([`1ee00b3`](https://github.com/python-zeroconf/python-zeroconf/commit/1ee00b318eab386b709351ffae81c8293f4e6d4d)) -## 0.24.4 +* Update README.rst ([`fba4215`](https://github.com/python-zeroconf/python-zeroconf/commit/fba4215be1804a13e454e609ed6df2cf98e149f2)) - - Fixed resetting TTL in DNSRecord.reset\_ttl(), thanks to Matt Saxon - - Improved various DNS class' string representations, thanks to Jay - Hogg +* Update README.rst ([`c7bfe63`](https://github.com/python-zeroconf/python-zeroconf/commit/c7bfe63f9a7eff9a1ede0ac63a329a316d3192ab)) -## 0.24.3 +* Rename examples ([`3502198`](https://github.com/python-zeroconf/python-zeroconf/commit/3502198768062b49564121b48a792ce5e7b7b288)) - - Fixed import-time "TypeError: 'ellipsis' object is not iterable." on - CPython 3.5.2 +* Refactor examples ([`2ce95f5`](https://github.com/python-zeroconf/python-zeroconf/commit/2ce95f52e7a02c7f1113ba7ebee3c89babb9a26e)) -## 0.24.2 +* Update README.rst ([`6a7cd31`](https://github.com/python-zeroconf/python-zeroconf/commit/6a7cd3197ee6ae5690b29b6543fc86d1b1a420d8)) - - Added support for AWDL interface on macOS (needed and used by the - opendrop project but should be useful in general), thanks to Milan - Stute - - Added missing type hints +* Advertise Python 3 support ([`d330918`](https://github.com/python-zeroconf/python-zeroconf/commit/d330918970d719d6b26a3f81e83dbb8b8adac0a4)) -## 0.24.1 +* Update README.rst ([`6aae20e`](https://github.com/python-zeroconf/python-zeroconf/commit/6aae20e1c1bef8413573139d62d3d2b889fe8776)) - - Applied some significant performance optimizations, thanks to Jaime - van Kessel for the patch and to Ghostkeeper for performance - measurements - - Fixed flushing outdated cache entries when incoming record is - unique, thanks to Michael Hu - - Fixed handling updates of TXT records (they'd not get recorded - previously), thanks to Michael Hu +* Move examples to examples directory 
([`c83891c`](https://github.com/python-zeroconf/python-zeroconf/commit/c83891c9dd2f20e8dee44f1b412a536d20cbcbe3)) -## 0.24.0 +* Fix regression introduced with Python 3 compat ([`0a0f7e0`](https://github.com/python-zeroconf/python-zeroconf/commit/0a0f7e0e72d7f9ed08231d94b66ff44bcff60151)) - - Added IPv6 support, thanks to Dmitry Tantsur - - Added additional recommended records to PTR responses, thanks to - Scott Mertz - - Added handling of ENOTCONN being raised during shutdown when using - Eventlet, thanks to Tamás Nepusz - - Included the py.typed marker in the package so that type checkers - know to use type hints from the source code, thanks to Dmitry - Tantsur +* Mark threads as daemonic (at least for now) ([`b8cfc79`](https://github.com/python-zeroconf/python-zeroconf/commit/b8cfc7996941afded5c9c7e7903378279590b20f)) -## 0.23.0 +* Update README.rst ([`cd7ca98`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7ca98010044eb965bc988c23a8be59e09eb69a)) - - Added support for MyListener call getting updates to service TXT - records, thanks to Matt Saxon - - Added support for multiple addresses when publishing a service, - getting/setting single address has become deprecated. 
Change thanks - to Dmitry Tantsur +* Add Python 3 support ([`9a99aa7`](https://github.com/python-zeroconf/python-zeroconf/commit/9a99aa727f4e041a726aed3736c0a8ab625c4cb6)) -Backwards incompatible: +* Update README.rst ([`09a1f4f`](https://github.com/python-zeroconf/python-zeroconf/commit/09a1f4f9d76f64cc8c85f0525e05bdac53de210c)) - - Dropped Python 3.4 support +* Update README.rst ([`6feec34`](https://github.com/python-zeroconf/python-zeroconf/commit/6feec3459d2561f00402d627ea91a8a4981ad309)) -## 0.22.0 +* Tune package description ([`b819174`](https://github.com/python-zeroconf/python-zeroconf/commit/b8191741d4ef8e347f6dd138fa48da5aec9b6549)) - - A lot of maintenance work (tooling, typing coverage and - improvements, spelling) done, thanks to Ville Skyttä - - Provided saner defaults in ServiceInfo's constructor, thanks to - Jorge Miranda - - Fixed service removal packets not being sent on shutdown, thanks to - Andrew Bonney - - Added a way to define TTL-s through ServiceInfo contructor - parameters, thanks to Andrew Bonney +* Gitignore build/ ([`0ef1b0d`](https://github.com/python-zeroconf/python-zeroconf/commit/0ef1b0d3481b68a752efe822ff4e9ce8356bcffa)) -Technically backwards incompatible: +* Add setup.py ([`916bd38`](https://github.com/python-zeroconf/python-zeroconf/commit/916bd38ddb48a959c597ae1763193b4c2c74334f)) - - Adjusted query intervals to match RFC 6762, thanks to Andrew Bonney - - Made default TTL-s match RFC 6762, thanks to Andrew Bonney +* Update README.rst ([`35eced3`](https://github.com/python-zeroconf/python-zeroconf/commit/35eced310fbe1782fd87eb33e7f4befcb0a78499)) -## 0.21.3 +* Run actual tests on Travis ([`f8cea82`](https://github.com/python-zeroconf/python-zeroconf/commit/f8cea82177cea3577d2b4f70fec32e85229abdce)) - - This time really allowed incoming service names to contain - underscores (patch released as part of 0.21.0 was defective) +* Advertise Python 2.6 and PyPy support 
([`43b182c`](https://github.com/python-zeroconf/python-zeroconf/commit/43b182cce40bcb21eb1e052a0bc42bf367a963ca)) -## 0.21.2 +* Move readme to README.rst ([`fd3401e`](https://github.com/python-zeroconf/python-zeroconf/commit/fd3401efb55ae91324d12ba80affd2f3b3ebcf5e)) - - Fixed import-time typing-related TypeError when older typing version - is used +* Move readme to README.rst ([`353b700`](https://github.com/python-zeroconf/python-zeroconf/commit/353b700df79b49c49db62e0a6e6eb0eae3ccb444)) -## 0.21.1 +* Stop catching BaseExceptions ([`41a013c`](https://github.com/python-zeroconf/python-zeroconf/commit/41a013c8a051b3f80018f37d4f254263cc890a68)) - - Fixed installation on Python 3.4 (we use typing now but there was no - explicit dependency on it) +* Set up Travis build ([`a2a6125`](https://github.com/python-zeroconf/python-zeroconf/commit/a2a6125dd03d9a810dac72163d545e413387217b)) -## 0.21.0 +* PEP8ize and clean up ([`e2964ed`](https://github.com/python-zeroconf/python-zeroconf/commit/e2964ed48263e72159e95cb0691af0dcb9ba498b)) - - Added an error message when importing the package using unsupported - Python version - - Fixed TTL handling for published service - - Implemented unicast support - - Fixed WSL (Windows Subsystem for Linux) compatibility - - Fixed occasional UnboundLocalError issue - - Fixed UTF-8 multibyte name compression - - Switched from netifaces to ifaddr (pure Python) - - Allowed incoming service names to contain underscores +* Updated for 0.14. ([`83aa0f3`](https://github.com/python-zeroconf/python-zeroconf/commit/83aa0f3803cdf79470f4a754c7b9ab616544eea1)) -## 0.20.0 +* Although SOL_IP is considered more correct here, it's undefined on some +systems, where IPPROTO_IP is available. (Both equate to 0.) Reported by +Mike Erdely. 
([`443aca8`](https://github.com/python-zeroconf/python-zeroconf/commit/443aca867d694432d466d20bdf7c49ebc7a4e684)) - - Dropped support for Python 2 (this includes PyPy) and 3.3 - - Fixed some class' equality operators - - ServiceBrowser entries are being refreshed when 'stale' now - - Cache returns new records first now instead of last +* Obsolete comment. ([`eee7196`](https://github.com/python-zeroconf/python-zeroconf/commit/eee7196626773eae2dc0dc1a68de03a99d778139)) -## 0.19.1 +* Really these should be network order. ([`5e10a20`](https://github.com/python-zeroconf/python-zeroconf/commit/5e10a20a9cb6bbc09356cbf957f3f7fa3e169ff2)) - - Allowed installation with netifaces \>= 0.10.6 (a bug that was - concerning us got fixed) +* Docstrings for examples; shorter timeout; struct.unpack() vs. ord(). ([`0884d6a`](https://github.com/python-zeroconf/python-zeroconf/commit/0884d6a56afc6fb559b6c90a923762393187e50a)) -## 0.19.0 +* Make examples executable. ([`5e5e78e`](https://github.com/python-zeroconf/python-zeroconf/commit/5e5e78e27240e7e03d1c8aa96ee0e1f7877d0d5d)) - - Technically backwards incompatible - restricted netifaces dependency - version to work around a bug, see - for details +* Unneeded. ([`2ac738f`](https://github.com/python-zeroconf/python-zeroconf/commit/2ac738f84bbcf29d03bad289cb243182ecdf48d6)) -## 0.18.0 +* getText() is redundant with getProperties(). ([`a115187`](https://github.com/python-zeroconf/python-zeroconf/commit/a11518726321b15059be255b6329cba591887197)) - - Dropped Python 2.6 support - - Improved error handling inside code executed when Zeroconf object is - being closed +* Allow graceful exit from announcement test. ([`0f3b413`](https://github.com/python-zeroconf/python-zeroconf/commit/0f3b413b269f8b95b6f8073ba39d11f156ae632c)) -## 0.17.7 +* More readable display in browser; automatically quit after giving ten +seconds to respond. 
([`eee4530`](https://github.com/python-zeroconf/python-zeroconf/commit/eee4530d7b8216338634282f3097cb96932aa28e)) - - Better Handling of DNS Incoming Packets parsing exceptions - - Many exceptions will now log a warning the first time they are seen - - Catch and log sendto() errors - - Fix/Implement duplicate name change - - Fix overly strict name validation introduced in 0.17.6 - - Greatly improve handling of oversized packets including: - - Implement name compression per RFC1035 - - Limit size of generated packets to 9000 bytes as per RFC6762 - - Better handle over sized incoming packets - - Increased test coverage to 95% +* New names, numbers. ([`2a000c5`](https://github.com/python-zeroconf/python-zeroconf/commit/2a000c589302147129eed990c842b38ac61f7514)) -## 0.17.6 +* Updated FSF address. ([`4e39602`](https://github.com/python-zeroconf/python-zeroconf/commit/4e396025ed666775973d54a50b69e8f635e28658)) - - Many improvements to address race conditions and exceptions during - ZC() startup and shutdown, thanks to: morpav, veawor, justingiorgi, - herczy, stephenrauch - - Added more test coverage: strahlex, stephenrauch - - Stephen Rauch contributed: - - Speed up browser startup - - Add ZeroconfServiceTypes() query class to discover all - advertised service types - - Add full validation for service names, types and subtypes - - Fix for subtype browsing - - Fix DNSHInfo support +* De-DOSification. ([`1dc3436`](https://github.com/python-zeroconf/python-zeroconf/commit/1dc3436e6357b66d0bb53f9b285f123b164984da)) -## 0.17.5 +* Lowercase imports. ([`e292868`](https://github.com/python-zeroconf/python-zeroconf/commit/e292868f9c7e817cb04dfce2d545f45db4041e5e)) - - Fixed OpenBSD compatibility, thanks to Alessio Sergi - - Fixed race condition on ServiceBrowser startup, thanks to gbiddison - - Fixed installation on some Python 3 systems, thanks to Per Sandström - - Fixed "size change during iteration" bug on Python 3, thanks to - gbiddison +* The great lowercasing. 
([`5541813`](https://github.com/python-zeroconf/python-zeroconf/commit/5541813fbb8e1d7b233d09ee2d20ac0ca322a9f2)) -## 0.17.4 +* Renamed tests. ([`4bb88b0`](https://github.com/python-zeroconf/python-zeroconf/commit/4bb88b0952833b84c15c85190c0a9cac01922cbe)) - - Fixed support for Linux kernel versions \< 3.9 (thanks to Giovanni - Harting and Luckydonald, GitHub pull request \#26) +* Replaced unwrapped "lgpl.txt" with traditional "COPYING". ([`ad6b1ec`](https://github.com/python-zeroconf/python-zeroconf/commit/ad6b1ecf9fa71a5ec14f7a08fc3d6a689a19e6d2)) -## 0.17.3 +* Don't need range() here. ([`b36e7d5`](https://github.com/python-zeroconf/python-zeroconf/commit/b36e7d5dd5922b1739911878b29aba921ec9ecb6)) - - Fixed DNSText repr on Python 3 (it'd crash when the text was longer - than 10 bytes), thanks to Paulus Schoutsen for the patch, GitHub - pull request \#24 +* testNumbersAnswers() was identical to testNumbersQuestions(). +(Presumably it was intended to test addAnswer() instead...) ([`416054d`](https://github.com/python-zeroconf/python-zeroconf/commit/416054d407013af8678928b949d6579df4044d46)) -## 0.17.2 +* Extraneous spaces. ([`f6615a9`](https://github.com/python-zeroconf/python-zeroconf/commit/f6615a9d7632f3510d2f0a36cab155ac753141ab)) - - Fixed installation on Python 3.4.3+ (was failing because of enum34 - dependency which fails to install on 3.4.3+, changed to depend on - enum-compat instead; thanks to Michael Brennan for the original - patch, GitHub pull request \#22) +* Moved history to README; updated version number, etc. ([`015bae2`](https://github.com/python-zeroconf/python-zeroconf/commit/015bae258b5ce73a2a12361e4c9295107126963c)) -## 0.17.1 +* Meaningless. ([`6147a6e`](https://github.com/python-zeroconf/python-zeroconf/commit/6147a6ed20222851ba4438dd65366f907b4c189f)) - - Fixed EADDRNOTAVAIL when attempting to use dummy network interfaces - on Windows, thanks to daid +* Also unexceptional. 
([`c36e3af`](https://github.com/python-zeroconf/python-zeroconf/commit/c36e3af2f6e0ea857f383f9b014f50b65fca641c)) -## 0.17.0 +* If name isn't in self.names, it's unexceptional. (And yes, I actually +tested, and this is faster.) ([`f772d4e`](https://github.com/python-zeroconf/python-zeroconf/commit/f772d4e5e208431378bf01d75eddc7df5119dff7)) - - Added some Python dependencies so it's not zero-dependencies anymore - - Improved exception handling (it'll be quieter now) - - Messages are listened to and sent using all available network - interfaces by default (configurable); thanks to Marcus Müller - - Started using logging more freely - - Fixed a bug with binary strings as property values being converted - to False (); - thanks to Dr. Seuss - - Added new `ServiceBrowser` event handler interface (see the - examples) - - PyPy3 now officially supported - - Fixed ServiceInfo repr on Python 3, thanks to Yordan Miladinov +* Excess spaces; don't use "len" as a label. After eblot. ([`df986ee`](https://github.com/python-zeroconf/python-zeroconf/commit/df986eed46e3ec7dadc6604d0b26e4fcf0b6291a)) -## 0.16.0 +* Outdated docs. ([`21d7c95`](https://github.com/python-zeroconf/python-zeroconf/commit/21d7c950f50827bc8ac6dd18fb0577c11b5cefac)) - - Set up Python logging and started using it - - Cleaned up code style (includes migrating from camel case to snake - case) +* Untab the test programs. ([`c13e4fa`](https://github.com/python-zeroconf/python-zeroconf/commit/c13e4fab3b0b95674fbc93cd2ac30fd2ba462a24)) -## 0.15.1 +* Remove the comment about the test programs. ([`8adab79`](https://github.com/python-zeroconf/python-zeroconf/commit/8adab79a64a73e76841b37e53e55fe8aad8eb580)) - - Fixed handling closed socket (GitHub \#4) +* Allow for the failure of getServiceInfo(). Not sure why it's happening, +though. ([`0a05f42`](https://github.com/python-zeroconf/python-zeroconf/commit/0a05f423ad591454a25c515d811556d10e5fc99f)) -## 0.15 +* Don't test for NonLocalNameException, since I killed it. 
([`d89ddfc`](https://github.com/python-zeroconf/python-zeroconf/commit/d89ddfcecc7b336aa59a4ff784cb8b810772d24f)) - - Forked by Jakub Stasiak - - Made Python 3 compatible - - Added setup script, made installable by pip and uploaded to PyPI - - Set up Travis build - - Reformatted the code and moved files around - - Stopped catching BaseException in several places, that could hide - errors - - Marked threads as daemonic, they won't keep application alive now +* Describe this fork. ([`656f959`](https://github.com/python-zeroconf/python-zeroconf/commit/656f959c26310629953cc661ffad681194295131)) -## 0.14 +* Write only a byte. ([`d346107`](https://github.com/python-zeroconf/python-zeroconf/commit/d34610768812906ff07974c1314f6073b431d96e)) - - Fix for SOL\_IP undefined on some systems - thanks Mike Erdely. - - Cleaned up examples. - - Lowercased module name. +* Although beacons _should_ fit within single packets, maybe we should allow for the possibility that they won't? (Or, does this even make sense with sendto()?) ([`ac91642`](https://github.com/python-zeroconf/python-zeroconf/commit/ac91642b0ea90a3c84b605e19d562b897e2cd1fd)) -## 0.13 +* Update the version to indicate a fork. ([`a81f3ab`](https://github.com/python-zeroconf/python-zeroconf/commit/a81f3ababc585acca4bacc51a832703286ec5cfb)) - - Various minor changes; see git for details. - - No longer compatible with Python 2.2. Only tested with 2.5-2.7. - - Fork by William McBrine. +* HHHHHH -> 6H ([`9a94953`](https://github.com/python-zeroconf/python-zeroconf/commit/9a949532484a55e52f1d2f14eb27277a5133ce29)) -## 0.12 +* In Zeroconf, use the same method of determining the default IP as elsewhere, instead of the unreliable gethostbyname(gethostname()) method (but fall back to that). ([`f6d4731`](https://github.com/python-zeroconf/python-zeroconf/commit/f6d47316a47d9d04539f1a4215dd7eec06c33d4c)) - - allow selection of binding interface - - typo fix - Thanks A. M. 
Kuchlingi - - removed all use of word 'Rendezvous' - this is an API change +* More again. ([`2420505`](https://github.com/python-zeroconf/python-zeroconf/commit/24205054309e110238fc5a986cdc27b17c44abef)) -## 0.11 +* More. ([`b8baed3`](https://github.com/python-zeroconf/python-zeroconf/commit/b8baed3a2876c126cac65a7d95bb88661b31483c)) - - correction to comments for addListener method - - support for new record types seen from OS X - - IPv6 address - - hostinfo - - ignore unknown DNS record types - - fixes to name decoding - - works alongside other processes using port 5353 (e.g. on Mac OS X) - - tested against Mac OS X 10.3.2's mDNSResponder - - corrections to removal of list entries for service browser +* Minor style things for Zeroconf (use True/False instead of 1/0, etc.). ([`173350e`](https://github.com/python-zeroconf/python-zeroconf/commit/173350e415e66c9629d553f820677453bdbe5724)) -## 0.10 +* Clearer. ([`3e718b5`](https://github.com/python-zeroconf/python-zeroconf/commit/3e718b55becd883324bf40eda700431b302a0da8)) - - Jonathon Paisley contributed these corrections: - - always multicast replies, even when query is unicast - - correct a pointer encoding problem - - can now write records in any order - - traceback shown on failure - - better TXT record parsing - - server is now separate from name - - can cancel a service browser - - modified some unit tests to accommodate these changes +* 80-column fixes for Zeroconf. ([`e5d930b`](https://github.com/python-zeroconf/python-zeroconf/commit/e5d930bb681f5544827fc0c9f37daa778dec5930)) -## 0.09 +* Minor simplification of the pack/unpack routines in Zeroconf. 
([`e814dd1`](https://github.com/python-zeroconf/python-zeroconf/commit/e814dd1e6848d8c7ec03660d347ea4a34390c37d)) - - remove all records on service unregistration - - fix DOS security problem with readName +* Skip unknown resource records in Zeroconf -- https://bugs.launchpad.net/pyzeroconf/+bug/498411 ([`488de88`](https://github.com/python-zeroconf/python-zeroconf/commit/488de8826ddd58646358900d057a4a1632492948)) -## 0.08 +* Some people are reporting bogus data coming back from Zeroconf scans, causing exceptions. ([`fe77e37`](https://github.com/python-zeroconf/python-zeroconf/commit/fe77e371cc68ea211508908e6180867c420ca042)) - - changed licensing to LGPL +* Don't need the string module here. ([`f76529c`](https://github.com/python-zeroconf/python-zeroconf/commit/f76529c685868dcdb62b6477f15ecb1122310cc5)) -## 0.07 +* Suppress EBADF errors in Zeroconf.py. ([`4c8aac9`](https://github.com/python-zeroconf/python-zeroconf/commit/4c8aac95613df62d001bd7192ec75247a2bb9b9d)) - - faster shutdown on engine - - pointer encoding of outgoing names - - ServiceBrowser now works - - new unit tests +* This doesn't seem to be necessary, and it's generating a lot of exceptions... ([`f80df7b`](https://github.com/python-zeroconf/python-zeroconf/commit/f80df7b0f8b9124970e109c51f7a49b7bd75906c)) -## 0.06 +* Untab Zeroconf. ([`892a4f0`](https://github.com/python-zeroconf/python-zeroconf/commit/892a4f095c23379a6cf5a0ef31521f9f90cb5276)) - - small improvements with unit tests - - added defined exception types - - new style objects - - fixed hostname/interface problem - - fixed socket timeout problem - - fixed add\_service\_listener() typo bug - - using select() for socket reads - - tested on Debian unstable with Python 2.2.2 +* has_key() is deprecated. ([`f998e39`](https://github.com/python-zeroconf/python-zeroconf/commit/f998e39cbb8d2c5556c10203957ff6a9ab2f546d)) -## 0.05 +* The initial version I committed to HME for Python back in 2008. 
This is +a step back in some respects (re-inserting tabs that will be undone a +couple patches hence), so that I can apply the patches going forward. ([`d952a9c`](https://github.com/python-zeroconf/python-zeroconf/commit/d952a9c117ae539cf4778d76618fe813b10a9a34)) - - ensure case insensitivty on domain names - - support for unicast DNS queries +* Remove the executable bit. ([`f0d095d`](https://github.com/python-zeroconf/python-zeroconf/commit/f0d095d0f1c2767be6da47f885f5ed019e9fa363)) -## 0.04 +* Removed pyc file ([`38d0a18`](https://github.com/python-zeroconf/python-zeroconf/commit/38d0a184c13772dae3c14d3c46a30c68497c54db)) - - added some unit tests - - added \_\_ne\_\_ adjuncts where required - - ensure names end in '.local.' - - timeout on receiving socket for clean shutdown +* First commit ([`c3a39f8`](https://github.com/python-zeroconf/python-zeroconf/commit/c3a39f874a5c10e91ee2315271f13ae74ee381fd)) diff --git a/pyproject.toml b/pyproject.toml index 7bd2960f..4a9ca683 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.135.0" +version = "0.136.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 58bda33d..ec3a682f 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.135.0" +__version__ = "0.136.0" __license__ = "LGPL" From 857e7423dee3e3e6e2c5d3049a47ddb28a5e6cfb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 09:39:08 -0600 Subject: [PATCH 266/434] chore(deps-dev): bump setuptools from 75.2.0 to 75.3.0 (#1437) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1c99cd11..f32c8580 100644 --- a/poetry.lock +++ b/poetry.lock @@ -320,23 +320,23 @@ pytest = ">=7.0.0" [[package]] name = "setuptools" -version = "75.2.0" +version = "75.3.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, - {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, + {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, + {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", 
"platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] [[package]] name = "tomli" From 06637f4c3b848d939b7a8e83c1204d96f60174bf Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Fri, 15 Nov 2024 16:40:20 +0100 Subject: [PATCH 267/434] 
chore(pre-commit): reenable codespell and disable false positives (#1432) --- .github/workflows/ci.yml | 6 +++--- .pre-commit-config.yaml | 8 ++++---- pyproject.toml | 4 +--- tests/services/test_browser.py | 2 +- tests/services/test_info.py | 10 +++++----- tests/test_asyncio.py | 2 +- tests/test_core.py | 4 ++-- tests/test_handlers.py | 22 +++++++++++----------- tests/test_protocol.py | 6 +++--- 9 files changed, 31 insertions(+), 33 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2359c420..309707e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,11 +14,11 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.9" - - uses: pre-commit/action@v2.0.3 + python-version: "3.12" + - uses: pre-commit/action@v3.0.1 # Make sure commit messages follow the conventional commits convention: # https://www.conventionalcommits.org diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8b50394d..61669a36 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -44,10 +44,10 @@ repos: - id: ruff args: [--fix, --exit-non-zero-on-fix] - id: ruff-format - # - repo: https://github.com/codespell-project/codespell - # rev: v2.2.1 - # hooks: - # - id: codespell + - repo: https://github.com/codespell-project/codespell + rev: v2.3.0 + hooks: + - id: codespell - repo: https://github.com/PyCQA/flake8 rev: 7.1.1 hooks: diff --git a/pyproject.toml b/pyproject.toml index 4a9ca683..42b9ee0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -193,6 +193,4 @@ requires = ['setuptools>=65.4.1', 'wheel', 'Cython>=3.0.8', "poetry-core>=1.5.2" build-backend = "poetry.core.masonry.api" [tool.codespell] -skip = '*.po,*.ts,./tests,./bench' -count = '' -quiet-level = 3 +ignore-words-list = ["additionals", "HASS"] diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index dc9b1435..0afc5ebc 
100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -770,7 +770,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): @pytest.mark.asyncio async def test_asking_qm_questions(): - """Verify explictly asking QM questions.""" + """Verify explicitly asking QM questions.""" type_ = "_quservice._tcp.local." aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zeroconf_browser = aiozc.zeroconf diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 9d4a4958..9a5cbb7d 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -82,7 +82,7 @@ def test_service_info_rejects_non_matching_updates(self): service_server, addresses=[service_address], ) - # Verify backwards compatiblity with calling with None + # Verify backwards compatibility with calling with None info.async_update_records(zc, now, []) # Matching updates info.async_update_records( @@ -572,7 +572,7 @@ def get_service_info_helper(zc, type, name): helper_thread.start() wait_time = 1 - # Expext query for SRV, TXT, A, AAAA + # Expect query for SRV, TXT, A, AAAA send_event.wait(wait_time) assert last_sent is not None assert len(last_sent.questions) == 4 @@ -582,7 +582,7 @@ def get_service_info_helper(zc, type, name): assert r.DNSQuestion(service_name, const._TYPE_AAAA, const._CLASS_IN) in last_sent.questions assert service_info is None - # Expext no further queries + # Expect no further queries last_sent = None send_event.clear() _inject_response( @@ -1006,7 +1006,7 @@ def test_serviceinfo_accepts_bytes_or_string_dict(): def test_asking_qu_questions(): - """Verify explictly asking QU questions.""" + """Verify explicitly asking QU questions.""" type_ = "_quservice._tcp.local." 
zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) @@ -1030,7 +1030,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): def test_asking_qm_questions(): - """Verify explictly asking QM questions.""" + """Verify explicitly asking QM questions.""" type_ = "_quservice._tcp.local." zeroconf = r.Zeroconf(interfaces=["127.0.0.1"]) diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index a765a50a..54a8b400 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1331,7 +1331,7 @@ async def test_legacy_unicast_response(run_isolated): protocol.datagram_received(query.packets()[0], ("127.0.0.1", 6503)) calls = send_mock.mock_calls - # Verify the response is sent back on the socket it was recieved from + # Verify the response is sent back on the socket it was received from assert calls == [call(ANY, "127.0.0.1", 6503, (), protocol.transport)] outgoing = send_mock.call_args[0][0] assert isinstance(outgoing, DNSOutgoing) diff --git a/tests/test_core.py b/tests/test_core.py index fc2685fa..5159d2d0 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -348,7 +348,7 @@ def test_goodbye_all_services(): second_packet = out.packets() assert second_packet == first_packet - # Verify the registery is empty + # Verify the registry is empty out3 = zc.generate_unregister_all_services() assert out3 is None assert zc.registry.async_get_service_infos() == [] @@ -676,7 +676,7 @@ async def test_open_close_twice_from_async() -> None: """Test we can close twice from a coroutine when using Zeroconf. Ideally callers switch to using AsyncZeroconf, however there will - be a peroid where they still call the sync wrapper that we want + be a period where they still call the sync wrapper that we want to ensure will not deadlock on shutdown. 
This test is expected to throw warnings about tasks being destroyed diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 50816d2b..b98ef407 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -110,7 +110,7 @@ def _process_outgoing_packet(out): assert question_answers _process_outgoing_packet(construct_outgoing_multicast_answers(question_answers.mcast_aggregate)) - # The additonals should all be suppresed since they are all in the answers section + # The additonals should all be suppressed since they are all in the answers section # There will be one NSEC additional to indicate the lack of AAAA record # assert nbr_answers == 4 and nbr_additionals == 1 and nbr_authorities == 0 @@ -685,7 +685,7 @@ def _validate_complete_response(answers): assert not question_answers.mcast_aggregate _validate_complete_response(question_answers.mcast_now) - # With QU set and an authorative answer (probe) should respond to both unitcast + # With QU set and an authoritative answer (probe) should respond to both unitcast # and multicast since the response hasn't been seen since 75% of the ttl query = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(info.type, const._TYPE_PTR, const._CLASS_IN) @@ -744,7 +744,7 @@ def test_known_answer_supression(): now = current_time_millis() _clear_cache(zc) - # Test PTR supression + # Test PTR suppression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(type_, const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) @@ -768,7 +768,7 @@ def test_known_answer_supression(): assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second - # Test A supression + # Test A suppression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(server_name, const._TYPE_A, const._CLASS_IN) generated.add_question(question) @@ -809,7 +809,7 @@ def test_known_answer_supression(): assert not question_answers.mcast_aggregate assert not 
question_answers.mcast_aggregate_last_second - # Test SRV supression + # Test SRV suppression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(registration_name, const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) @@ -833,7 +833,7 @@ def test_known_answer_supression(): assert not question_answers.mcast_aggregate assert not question_answers.mcast_aggregate_last_second - # Test TXT supression + # Test TXT suppression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(registration_name, const._TYPE_TXT, const._CLASS_IN) generated.add_question(question) @@ -914,7 +914,7 @@ def test_multi_packet_known_answer_supression(): now = current_time_millis() _clear_cache(zc) - # Test PTR supression + # Test PTR suppression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(type_, const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) @@ -976,7 +976,7 @@ def test_known_answer_supression_service_type_enumeration_query(): now = current_time_millis() _clear_cache(zc) - # Test PTR supression + # Test PTR suppression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME, const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) @@ -1062,7 +1062,7 @@ def test_upper_case_enumeration_query(): zc.registry.async_add(info2) _clear_cache(zc) - # Test PTR supression + # Test PTR suppression generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) question = r.DNSQuestion(const._SERVICE_TYPE_ENUMERATION_NAME.upper(), const._TYPE_PTR, const._CLASS_IN) generated.add_question(question) @@ -1579,7 +1579,7 @@ async def test_duplicate_goodbye_answers_in_packet(): @pytest.mark.asyncio async def test_response_aggregation_timings(run_isolated): - """Verify multicast respones are aggregated.""" + """Verify multicast responses are aggregated.""" type_ = "_mservice._tcp.local." type_2 = "_mservice2._tcp.local." type_3 = "_mservice3._tcp.local." 
@@ -1949,7 +1949,7 @@ async def test_future_answers_are_removed_on_send(): # The answer should get removed because we just sent it assert info.dns_pointer() not in outgoing_queue.queue[0].answers - # But the one we have not sent yet shoudl still go out later + # But the one we have not sent yet should still go out later assert info2.dns_pointer() in outgoing_queue.queue[0].answers diff --git a/tests/test_protocol.py b/tests/test_protocol.py index ee9ed930..8f124c17 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -261,7 +261,7 @@ def test_dns_hinfo(self): self.assertRaises(r.NamePartTooLongException, generated.packets) def test_many_questions(self): - """Test many questions get seperated into multiple packets.""" + """Test many questions get separated into multiple packets.""" generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) questions = [] for i in range(100): @@ -281,7 +281,7 @@ def test_many_questions(self): assert len(parsed2.questions) == 15 def test_many_questions_with_many_known_answers(self): - """Test many questions and known answers get seperated into multiple packets.""" + """Test many questions and known answers get separated into multiple packets.""" generated = r.DNSOutgoing(const._FLAGS_QR_QUERY) questions = [] for _ in range(30): @@ -319,7 +319,7 @@ def test_many_questions_with_many_known_answers(self): assert not parsed3.truncated def test_massive_probe_packet_split(self): - """Test probe with many authorative answers.""" + """Test probe with many authoritative answers.""" generated = r.DNSOutgoing(const._FLAGS_QR_QUERY | const._FLAGS_AA) questions = [] for _ in range(30): From 0df2fc3b13fcd016d8608afbb968f442ac4bfb12 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 09:40:39 -0600 Subject: [PATCH 268/434] chore(pre-commit.ci): pre-commit autoupdate (#1427) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- 
.pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 61669a36..782d07ba 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v3.29.1 + rev: v3.30.1 hooks: - id: commitizen stages: [commit-msg] @@ -34,12 +34,12 @@ repos: - id: prettier args: ["--tab-width", "2"] - repo: https://github.com/asottile/pyupgrade - rev: v3.18.0 + rev: v3.19.0 hooks: - id: pyupgrade args: [--py37-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.0 + rev: v0.7.3 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] @@ -53,7 +53,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.12.1 + rev: v1.13.0 hooks: - id: mypy additional_dependencies: [] From 287b03ff8259f6e031ceb0c975c24427c706c707 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Fri, 15 Nov 2024 16:41:19 +0100 Subject: [PATCH 269/434] chore(dependabot): automatic Github Actions updates (#1435) --- .github/dependabot.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 9d866e39..ba2becff 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,6 +5,16 @@ version: 2 updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + commit-message: + prefix: "chore(ci): " + groups: + github-actions: + patterns: + - "*" - package-ecosystem: "pip" # See documentation for possible values directory: "/" # Location of package manifests schedule: From aff9d1f39a4218a6f420b1504aad70ab78bc3254 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Fri, 15 Nov 2024 16:42:27 +0100 Subject: [PATCH 270/434] chore(tests): remove outdated python 3.7 compatibility code (#1431) --- tests/test_core.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git 
a/tests/test_core.py b/tests/test_core.py index 5159d2d0..71245a5f 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -13,12 +13,7 @@ import unittest import unittest.mock from typing import Tuple, Union, cast -from unittest.mock import Mock, patch - -if sys.version_info[:3][1] < 8: - AsyncMock = Mock -else: - from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, Mock, patch import pytest @@ -748,7 +743,6 @@ def _background_register(): @pytest.mark.asyncio -@unittest.skipIf(sys.version_info[:3][1] < 8, "Requires Python 3.8 or later to patch _async_setup") @patch("zeroconf._core._STARTUP_TIMEOUT", 0) @patch("zeroconf._core.AsyncEngine._async_setup", new_callable=AsyncMock) async def test_event_loop_blocked(mock_start): From f59989270d96b2d84ceb0b2d85d961e613cb7d32 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Fri, 15 Nov 2024 16:42:48 +0100 Subject: [PATCH 271/434] chore(pre-commit): bump pyupgrade to required python 3.8 (#1429) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 782d07ba..61ab26f7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,7 +37,7 @@ repos: rev: v3.19.0 hooks: - id: pyupgrade - args: [--py37-plus] + args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.7.3 hooks: From 16f9527551f244f7454bd547a475644fcc0e9a25 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 09:43:35 -0600 Subject: [PATCH 272/434] chore(deps): bump async-timeout from 4.0.3 to 5.0.1 (#1438) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index f32c8580..209d6187 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "async-timeout" -version = "4.0.3" 
+version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] From 23b1958715fcc913c9be92378cce6d1c9b86c38e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 21 Nov 2024 07:38:56 -0600 Subject: [PATCH 273/434] chore(pre-commit.ci): pre-commit autoupdate (#1442) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 61ab26f7..1e3e7556 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v3.30.1 + rev: v3.31.0 hooks: - id: commitizen stages: [commit-msg] @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.3 + rev: v0.7.4 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 483d0673d4ae3eec37840452723fc1839a6cc95c Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Thu, 21 Nov 2024 14:39:39 +0100 Subject: [PATCH 274/434] fix(docs): update python to 3.8 (#1430) --- docs/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/index.rst b/docs/index.rst index 8929f417..7899fad9 100644 --- a/docs/index.rst 
+++ b/docs/index.rst @@ -16,7 +16,7 @@ PyPI (installable, stable distributions): https://pypi.org/project/zeroconf. You pip install zeroconf -python-zeroconf works with CPython 3.6+ and PyPy 3 implementing Python 3.6+. +python-zeroconf works with CPython 3.8+ and PyPy 3 implementing Python 3.8+. Contents -------- From f637c75f638ba20c193e58ff63c073a4003430b9 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Thu, 21 Nov 2024 14:39:57 +0100 Subject: [PATCH 275/434] fix(ci): run release workflow only on main repository (#1441) --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 309707e1..0b837157 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -97,6 +97,7 @@ jobs: - test - lint - commitlint + if: ${{ github.repository_owner }} == "python-zeroconf" runs-on: ubuntu-latest environment: release From d5e8550dfb3db4e52651b106d7ed0216baf0b253 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 21 Nov 2024 13:48:52 +0000 Subject: [PATCH 276/434] 0.136.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a15e049a..c6954e9f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.136.1 (2024-11-21) + +### Bug Fixes + +* fix(ci): run release workflow only on main repository (#1441) ([`f637c75`](https://github.com/python-zeroconf/python-zeroconf/commit/f637c75f638ba20c193e58ff63c073a4003430b9)) + +* fix(docs): update python to 3.8 (#1430) ([`483d067`](https://github.com/python-zeroconf/python-zeroconf/commit/483d0673d4ae3eec37840452723fc1839a6cc95c)) + + ## v0.136.0 (2024-10-26) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 42b9ee0e..32aade30 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = 
"0.136.0" +version = "0.136.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ec3a682f..b5c4612b 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.136.0" +__version__ = "0.136.1" __license__ = "LGPL" From 2ea705d850c1cb096c87372d5ec855f684603d01 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 21 Nov 2024 07:55:35 -0600 Subject: [PATCH 277/434] fix: retrigger release from failed github workflow (#1443) From 1b0d2f5f8bb7d360a466db72dadc63817f7cef10 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 21 Nov 2024 14:05:42 +0000 Subject: [PATCH 278/434] 0.136.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c6954e9f..7e80bfb7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ # CHANGELOG +## v0.136.2 (2024-11-21) + +### Bug Fixes + +* fix: retrigger release from failed github workflow (#1443) ([`2ea705d`](https://github.com/python-zeroconf/python-zeroconf/commit/2ea705d850c1cb096c87372d5ec855f684603d01)) + + ## v0.136.1 (2024-11-21) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 32aade30..1603963f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.136.1" +version = "0.136.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b5c4612b..9df63ad1 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.136.1" +__version__ = "0.136.2" __license__ = "LGPL" From 84596e07b01873c359bae4c4bd298c9367d9d9c3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 26 Nov 2024 22:08:22 -0600 Subject: [PATCH 279/434] chore(pre-commit.ci): pre-commit autoupdate (#1444) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- src/zeroconf/__init__.py | 28 ++++++++-------- src/zeroconf/_dns.py | 8 ++--- src/zeroconf/_engine.py | 10 +++--- src/zeroconf/_handlers/answers.py | 4 +-- .../_handlers/multicast_outgoing_queue.py | 8 ++--- src/zeroconf/_handlers/query_handler.py | 22 ++++++------- src/zeroconf/_handlers/record_manager.py | 2 +- src/zeroconf/_listener.py | 14 ++++---- src/zeroconf/_protocol/incoming.py | 24 +++++++------- src/zeroconf/_protocol/outgoing.py | 16 +++++----- src/zeroconf/_services/browser.py | 30 ++++++++--------- src/zeroconf/_services/info.py | 32 +++++++++---------- src/zeroconf/_services/registry.py | 2 +- src/zeroconf/_transport.py | 4 +-- src/zeroconf/_utils/ipaddress.py | 4 +-- src/zeroconf/asyncio.py | 4 +-- 17 files changed, 107 insertions(+), 107 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1e3e7556..3dfc075e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.4 + rev: v0.8.0 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 9df63ad1..e93eb4d2 100644 
--- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,27 +88,27 @@ __all__ = [ - "__version__", - "Zeroconf", - "ServiceInfo", - "ServiceBrowser", - "ServiceListener", + "AbstractMethodException", + "BadTypeInNameException", "DNSQuestionType", - "InterfaceChoice", - "ServiceStateChange", - "IPVersion", - "ZeroconfServiceTypes", - "RecordUpdate", - "RecordUpdateListener", - "current_time_millis", # Exceptions "Error", - "AbstractMethodException", - "BadTypeInNameException", "EventLoopBlocked", + "IPVersion", "IncomingDecodeError", + "InterfaceChoice", "NamePartTooLongException", "NonUniqueNameException", "NotRunningException", + "RecordUpdate", + "RecordUpdateListener", + "ServiceBrowser", + "ServiceInfo", + "ServiceListener", "ServiceNameAlreadyRegistered", + "ServiceStateChange", + "Zeroconf", + "ZeroconfServiceTypes", + "__version__", + "current_time_millis", ] diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 15daa709..fe48a2f4 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -64,7 +64,7 @@ class DNSQuestionType(enum.Enum): class DNSEntry: """A DNS entry""" - __slots__ = ("key", "name", "type", "class_", "unique") + __slots__ = ("class_", "key", "name", "type", "unique") def __init__(self, name: str, type_: int, class_: int) -> None: self.name = name @@ -157,7 +157,7 @@ def __repr__(self) -> str: class DNSRecord(DNSEntry): """A DNS record - like a DNS entry, but has a TTL""" - __slots__ = ("ttl", "created") + __slots__ = ("created", "ttl") # TODO: Switch to just int ttl def __init__( @@ -421,7 +421,7 @@ def __repr__(self) -> str: class DNSService(DNSRecord): """A DNS service record""" - __slots__ = ("_hash", "priority", "weight", "port", "server", "server_key") + __slots__ = ("_hash", "port", "priority", "server", "server_key", "weight") def __init__( self, @@ -542,7 +542,7 @@ def __repr__(self) -> str: class DNSRRSet: """A set of dns records with a lookup to get the ttl.""" - __slots__ = ("_records", "_lookup") + 
__slots__ = ("_lookup", "_records") def __init__(self, records: List[DNSRecord]) -> None: """Create an RRset from records sets.""" diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index e807d9ef..05f8c948 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -45,15 +45,15 @@ class AsyncEngine: """An engine wraps sockets in the event loop.""" __slots__ = ( + "_cleanup_timer", + "_listen_socket", + "_respond_sockets", "loop", - "zc", "protocols", "readers", - "senders", "running_event", - "_listen_socket", - "_respond_sockets", - "_cleanup_timer", + "senders", + "zc", ) def __init__( diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index bab2d749..7ddde197 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -44,7 +44,7 @@ class QuestionAnswers: """A group of answers to a question.""" - __slots__ = ("ucast", "mcast_now", "mcast_aggregate", "mcast_aggregate_last_second") + __slots__ = ("mcast_aggregate", "mcast_aggregate_last_second", "mcast_now", "ucast") def __init__( self, @@ -71,7 +71,7 @@ def __repr__(self) -> str: class AnswerGroup: """A group of answers scheduled to be sent at the same time.""" - __slots__ = ("send_after", "send_before", "answers") + __slots__ = ("answers", "send_after", "send_before") def __init__( self, diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py b/src/zeroconf/_handlers/multicast_outgoing_queue.py index afcefc01..caf6470b 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.py +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -45,12 +45,12 @@ class MulticastOutgoingQueue: """An outgoing queue used to aggregate multicast responses.""" __slots__ = ( - "zc", - "queue", - "_multicast_delay_random_min", - "_multicast_delay_random_max", "_additional_delay", "_aggregation_delay", + "_multicast_delay_random_max", + "_multicast_delay_random_min", + "queue", + "zc", ) def __init__(self, zeroconf: "Zeroconf", 
additional_delay: _int, max_aggregation_delay: _int) -> None: diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index 3acb1b44..ccfc7a77 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -71,7 +71,7 @@ class _AnswerStrategy: - __slots__ = ("question", "strategy_type", "types", "services") + __slots__ = ("question", "services", "strategy_type", "types") def __init__( self, @@ -91,15 +91,15 @@ class _QueryResponse: """A pair for unicast and multicast DNSOutgoing responses.""" __slots__ = ( - "_is_probe", - "_questions", - "_now", - "_cache", "_additionals", - "_ucast", - "_mcast_now", + "_cache", + "_is_probe", "_mcast_aggregate", "_mcast_aggregate_last_second", + "_mcast_now", + "_now", + "_questions", + "_ucast", ) def __init__(self, cache: DNSCache, questions: List[DNSQuestion], is_probe: bool, now: float) -> None: @@ -191,12 +191,12 @@ class QueryHandler: """Query the ServiceRegistry.""" __slots__ = ( - "zc", - "registry", "cache", - "question_history", - "out_queue", "out_delay_queue", + "out_queue", + "question_history", + "registry", + "zc", ) def __init__(self, zc: "Zeroconf") -> None: diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 53ab3ed1..0bb04996 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -40,7 +40,7 @@ class RecordManager: """Process records into the cache and notify listeners.""" - __slots__ = ("zc", "cache", "listeners") + __slots__ = ("cache", "listeners", "zc") def __init__(self, zeroconf: "Zeroconf") -> None: """Init the record manager.""" diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 4490965f..1980a820 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -55,17 +55,17 @@ class AsyncListener: the read() method called when a socket is available for reading.""" __slots__ = ( - "zc", - 
"_registry", - "_record_manager", + "_deferred", "_query_handler", + "_record_manager", + "_registry", + "_timers", "data", - "last_time", "last_message", - "transport", + "last_time", "sock_description", - "_deferred", - "_timers", + "transport", + "zc", ) def __init__(self, zc: "Zeroconf") -> None: diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index f7b1d773..d678c977 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -70,25 +70,25 @@ class DNSIncoming: """Object representation of an incoming DNS packet""" __slots__ = ( - "_did_read_others", - "flags", - "offset", - "data", - "view", + "_answers", "_data_len", + "_did_read_others", + "_has_qu_question", "_name_cache", - "_questions", - "_answers", - "id", - "_num_questions", + "_num_additionals", "_num_answers", "_num_authorities", - "_num_additionals", - "valid", + "_num_questions", + "_questions", + "data", + "flags", + "id", "now", + "offset", "scope_id", "source", - "_has_qu_question", + "valid", + "view", ) def __init__( diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index 9e9a5c87..b2eb9230 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -77,20 +77,20 @@ class DNSOutgoing: """Object representation of an outgoing packet""" __slots__ = ( - "flags", + "additionals", + "allow_long", + "answers", + "authorities", + "data", "finished", + "flags", "id", "multicast", - "packets_data", "names", - "data", + "packets_data", + "questions", "size", - "allow_long", "state", - "questions", - "answers", - "authorities", - "additionals", ) def __init__(self, flags: int, multicast: bool = True, id_: int = 0) -> None: diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 30361528..42aaa1ac 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -107,10 +107,10 @@ class _ScheduledPTRQuery: __slots__ 
= ( "alias", - "name", - "ttl", "cancelled", "expire_time_millis", + "name", + "ttl", "when_millis", ) @@ -189,7 +189,7 @@ def __gt__(self, other: "_ScheduledPTRQuery") -> bool: class _DNSPointerOutgoingBucket: """A DNSOutgoing bucket.""" - __slots__ = ("now_millis", "out", "bytes") + __slots__ = ("bytes", "now_millis", "out") def __init__(self, now_millis: float, multicast: bool) -> None: """Create a bucket to wrap a DNSOutgoing.""" @@ -328,20 +328,20 @@ class QueryScheduler: """ __slots__ = ( - "_zc", - "_types", "_addr", - "_port", - "_multicast", + "_clock_resolution_millis", "_first_random_delay_interval", - "_min_time_between_queries_millis", "_loop", - "_startup_queries_sent", + "_min_time_between_queries_millis", + "_multicast", + "_next_run", "_next_scheduled_for_alias", + "_port", "_query_heap", - "_next_run", - "_clock_resolution_millis", "_question_type", + "_startup_queries_sent", + "_types", + "_zc", ) def __init__( @@ -556,15 +556,15 @@ class _ServiceBrowserBase(RecordUpdateListener): """Base class for ServiceBrowser.""" __slots__ = ( - "types", - "zc", "_cache", "_loop", "_pending_handlers", + "_query_sender_task", "_service_state_changed", - "query_scheduler", "done", - "_query_sender_task", + "query_scheduler", + "types", + "zc", ) def __init__( diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 8a85ad10..f47addf6 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -138,28 +138,28 @@ class ServiceInfo(RecordUpdateListener): """ __slots__ = ( - "text", - "type", - "_name", - "key", + "_decoded_properties", + "_dns_address_cache", + "_dns_pointer_cache", + "_dns_service_cache", + "_dns_text_cache", + "_get_address_and_nsec_records_cache", "_ipv4_addresses", "_ipv6_addresses", + "_name", + "_new_records_futures", + "_properties", + "host_ttl", + "interface_index", + "key", + "other_ttl", "port", - "weight", "priority", "server", "server_key", - "_properties", - "_decoded_properties", - 
"host_ttl", - "other_ttl", - "interface_index", - "_new_records_futures", - "_dns_pointer_cache", - "_dns_service_cache", - "_dns_text_cache", - "_dns_address_cache", - "_get_address_and_nsec_records_cache", + "text", + "type", + "weight", ) def __init__( diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index 05ee14cb..4100c690 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -35,7 +35,7 @@ class ServiceRegistry: the event loop as it is not thread safe. """ - __slots__ = ("_services", "types", "servers", "has_entries") + __slots__ = ("_services", "has_entries", "servers", "types") def __init__( self, diff --git a/src/zeroconf/_transport.py b/src/zeroconf/_transport.py index f28c0029..b0811094 100644 --- a/src/zeroconf/_transport.py +++ b/src/zeroconf/_transport.py @@ -29,11 +29,11 @@ class _WrappedTransport: """A wrapper for transports.""" __slots__ = ( - "transport", + "fileno", "is_ipv6", "sock", - "fileno", "sock_name", + "transport", ) def __init__( diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index 72bb9ce8..8dc1f797 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -39,7 +39,7 @@ class ZeroconfIPv4Address(IPv4Address): - __slots__ = ("_str", "_is_link_local", "_is_unspecified", "_is_loopback", "__hash__", "zc_integer") + __slots__ = ("__hash__", "_is_link_local", "_is_loopback", "_is_unspecified", "_str", "zc_integer") def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a new IPv4 address.""" @@ -72,7 +72,7 @@ def is_loopback(self) -> bool: class ZeroconfIPv6Address(IPv6Address): - __slots__ = ("_str", "_is_link_local", "_is_unspecified", "_is_loopback", "__hash__", "zc_integer") + __slots__ = ("__hash__", "_is_link_local", "_is_loopback", "_is_unspecified", "_str", "zc_integer") def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a new IPv6 address.""" diff --git 
a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index 134ea3e0..926ef509 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -35,9 +35,9 @@ from .const import _BROWSER_TIME, _MDNS_PORT, _SERVICE_TYPE_ENUMERATION_NAME __all__ = [ - "AsyncZeroconf", - "AsyncServiceInfo", "AsyncServiceBrowser", + "AsyncServiceInfo", + "AsyncZeroconf", "AsyncZeroconfServiceTypes", ] From 88dcd31b31a66f41c093c265e2655d06a55bae4f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 13:23:13 -1000 Subject: [PATCH 280/434] chore(pre-commit.ci): pre-commit autoupdate (#1447) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3dfc075e..2ddedf0a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v3.31.0 + rev: v4.1.0 hooks: - id: commitizen stages: [commit-msg] @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.0 + rev: v0.8.3 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 581807b2b28099c5e32608a50b40f9fbced9c41a Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Thu, 19 Dec 2024 00:24:38 +0100 Subject: [PATCH 281/434] chore(ci): increase tested pypy to 3.9 and 3.10 (#1450) chore(ci): increase tested pypy to 3.9 and 3.10 --- .github/workflows/ci.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0b837157..f9f95d4a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -42,8 +42,8 @@ jobs: - "3.11" - "3.12" - "3.13" - - "pypy-3.8" - "pypy-3.9" + - "pypy-3.10" os: - ubuntu-latest - macos-latest @@ -56,14 +56,14 @@ jobs: extension: use_cython - os: windows-latest extension: use_cython - - os: windows-latest - 
python-version: "pypy-3.8" - os: windows-latest python-version: "pypy-3.9" - - os: macos-latest - python-version: "pypy-3.8" + - os: windows-latest + python-version: "pypy-3.10" - os: macos-latest python-version: "pypy-3.9" + - os: macos-latest + python-version: "pypy-3.10" runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v3 From e34feb6c19407c4374395d1b64213aa22f3b46af Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Thu, 19 Dec 2024 00:25:24 +0100 Subject: [PATCH 282/434] chore(pre-commit): remove duplicated hook (#1448) --- .pre-commit-config.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2ddedf0a..0dcc6b6b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: - - id: debug-statements - id: check-builtin-literals - id: check-case-conflict - id: check-docstring-first @@ -24,10 +23,10 @@ repos: - id: check-toml - id: check-xml - id: check-yaml + - id: debug-statements - id: detect-private-key - id: end-of-file-fixer - id: trailing-whitespace - - id: debug-statements - repo: https://github.com/pre-commit/mirrors-prettier rev: v4.0.0-alpha.8 hooks: From 1ec95c72146078dbf7915a97b22bd699361c8d28 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 13:25:48 -1000 Subject: [PATCH 283/434] chore(deps-dev): bump pytest from 8.3.3 to 8.3.4 (#1446) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 209d6187..dfd5ba1e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -248,13 +248,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pytest" -version = "8.3.3" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.3-py3-none-any.whl", 
hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, - {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] From 5d285107fbf5ca5a2332c08e7b508f11330b9e60 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 09:55:10 -1000 Subject: [PATCH 284/434] chore(pre-commit.ci): pre-commit autoupdate (#1454) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0dcc6b6b..7f19ac50 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,12 +33,12 @@ repos: - id: prettier args: ["--tab-width", "2"] - repo: https://github.com/asottile/pyupgrade - rev: v3.19.0 + rev: v3.19.1 hooks: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.3 + rev: v0.8.6 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] @@ -52,7 +52,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.13.0 + rev: v1.14.1 hooks: - id: mypy additional_dependencies: [] From 7b4a29b660afdde54538831bf93038fbd87459fa Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 6 Jan 2025 10:08:44 -1000 Subject: [PATCH 285/434] chore: drop Python 3.8 support (#1455) --- .github/workflows/ci.yml | 3 +- README.rst | 4 +- poetry.lock | 210 +++++++++++++++++++++------------------ pyproject.toml | 3 +- 4 files changed, 119 insertions(+), 101 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f9f95d4a..aa32bba0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,7 +36,6 @@ jobs: fail-fast: false matrix: python-version: - - "3.8" - "3.9" - "3.10" - "3.11" @@ -185,7 +184,7 @@ jobs: uses: pypa/cibuildwheel@v2.21.3 # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* *p38-*_aarch64 cp38-*_arm64 *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 3 diff --git a/README.rst b/README.rst index eba4d7fe..f16b7c2f 100644 --- a/README.rst +++ b/README.rst @@ -45,8 +45,8 @@ Compared to some other Zeroconf/Bonjour/Avahi Python packages, python-zeroconf: Python compatibility -------------------- -* CPython 3.8+ -* PyPy3.8 7.3+ +* CPython 3.9+ +* PyPy 3.9+ Versioning ---------- diff --git a/poetry.lock b/poetry.lock index dfd5ba1e..9b8dd711 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,83 +24,73 @@ files = [ [[package]] name = "coverage" -version = "7.6.1" +version = "7.6.10" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = 
"coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = 
"coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = 
"coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + 
{file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, ] [package.dependencies] @@ -186,13 +176,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, 
+ {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -222,13 +212,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -320,36 +310,66 @@ pytest = ">=7.0.0" [[package]] name = "setuptools" -version = "75.3.0" +version = "75.7.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.7.0-py3-none-any.whl", hash = "sha256:84fb203f278ebcf5cd08f97d3fb96d3fbed4b629d500b29ad60d11e00769b183"}, + {file = "setuptools-75.7.0.tar.gz", hash = "sha256:886ff7b16cd342f1d1defc16fc98c9ce3fde69e087a4e1983d7ab634e5f41f4f"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs 
(>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" optional = false -python-versions = 
">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = 
"tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "778ccbd9b059daea1ccbc3a93e0186fa30737e8c5234cdc04edf505a1f71606a" +python-versions = "^3.9" +content-hash = "6882a0df6f35a4a996584f0a1842c26ae6727c6d0a71eff64df0e23387d58e5d" diff --git a/pyproject.toml b/pyproject.toml index 1603963f..4c9a3b10 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,6 @@ classifiers=[ 'Operating System :: POSIX :: Linux', 'Operating System :: MacOS :: MacOS X', 'Topic :: Software Development :: Libraries', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', @@ -70,7 +69,7 @@ match = "(?!master$)" prerelease = true [tool.poetry.dependencies] -python = "^3.8" +python = "^3.9" async-timeout = {version = ">=3.0.0", python = "<3.11"} 
ifaddr = ">=0.1.7" From 9f6af54e52a5a5689f8ba615e7a2c6593dbcf461 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 6 Jan 2025 10:28:38 -1000 Subject: [PATCH 286/434] chore: enable codspeed benchmarks (#1456) --- .github/workflows/ci.yml | 19 ++ poetry.lock | 244 +++++++++++++++++++++++- pyproject.toml | 1 + tests/benchmarks/__init__.py | 0 tests/benchmarks/test_incoming.py | 185 ++++++++++++++++++ tests/benchmarks/test_outgoing.py | 168 ++++++++++++++++ tests/benchmarks/test_txt_properties.py | 19 ++ 7 files changed, 635 insertions(+), 1 deletion(-) create mode 100644 tests/benchmarks/__init__.py create mode 100644 tests/benchmarks/test_incoming.py create mode 100644 tests/benchmarks/test_outgoing.py create mode 100644 tests/benchmarks/test_txt_properties.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa32bba0..60d99f97 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -91,6 +91,25 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} + benchmark: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Setup Python 3.12 + uses: actions/setup-python@v4 + with: + python-version: 3.12 + - uses: snok/install-poetry@v1.3.4 + - name: Install Dependencies + run: | + REQUIRE_CYTHON=1 poetry install --only=main,dev + shell: bash + - name: Run benchmarks + uses: CodSpeedHQ/action@v3 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: poetry run pytest --no-cov -vvvvv --codspeed tests/benchmarks + release: needs: - test diff --git a/poetry.lock b/poetry.lock index 9b8dd711..e4a5fae5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,6 +11,85 @@ files = [ {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file 
= "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "colorama" version = "0.4.6" @@ -199,6 +278,29 @@ files = [ {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, ] +[[package]] +name = "importlib-metadata" +version = "8.5.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -210,6 +312,41 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "packaging" version = "24.2" @@ -236,6 +373,31 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pytest" version = "8.3.4" @@ -276,6 +438,37 @@ pytest = ">=8.2,<9" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "pytest-codspeed" +version = "3.1.0" +description = "Pytest plugin to create CodSpeed benchmarks" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb7c16e5a64cb30bad30f5204c7690f3cbc9ae5b9839ce187ef1727aa5d2d9c"}, + {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23910893c22ceef6efbdf85d80e803b7fb4a231c9e7676ab08f5ddfc228438"}, + {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb1495a633a33e15268a1f97d91a4809c868de06319db50cf97b4e9fa426372c"}, + {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd8a54b99207bd25a4c3f64d9a83ac0f3def91cdd87204ca70a49f822ba919c"}, + {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4d1ac896ebaea5b365e69b41319b4d09b57dab85ec6234f6ff26116b3795f03"}, + {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f0c1857a0a6cce6a23c49f98c588c2eef66db353c76ecbb2fb65c1a2b33a8d5"}, + {file = 
"pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4731a7cf1d8d38f58140d51faa69b7c1401234c59d9759a2507df570c805b11"}, + {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f2e4b63260f65493b8d42c8167f831b8ed90788f81eb4eb95a103ee6aa4294"}, + {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db44099b3f1ec1c9c41f0267c4d57d94e31667f4cb3fb4b71901561e8ab8bc98"}, + {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a533c1ad3cc60f07be432864c83d1769ce2877753ac778e1bfc5a9821f5c6ddf"}, + {file = "pytest_codspeed-3.1.0.tar.gz", hash = "sha256:f29641d27b4ded133b1058a4c859e510a2612ad4217ef9a839ba61750abd2f8a"}, +] + +[package.dependencies] +cffi = ">=1.17.1" +importlib-metadata = {version = ">=8.5.0", markers = "python_version < \"3.10\""} +pytest = ">=3.8" +rich = ">=13.8.1" + +[package.extras] +compat = ["pytest-benchmark (>=5.0.0,<5.1.0)", "pytest-xdist (>=3.6.1,<3.7.0)"] +lint = ["mypy (>=1.11.2,<1.12.0)", "ruff (>=0.6.5,<0.7.0)"] +test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] + [[package]] name = "pytest-cov" version = "5.0.0" @@ -308,6 +501,25 @@ files = [ [package.dependencies] pytest = ">=7.0.0" +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version 
= ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "setuptools" version = "75.7.0" @@ -369,7 +581,37 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "6882a0df6f35a4a996584f0a1842c26ae6727c6d0a71eff64df0e23387d58e5d" +content-hash = "b2255f56e331fb25e626030bf4ad11e7424d28cb1b7dd0310b9c704ee39bb0e1" diff --git a/pyproject.toml b/pyproject.toml index 4c9a3b10..ce61d0a4 100644 --- a/pyproject.toml +++ b/pyproject.toml 
@@ -80,6 +80,7 @@ pytest-asyncio = ">=0.20.3,<0.25.0" cython = "^3.0.5" setuptools = ">=65.6.3,<76.0.0" pytest-timeout = "^2.1.0" +pytest-codspeed = "^3.1.0" [tool.ruff] target-version = "py38" diff --git a/tests/benchmarks/__init__.py b/tests/benchmarks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/benchmarks/test_incoming.py b/tests/benchmarks/test_incoming.py new file mode 100644 index 00000000..6285c19f --- /dev/null +++ b/tests/benchmarks/test_incoming.py @@ -0,0 +1,185 @@ +"""Benchmark for DNSIncoming.""" + +import socket +from typing import List + +from pytest_codspeed import BenchmarkFixture + +from zeroconf import ( + DNSAddress, + DNSIncoming, + DNSNsec, + DNSOutgoing, + DNSService, + DNSText, + const, +) + + +def generate_packets() -> List[bytes]: + out = DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) + address = socket.inet_pton(socket.AF_INET, "192.168.208.5") + + additionals = [ + { + "name": "HASS Bridge ZJWH FF5137._hap._tcp.local.", + "address": address, + "port": 51832, + "text": b"\x13md=HASS Bridge" + b" ZJWH\x06pv=1.0\x14id=01:6B:30:FF:51:37\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=L0m/aQ==", + }, + { + "name": "HASS Bridge 3K9A C2582A._hap._tcp.local.", + "address": address, + "port": 51834, + "text": b"\x13md=HASS Bridge" + b" 3K9A\x06pv=1.0\x14id=E2:AA:5B:C2:58:2A\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=b2CnzQ==", + }, + { + "name": "Master Bed TV CEDB27._hap._tcp.local.", + "address": address, + "port": 51830, + "text": b"\x10md=Master Bed" + b" TV\x06pv=1.0\x14id=9E:B7:44:CE:DB:27\x05c#=18\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=CVj1kw==", + }, + { + "name": "Living Room TV 921B77._hap._tcp.local.", + "address": address, + "port": 51833, + "text": b"\x11md=Living Room" + b" TV\x06pv=1.0\x14id=11:61:E7:92:1B:77\x05c#=17\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=qU77SQ==", + }, + { + "name": "HASS Bridge ZC8X FF413D._hap._tcp.local.", + "address": address, + "port": 
51829, + "text": b"\x13md=HASS Bridge" + b" ZC8X\x06pv=1.0\x14id=96:14:45:FF:41:3D\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=b0QZlg==", + }, + { + "name": "HASS Bridge WLTF 4BE61F._hap._tcp.local.", + "address": address, + "port": 51837, + "text": b"\x13md=HASS Bridge" + b" WLTF\x06pv=1.0\x14id=E0:E7:98:4B:E6:1F\x04c#=2\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=ahAISA==", + }, + { + "name": "FrontdoorCamera 8941D1._hap._tcp.local.", + "address": address, + "port": 54898, + "text": b"\x12md=FrontdoorCamera\x06pv=1.0\x14id=9F:B7:DC:89:41:D1\x04c#=2\x04" + b"s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=0+MXmA==", + }, + { + "name": "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", + "address": address, + "port": 51836, + "text": b"\x13md=HASS Bridge" + b" W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=6fLM5A==", + }, + { + "name": "HASS Bridge Y9OO EFF0A7._hap._tcp.local.", + "address": address, + "port": 51838, + "text": b"\x13md=HASS Bridge" + b" Y9OO\x06pv=1.0\x14id=D3:FE:98:EF:F0:A7\x04c#=2\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=u3bdfw==", + }, + { + "name": "Snooze Room TV 6B89B0._hap._tcp.local.", + "address": address, + "port": 51835, + "text": b"\x11md=Snooze Room" + b" TV\x06pv=1.0\x14id=5F:D5:70:6B:89:B0\x05c#=17\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=xNTqsg==", + }, + { + "name": "AlexanderHomeAssistant 74651D._hap._tcp.local.", + "address": address, + "port": 54811, + "text": b"\x19md=AlexanderHomeAssistant\x06pv=1.0\x14id=59:8A:0B:74:65:1D\x05" + b"c#=14\x04s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=ccZLPA==", + }, + { + "name": "HASS Bridge OS95 39C053._hap._tcp.local.", + "address": address, + "port": 51831, + "text": b"\x13md=HASS Bridge" + b" OS95\x06pv=1.0\x14id=7E:8C:E6:39:C0:53\x05c#=12\x04s#=1\x04ff=0\x04ci=2" + b"\x04sf=0\x0bsh=Xfe5LQ==", + }, + ] + + out.add_answer_at_time( + DNSText( + "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 
const._DNS_OTHER_TTL, + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", + ), + 0, + ) + + for record in additionals: + out.add_additional_answer( + DNSService( + record["name"], # type: ignore + const._TYPE_SRV, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_HOST_TTL, + 0, + 0, + record["port"], # type: ignore + record["name"], # type: ignore + ) + ) + out.add_additional_answer( + DNSText( + record["name"], # type: ignore + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + record["text"], # type: ignore + ) + ) + out.add_additional_answer( + DNSAddress( + record["name"], # type: ignore + const._TYPE_A, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_HOST_TTL, + record["address"], # type: ignore + ) + ) + out.add_additional_answer( + DNSNsec( + record["name"], # type: ignore + const._TYPE_NSEC, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + record["name"], # type: ignore + [const._TYPE_TXT, const._TYPE_SRV], + ) + ) + + return out.packets() + + +packets = generate_packets() + + +def test_parse_incoming_message(benchmark: BenchmarkFixture) -> None: + @benchmark + def parse_incoming_message() -> None: + for packet in packets: + DNSIncoming(packet).answers # noqa: B018 + break diff --git a/tests/benchmarks/test_outgoing.py b/tests/benchmarks/test_outgoing.py new file mode 100644 index 00000000..5b7ee164 --- /dev/null +++ b/tests/benchmarks/test_outgoing.py @@ -0,0 +1,168 @@ +"""Benchmark for DNSOutgoing.""" + +import socket + +from pytest_codspeed import BenchmarkFixture + +from zeroconf import DNSAddress, DNSOutgoing, DNSService, DNSText, const +from zeroconf._protocol.outgoing import State + + +def generate_packets() -> DNSOutgoing: + out = DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) + address = socket.inet_pton(socket.AF_INET, "192.168.208.5") + + additionals = [ + { + "name": "HASS Bridge ZJWH 
FF5137._hap._tcp.local.", + "address": address, + "port": 51832, + "text": b"\x13md=HASS Bridge" + b" ZJWH\x06pv=1.0\x14id=01:6B:30:FF:51:37\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=L0m/aQ==", + }, + { + "name": "HASS Bridge 3K9A C2582A._hap._tcp.local.", + "address": address, + "port": 51834, + "text": b"\x13md=HASS Bridge" + b" 3K9A\x06pv=1.0\x14id=E2:AA:5B:C2:58:2A\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=b2CnzQ==", + }, + { + "name": "Master Bed TV CEDB27._hap._tcp.local.", + "address": address, + "port": 51830, + "text": b"\x10md=Master Bed" + b" TV\x06pv=1.0\x14id=9E:B7:44:CE:DB:27\x05c#=18\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=CVj1kw==", + }, + { + "name": "Living Room TV 921B77._hap._tcp.local.", + "address": address, + "port": 51833, + "text": b"\x11md=Living Room" + b" TV\x06pv=1.0\x14id=11:61:E7:92:1B:77\x05c#=17\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=qU77SQ==", + }, + { + "name": "HASS Bridge ZC8X FF413D._hap._tcp.local.", + "address": address, + "port": 51829, + "text": b"\x13md=HASS Bridge" + b" ZC8X\x06pv=1.0\x14id=96:14:45:FF:41:3D\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=b0QZlg==", + }, + { + "name": "HASS Bridge WLTF 4BE61F._hap._tcp.local.", + "address": address, + "port": 51837, + "text": b"\x13md=HASS Bridge" + b" WLTF\x06pv=1.0\x14id=E0:E7:98:4B:E6:1F\x04c#=2\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=ahAISA==", + }, + { + "name": "FrontdoorCamera 8941D1._hap._tcp.local.", + "address": address, + "port": 54898, + "text": b"\x12md=FrontdoorCamera\x06pv=1.0\x14id=9F:B7:DC:89:41:D1\x04c#=2\x04" + b"s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=0+MXmA==", + }, + { + "name": "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", + "address": address, + "port": 51836, + "text": b"\x13md=HASS Bridge" + b" W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=6fLM5A==", + }, + { + "name": "HASS Bridge Y9OO EFF0A7._hap._tcp.local.", + "address": address, + "port": 51838, + "text": 
b"\x13md=HASS Bridge" + b" Y9OO\x06pv=1.0\x14id=D3:FE:98:EF:F0:A7\x04c#=2\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=u3bdfw==", + }, + { + "name": "Snooze Room TV 6B89B0._hap._tcp.local.", + "address": address, + "port": 51835, + "text": b"\x11md=Snooze Room" + b" TV\x06pv=1.0\x14id=5F:D5:70:6B:89:B0\x05c#=17\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=xNTqsg==", + }, + { + "name": "AlexanderHomeAssistant 74651D._hap._tcp.local.", + "address": address, + "port": 54811, + "text": b"\x19md=AlexanderHomeAssistant\x06pv=1.0\x14id=59:8A:0B:74:65:1D\x05" + b"c#=14\x04s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=ccZLPA==", + }, + { + "name": "HASS Bridge OS95 39C053._hap._tcp.local.", + "address": address, + "port": 51831, + "text": b"\x13md=HASS Bridge" + b" OS95\x06pv=1.0\x14id=7E:8C:E6:39:C0:53\x05c#=12\x04s#=1\x04ff=0\x04ci=2" + b"\x04sf=0\x0bsh=Xfe5LQ==", + }, + ] + + out.add_answer_at_time( + DNSText( + "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", + ), + 0, + ) + + for record in additionals: + out.add_additional_answer( + DNSService( + record["name"], # type: ignore + const._TYPE_SRV, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_HOST_TTL, + 0, + 0, + record["port"], # type: ignore + record["name"], # type: ignore + ) + ) + out.add_additional_answer( + DNSText( + record["name"], # type: ignore + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + record["text"], # type: ignore + ) + ) + out.add_additional_answer( + DNSAddress( + record["name"], # type: ignore + const._TYPE_A, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_HOST_TTL, + record["address"], # type: ignore + ) + ) + + return out + + +out = generate_packets() + + +def test_parse_outgoing_message(benchmark: BenchmarkFixture) -> None: + @benchmark + def 
make_outgoing_message() -> None: + out.packets() + out.state = State.init.value + out.finished = False + out._reset_for_next_packet() diff --git a/tests/benchmarks/test_txt_properties.py b/tests/benchmarks/test_txt_properties.py new file mode 100644 index 00000000..ad75ab35 --- /dev/null +++ b/tests/benchmarks/test_txt_properties.py @@ -0,0 +1,19 @@ +from pytest_codspeed import BenchmarkFixture + +from zeroconf import ServiceInfo + +info = ServiceInfo( + "_test._tcp.local.", + "test._test._tcp.local.", + properties=( + b"\x19md=AlexanderHomeAssistant\x06pv=1.0\x14id=59:8A:0B:74:65:1D\x05" + b"c#=14\x04s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=ccZLPA==" + ), +) + + +def test_txt_properties(benchmark: BenchmarkFixture) -> None: + @benchmark + def process_properties() -> None: + info._properties = None + info.properties # noqa: B018 From 783c1b37d1372c90dfce658c66d03aa753afbf49 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 6 Jan 2025 11:31:25 -1000 Subject: [PATCH 287/434] feat: speed up parsing incoming records (#1458) --- src/zeroconf/_dns.pxd | 20 ++++++- src/zeroconf/_dns.py | 92 +++++++++++++++++++++++++---- src/zeroconf/_protocol/incoming.pxd | 12 +++- src/zeroconf/_protocol/incoming.py | 31 +++++++--- 4 files changed, 129 insertions(+), 26 deletions(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index d4116a66..6e432a77 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -30,7 +30,7 @@ cdef class DNSEntry: cdef public cython.uint class_ cdef public bint unique - cdef _set_class(self, cython.uint class_) + cdef _fast_init_entry(self, str name, cython.uint type_, cython.uint class_) cdef bint _dns_entry_matches(self, DNSEntry other) @@ -38,6 +38,8 @@ cdef class DNSQuestion(DNSEntry): cdef public cython.int _hash + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_) + cpdef bint answered_by(self, DNSRecord rec) cdef class DNSRecord(DNSEntry): @@ -45,6 +47,8 @@ cdef class DNSRecord(DNSEntry): cdef public 
cython.float ttl cdef public double created + cdef _fast_init_record(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, double created) + cdef bint _suppressed_by_answer(self, DNSRecord answer) @cython.locals( @@ -69,9 +73,11 @@ cdef class DNSRecord(DNSEntry): cdef class DNSAddress(DNSRecord): cdef public cython.int _hash - cdef public object address + cdef public bytes address cdef public object scope_id + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, bytes address, object scope_id, double created) + cdef bint _eq(self, DNSAddress other) cpdef write(self, DNSOutgoing out) @@ -83,6 +89,8 @@ cdef class DNSHinfo(DNSRecord): cdef public str cpu cdef public str os + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, str cpu, str os, double created) + cdef bint _eq(self, DNSHinfo other) cpdef write(self, DNSOutgoing out) @@ -93,6 +101,8 @@ cdef class DNSPointer(DNSRecord): cdef public str alias cdef public str alias_key + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, str alias, double created) + cdef bint _eq(self, DNSPointer other) cpdef write(self, DNSOutgoing out) @@ -102,6 +112,8 @@ cdef class DNSText(DNSRecord): cdef public cython.int _hash cdef public bytes text + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, bytes text, double created) + cdef bint _eq(self, DNSText other) cpdef write(self, DNSOutgoing out) @@ -115,6 +127,8 @@ cdef class DNSService(DNSRecord): cdef public str server cdef public str server_key + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, cython.uint priority, cython.uint weight, cython.uint port, str server, double created) + cdef bint _eq(self, DNSService other) cpdef write(self, DNSOutgoing out) @@ -125,6 +139,8 @@ cdef class DNSNsec(DNSRecord): cdef public object next_name cdef public cython.list rdtypes + cdef 
_fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, str next_name, cython.list rdtypes, double created) + cdef bint _eq(self, DNSNsec other) cpdef write(self, DNSOutgoing out) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index fe48a2f4..471376e9 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -67,12 +67,13 @@ class DNSEntry: __slots__ = ("class_", "key", "name", "type", "unique") def __init__(self, name: str, type_: int, class_: int) -> None: + self._fast_init_entry(name, type_, class_) + + def _fast_init_entry(self, name: str, type_: _int, class_: _int) -> None: + """Fast init for reuse.""" self.name = name self.key = name.lower() self.type = type_ - self._set_class(class_) - - def _set_class(self, class_: _int) -> None: self.class_ = class_ & _CLASS_MASK self.unique = (class_ & _CLASS_UNIQUE) != 0 @@ -111,7 +112,11 @@ class DNSQuestion(DNSEntry): __slots__ = ("_hash",) def __init__(self, name: str, type_: int, class_: int) -> None: - super().__init__(name, type_, class_) + self._fast_init(name, type_, class_) + + def _fast_init(self, name: str, type_: _int, class_: _int) -> None: + """Fast init for reuse.""" + self._fast_init_entry(name, type_, class_) self._hash = hash((self.key, type_, self.class_)) def answered_by(self, rec: "DNSRecord") -> bool: @@ -168,9 +173,13 @@ def __init__( ttl: Union[float, int], created: Optional[float] = None, ) -> None: - super().__init__(name, type_, class_) + self._fast_init_record(name, type_, class_, ttl, created or current_time_millis()) + + def _fast_init_record(self, name: str, type_: _int, class_: _int, ttl: _float, created: _float) -> None: + """Fast init for reuse.""" + self._fast_init_entry(name, type_, class_) self.ttl = ttl - self.created = created or current_time_millis() + self.created = created def __eq__(self, other: Any) -> bool: # pylint: disable=no-self-use """Abstract method""" @@ -248,7 +257,20 @@ def __init__( scope_id: Optional[int] = None, created: 
Optional[float] = None, ) -> None: - super().__init__(name, type_, class_, ttl, created) + self._fast_init(name, type_, class_, ttl, address, scope_id, created or current_time_millis()) + + def _fast_init( + self, + name: str, + type_: _int, + class_: _int, + ttl: _float, + address: bytes, + scope_id: Optional[_int], + created: _float, + ) -> None: + """Fast init for reuse.""" + self._fast_init_record(name, type_, class_, ttl, created) self.address = address self.scope_id = scope_id self._hash = hash((self.key, type_, self.class_, address, scope_id)) @@ -300,7 +322,13 @@ def __init__( os: str, created: Optional[float] = None, ) -> None: - super().__init__(name, type_, class_, ttl, created) + self._fast_init(name, type_, class_, ttl, cpu, os, created or current_time_millis()) + + def _fast_init( + self, name: str, type_: _int, class_: _int, ttl: _float, cpu: str, os: str, created: _float + ) -> None: + """Fast init for reuse.""" + self._fast_init_record(name, type_, class_, ttl, created) self.cpu = cpu self.os = os self._hash = hash((self.key, type_, self.class_, cpu, os)) @@ -341,7 +369,12 @@ def __init__( alias: str, created: Optional[float] = None, ) -> None: - super().__init__(name, type_, class_, ttl, created) + self._fast_init(name, type_, class_, ttl, alias, created or current_time_millis()) + + def _fast_init( + self, name: str, type_: _int, class_: _int, ttl: _float, alias: str, created: _float + ) -> None: + self._fast_init_record(name, type_, class_, ttl, created) self.alias = alias self.alias_key = alias.lower() self._hash = hash((self.key, type_, self.class_, self.alias_key)) @@ -391,7 +424,12 @@ def __init__( text: bytes, created: Optional[float] = None, ) -> None: - super().__init__(name, type_, class_, ttl, created) + self._fast_init(name, type_, class_, ttl, text, created or current_time_millis()) + + def _fast_init( + self, name: str, type_: _int, class_: _int, ttl: _float, text: bytes, created: _float + ) -> None: + self._fast_init_record(name, 
type_, class_, ttl, created) self.text = text self._hash = hash((self.key, type_, self.class_, text)) @@ -435,7 +473,23 @@ def __init__( server: str, created: Optional[float] = None, ) -> None: - super().__init__(name, type_, class_, ttl, created) + self._fast_init( + name, type_, class_, ttl, priority, weight, port, server, created or current_time_millis() + ) + + def _fast_init( + self, + name: str, + type_: _int, + class_: _int, + ttl: _float, + priority: _int, + weight: _int, + port: _int, + server: str, + created: _float, + ) -> None: + self._fast_init_record(name, type_, class_, ttl, created) self.priority = priority self.weight = weight self.port = port @@ -483,12 +537,24 @@ def __init__( name: str, type_: int, class_: int, - ttl: int, + ttl: Union[int, float], next_name: str, rdtypes: List[int], created: Optional[float] = None, ) -> None: - super().__init__(name, type_, class_, ttl, created) + self._fast_init(name, type_, class_, ttl, next_name, rdtypes, created or current_time_millis()) + + def _fast_init( + self, + name: str, + type_: _int, + class_: _int, + ttl: _float, + next_name: str, + rdtypes: List[_int], + created: _float, + ) -> None: + self._fast_init_record(name, type_, class_, ttl, created) self.next_name = next_name self.rdtypes = sorted(rdtypes) self._hash = hash((self.key, type_, self.class_, next_name, *self.rdtypes)) diff --git a/src/zeroconf/_protocol/incoming.pxd b/src/zeroconf/_protocol/incoming.pxd index bb438303..feaa2a02 100644 --- a/src/zeroconf/_protocol/incoming.pxd +++ b/src/zeroconf/_protocol/incoming.pxd @@ -97,7 +97,7 @@ cdef class DNSIncoming: ) cdef void _read_others(self) - @cython.locals(offset="unsigned int") + @cython.locals(offset="unsigned int", question=DNSQuestion) cdef _read_questions(self) @cython.locals( @@ -109,9 +109,15 @@ cdef class DNSIncoming: @cython.locals( name_start="unsigned int", - offset="unsigned int" + offset="unsigned int", + address_rec=DNSAddress, + pointer_rec=DNSPointer, + text_rec=DNSText, + 
srv_rec=DNSService, + hinfo_rec=DNSHinfo, + nsec_rec=DNSNsec, ) - cdef _read_record(self, object domain, unsigned int type_, unsigned int class_, unsigned int ttl, unsigned int length) + cdef _read_record(self, str domain, unsigned int type_, unsigned int class_, unsigned int ttl, unsigned int length) @cython.locals( offset="unsigned int", diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index d678c977..5347f50d 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -246,7 +246,8 @@ def _read_questions(self) -> None: # The question has 2 unsigned shorts in network order type_ = view[offset] << 8 | view[offset + 1] class_ = view[offset + 2] << 8 | view[offset + 3] - question = DNSQuestion(name, type_, class_) + question = DNSQuestion.__new__(DNSQuestion) + question._fast_init(name, type_, class_) if question.unique: # QU questions use the same bit as unique self._has_qu_question = True questions.append(question) @@ -306,11 +307,17 @@ def _read_record( ) -> Optional[DNSRecord]: """Read known records types and skip unknown ones.""" if type_ == _TYPE_A: - return DNSAddress(domain, type_, class_, ttl, self._read_string(4), None, self.now) + address_rec = DNSAddress.__new__(DNSAddress) + address_rec._fast_init(domain, type_, class_, ttl, self._read_string(4), None, self.now) + return address_rec if type_ in (_TYPE_CNAME, _TYPE_PTR): - return DNSPointer(domain, type_, class_, ttl, self._read_name(), self.now) + pointer_rec = DNSPointer.__new__(DNSPointer) + pointer_rec._fast_init(domain, type_, class_, ttl, self._read_name(), self.now) + return pointer_rec if type_ == _TYPE_TXT: - return DNSText(domain, type_, class_, ttl, self._read_string(length), self.now) + text_rec = DNSText.__new__(DNSText) + text_rec._fast_init(domain, type_, class_, ttl, self._read_string(length), self.now) + return text_rec if type_ == _TYPE_SRV: view = self.view offset = self.offset @@ -319,7 +326,8 @@ def _read_record( 
priority = view[offset] << 8 | view[offset + 1] weight = view[offset + 2] << 8 | view[offset + 3] port = view[offset + 4] << 8 | view[offset + 5] - return DNSService( + srv_rec = DNSService.__new__(DNSService) + srv_rec._fast_init( domain, type_, class_, @@ -330,8 +338,10 @@ def _read_record( self._read_name(), self.now, ) + return srv_rec if type_ == _TYPE_HINFO: - return DNSHinfo( + hinfo_rec = DNSHinfo.__new__(DNSHinfo) + hinfo_rec._fast_init( domain, type_, class_, @@ -340,8 +350,10 @@ def _read_record( self._read_character_string(), self.now, ) + return hinfo_rec if type_ == _TYPE_AAAA: - return DNSAddress( + address_rec = DNSAddress.__new__(DNSAddress) + address_rec._fast_init( domain, type_, class_, @@ -350,9 +362,11 @@ def _read_record( self.scope_id, self.now, ) + return address_rec if type_ == _TYPE_NSEC: name_start = self.offset - return DNSNsec( + nsec_rec = DNSNsec.__new__(DNSNsec) + nsec_rec._fast_init( domain, type_, class_, @@ -361,6 +375,7 @@ def _read_record( self._read_bitmap(name_start + length), self.now, ) + return nsec_rec # Try to ignore types we don't know about # Skip the payload for the resource record so the next # records can be parsed correctly From 6a48fac061bf5921b2df5729881661de8baa5dd4 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 6 Jan 2025 21:40:49 +0000 Subject: [PATCH 288/434] 0.137.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e80bfb7..f5efa70f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ # CHANGELOG +## v0.137.0 (2025-01-06) + +### Features + +* feat: speed up parsing incoming records (#1458) ([`783c1b3`](https://github.com/python-zeroconf/python-zeroconf/commit/783c1b37d1372c90dfce658c66d03aa753afbf49)) + + ## v0.136.2 (2024-11-21) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 
ce61d0a4..65e0c40d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.136.2" +version = "0.137.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index e93eb4d2..3e14b846 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.136.2" +__version__ = "0.137.0" __license__ = "LGPL" From 4ff48a01bc76c82e5710aafaf6cf6e79c069cd85 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 6 Jan 2025 11:58:29 -1000 Subject: [PATCH 289/434] fix: move wheel builds to macos-13 (#1459) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 60d99f97..f6125518 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -165,7 +165,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest, windows-2019, macos-12, macos-latest] + os: [ubuntu-latest, windows-2019, macos-13, macos-latest] steps: - uses: actions/checkout@v3 From 9dc0eff4f9c605b4281970b044ec2a4cdf9aa27d Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 6 Jan 2025 22:16:19 +0000 Subject: [PATCH 290/434] 0.137.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f5efa70f..38353724 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ # CHANGELOG +## v0.137.1 (2025-01-06) + +### Bug Fixes + +* fix: move wheel builds to macos-13 (#1459) 
([`4ff48a0`](https://github.com/python-zeroconf/python-zeroconf/commit/4ff48a01bc76c82e5710aafaf6cf6e79c069cd85)) + + ## v0.137.0 (2025-01-06) ### Features diff --git a/pyproject.toml b/pyproject.toml index 65e0c40d..a1a41ae7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.137.0" +version = "0.137.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 3e14b846..46bfa4d2 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.137.0" +__version__ = "0.137.1" __license__ = "LGPL" From be05f0dc4f6b2431606031a7bb24585728d15f01 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 6 Jan 2025 13:34:41 -1000 Subject: [PATCH 291/434] fix: split wheel builds to avoid timeout (#1461) --- .github/workflows/ci.yml | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f6125518..520bf35e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -166,6 +166,14 @@ jobs: strategy: matrix: os: [ubuntu-latest, windows-2019, macos-13, macos-latest] + musl: ["", "musllinux"] + exclude: + - os: windows-2019 + musl: "musllinux" + - os: macos-13 + musl: "musllinux" + - os: macos-latest + musl: "musllinux" steps: - uses: actions/checkout@v3 @@ -199,11 +207,23 @@ jobs: with: platforms: arm64 - - name: Build wheels - uses: pypa/cibuildwheel@v2.21.3 + - name: Build wheels (non-musl) + uses: pypa/cibuildwheel@v2.22.0 + if: matrix.musl == '' + # to supply options, put them in 'env', like: + env: + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 
*p310-*_aarch64 pp*_aarch64 *musllinux* + CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc + CIBW_ARCHS_LINUX: auto aarch64 + CIBW_BUILD_VERBOSITY: 3 + REQUIRE_CYTHON: 1 + + - name: Build wheels (musl) + uses: pypa/cibuildwheel@v2.22.0 + if: matrix.musl == 'musllinux' # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux*_aarch64 + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *manylinux* CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc CIBW_ARCHS_LINUX: auto aarch64 CIBW_BUILD_VERBOSITY: 3 From 44e92d48071dbbab592623b511968447cee13548 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 6 Jan 2025 23:48:59 +0000 Subject: [PATCH 292/434] 0.137.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 38353724..d313ef3e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ # CHANGELOG +## v0.137.2 (2025-01-06) + +### Bug Fixes + +* fix: split wheel builds to avoid timeout (#1461) ([`be05f0d`](https://github.com/python-zeroconf/python-zeroconf/commit/be05f0dc4f6b2431606031a7bb24585728d15f01)) + + ## v0.137.1 (2025-01-06) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index a1a41ae7..8343292d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.137.1" +version = "0.137.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 46bfa4d2..6d246715 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.137.1" +__version__ = "0.137.2" __license__ = "LGPL" From bd845386ffc36a2762cdccdc6490234557b9036e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 7 Jan 2025 12:23:26 -1000 Subject: [PATCH 293/434] chore: add codspeed badge (#1463) --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index f16b7c2f..297d8080 100644 --- a/README.rst +++ b/README.rst @@ -10,6 +10,10 @@ python-zeroconf .. image:: https://codecov.io/gh/python-zeroconf/python-zeroconf/branch/master/graph/badge.svg :target: https://codecov.io/gh/python-zeroconf/python-zeroconf +.. image:: https://img.shields.io/endpoint?url=https://codspeed.io/badge.json + :target: https://codspeed.io/python-zeroconf/python-zeroconf + :alt: Codspeed.io status for python-zeroconf + `Documentation `_. 
This is fork of pyzeroconf, Multicast DNS Service Discovery for Python, From d46fe855e9a3d6f16df8c7f51d1d06c61b8e5694 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Wed, 8 Jan 2025 21:13:10 +0100 Subject: [PATCH 294/434] chore: remove legacy code = (3, 9, 0) - _IPVersion_All_value = IPVersion.All.value _IPVersion_V4Only_value = IPVersion.V4Only.value # https://datatracker.ietf.org/doc/html/rfc6762#section-5.2 @@ -250,7 +247,7 @@ def addresses(self, value: List[bytes]) -> None: self._get_address_and_nsec_records_cache = None for address in value: - if IPADDRESS_SUPPORTS_SCOPE_ID and len(address) == 16 and self.interface_index is not None: + if len(address) == 16 and self.interface_index is not None: addr = ip_bytes_and_scope_to_address(address, self.interface_index) else: addr = cached_ip_addresses(address) diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index 8dc1f797..64cdfb63 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -20,22 +20,15 @@ USA """ -import sys -from functools import lru_cache +from functools import cache, lru_cache from ipaddress import AddressValueError, IPv4Address, IPv6Address, NetmaskValueError from typing import Any, Optional, Union from .._dns import DNSAddress from ..const import _TYPE_AAAA -if sys.version_info >= (3, 9, 0): - from functools import cache -else: - cache = lru_cache(maxsize=None) - bytes_ = bytes int_ = int -IPADDRESS_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) class ZeroconfIPv4Address(IPv4Address): @@ -128,7 +121,7 @@ def get_ip_address_object_from_record( record: DNSAddress, ) -> Optional[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: """Get the IP address object from the record.""" - if IPADDRESS_SUPPORTS_SCOPE_ID and record.type == _TYPE_AAAA and record.scope_id: + if record.type == _TYPE_AAAA and record.scope_id: return ip_bytes_and_scope_to_address(record.address, record.scope_id) return cached_ip_addresses_wrapper(record.address) @@ -146,7 
+139,7 @@ def ip_bytes_and_scope_to_address( def str_without_scope_id(addr: Union[ZeroconfIPv4Address, ZeroconfIPv6Address]) -> str: """Return the string representation of the address without the scope id.""" - if IPADDRESS_SUPPORTS_SCOPE_ID and addr.version == 6: + if addr.version == 6: address_str = str(addr) return address_str.partition("%")[0] return str(addr) diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 9a5cbb7d..ad05f824 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -7,7 +7,6 @@ import logging import os import socket -import sys import threading import unittest from ipaddress import ip_address @@ -704,7 +703,6 @@ def test_multiple_addresses(): assert info.addresses == [address, address] assert info.parsed_addresses() == [address_parsed, address_parsed] assert info.parsed_scoped_addresses() == [address_parsed, address_parsed] - ipaddress_supports_scope_id = sys.version_info >= (3, 9, 0) if has_working_ipv6() and not os.environ.get("SKIP_IPV6"): address_v6_parsed = "2001:db8::1" @@ -751,9 +749,7 @@ def test_multiple_addresses(): assert info.ip_addresses_by_version(r.IPVersion.All) == [ ip_address(address), ip_address(address_v6), - ip_address(address_v6_ll_scoped_parsed) - if ipaddress_supports_scope_id - else ip_address(address_v6_ll), + ip_address(address_v6_ll_scoped_parsed), ] assert info.addresses_by_version(r.IPVersion.V4Only) == [address] assert info.ip_addresses_by_version(r.IPVersion.V4Only) == [ip_address(address)] @@ -763,9 +759,7 @@ def test_multiple_addresses(): ] assert info.ip_addresses_by_version(r.IPVersion.V6Only) == [ ip_address(address_v6), - ip_address(address_v6_ll_scoped_parsed) - if ipaddress_supports_scope_id - else ip_address(address_v6_ll), + ip_address(address_v6_ll_scoped_parsed), ] assert info.parsed_addresses() == [ address_parsed, @@ -780,16 +774,15 @@ def test_multiple_addresses(): assert info.parsed_scoped_addresses() == [ address_parsed, address_v6_parsed, - 
address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, + address_v6_ll_scoped_parsed, ] assert info.parsed_scoped_addresses(r.IPVersion.V4Only) == [address_parsed] assert info.parsed_scoped_addresses(r.IPVersion.V6Only) == [ address_v6_parsed, - address_v6_ll_scoped_parsed if ipaddress_supports_scope_id else address_v6_ll_parsed, + address_v6_ll_scoped_parsed, ] -@unittest.skipIf(sys.version_info < (3, 9, 0), "Requires newer python") def test_scoped_addresses_from_cache(): type_ = "_http._tcp.local." registration_name = f"scoped.{type_}" diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index ddade486..35d11913 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ -2,10 +2,6 @@ """Unit tests for zeroconf._utils.ipaddress.""" -import sys - -import pytest - from zeroconf import const from zeroconf._dns import DNSAddress from zeroconf._utils import ipaddress @@ -52,7 +48,6 @@ def test_cached_ip_addresses_wrapper(): assert ipv6.is_unspecified is True -@pytest.mark.skipif(sys.version_info < (3, 9, 0), reason="scope_id is not supported") def test_get_ip_address_object_from_record(): """Test the get_ip_address_object_from_record.""" # not link local From 45c82d1bbcc1de163487e6b55ccdb528ee952cc0 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Wed, 8 Jan 2025 21:16:25 +0100 Subject: [PATCH 295/434] chore(git): make scripts with shebang executable (#1433) Co-authored-by: J. 
Nick Koston --- .pre-commit-config.yaml | 1 + examples/async_apple_scanner.py | 2 +- examples/async_browser.py | 2 +- examples/async_registration.py | 3 ++- examples/async_service_info_request.py | 3 ++- examples/browser.py | 2 +- examples/registration.py | 2 +- examples/resolver.py | 2 +- examples/self_test.py | 2 +- tests/conftest.py | 3 --- tests/services/test_browser.py | 3 --- tests/services/test_info.py | 3 --- tests/services/test_registry.py | 3 --- tests/services/test_types.py | 3 --- tests/test_asyncio.py | 3 --- tests/test_cache.py | 3 --- tests/test_core.py | 3 --- tests/test_dns.py | 3 --- tests/test_engine.py | 3 --- tests/test_exceptions.py | 3 --- tests/test_handlers.py | 3 --- tests/test_history.py | 3 --- tests/test_init.py | 3 --- tests/test_listener.py | 3 --- tests/test_logger.py | 3 --- tests/test_protocol.py | 3 --- tests/test_services.py | 3 --- tests/test_updates.py | 3 --- tests/utils/test_asyncio.py | 3 --- tests/utils/test_ipaddress.py | 2 -- tests/utils/test_name.py | 3 --- tests/utils/test_net.py | 3 --- 32 files changed, 11 insertions(+), 76 deletions(-) mode change 100644 => 100755 examples/async_apple_scanner.py mode change 100644 => 100755 examples/async_browser.py mode change 100644 => 100755 examples/async_registration.py mode change 100644 => 100755 examples/async_service_info_request.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7f19ac50..5c4c252f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,6 +20,7 @@ repos: - id: check-case-conflict - id: check-docstring-first - id: check-json + - id: check-shebang-scripts-are-executable - id: check-toml - id: check-xml - id: check-yaml diff --git a/examples/async_apple_scanner.py b/examples/async_apple_scanner.py old mode 100644 new mode 100755 index 29eb5f70..1d2c5306 --- a/examples/async_apple_scanner.py +++ b/examples/async_apple_scanner.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python """Scan for apple devices.""" 
diff --git a/examples/async_browser.py b/examples/async_browser.py old mode 100644 new mode 100755 index bc5f252e..78be3a4c --- a/examples/async_browser.py +++ b/examples/async_browser.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python """Example of browsing for a service. diff --git a/examples/async_registration.py b/examples/async_registration.py old mode 100644 new mode 100755 index a75b5566..56cb91f2 --- a/examples/async_registration.py +++ b/examples/async_registration.py @@ -1,4 +1,5 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python + """Example of announcing 250 services (in this case, a fake HTTP server).""" import argparse diff --git a/examples/async_service_info_request.py b/examples/async_service_info_request.py old mode 100644 new mode 100755 index 31864756..b904fd89 --- a/examples/async_service_info_request.py +++ b/examples/async_service_info_request.py @@ -1,4 +1,5 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python + """Example of perodic dump of homekit services. This example is useful when a user wants an ondemand diff --git a/examples/browser.py b/examples/browser.py index aebf3f5d..4e7b7610 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python """Example of browsing for a service. 
diff --git a/examples/registration.py b/examples/registration.py index 5be9f45d..1c42d890 100755 --- a/examples/registration.py +++ b/examples/registration.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python """Example of announcing a service (in this case, a fake HTTP server)""" diff --git a/examples/resolver.py b/examples/resolver.py index e7a11f82..1b74f97e 100755 --- a/examples/resolver.py +++ b/examples/resolver.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python """Example of resolving a service with a known name""" diff --git a/examples/self_test.py b/examples/self_test.py index 35f83b06..1fec3921 100755 --- a/examples/self_test.py +++ b/examples/self_test.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python import logging import socket diff --git a/tests/conftest.py b/tests/conftest.py index 5dfd900f..ba49cef6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """conftest for zeroconf tests.""" import threading diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 0afc5ebc..f3b977fb 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._services.browser.""" import asyncio diff --git a/tests/services/test_info.py b/tests/services/test_info.py index ad05f824..5573eed1 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._services.info.""" import asyncio diff --git a/tests/services/test_registry.py b/tests/services/test_registry.py index d3f60179..999e422c 100644 --- a/tests/services/test_registry.py +++ b/tests/services/test_registry.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._services.registry.""" import socket diff --git a/tests/services/test_types.py b/tests/services/test_types.py index f50ea42c..811b22c5 100644 --- 
a/tests/services/test_types.py +++ b/tests/services/test_types.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._services.types.""" import logging diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 54a8b400..2471733b 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for aio.py.""" import asyncio diff --git a/tests/test_cache.py b/tests/test_cache.py index 363fcb0e..b39a58c8 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._cache.""" import logging diff --git a/tests/test_core.py b/tests/test_core.py index 71245a5f..82055968 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._core""" import asyncio diff --git a/tests/test_dns.py b/tests/test_dns.py index 95d4b553..f44affc8 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._dns.""" import logging diff --git a/tests/test_engine.py b/tests/test_engine.py index 88307e32..79560d9c 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._engine""" import asyncio diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 1373d6c3..1f5bd738 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._exceptions""" import logging diff --git a/tests/test_handlers.py b/tests/test_handlers.py index b98ef407..7b7abcea 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._handlers""" import asyncio diff --git a/tests/test_history.py b/tests/test_history.py index 659e67f8..c604d383 100644 --- a/tests/test_history.py +++ b/tests/test_history.py @@ -1,6 
+1,3 @@ -#!/usr/bin/env python - - """Unit tests for _history.py.""" from typing import Set diff --git a/tests/test_init.py b/tests/test_init.py index d7a01224..3ae695c5 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf.py""" import logging diff --git a/tests/test_listener.py b/tests/test_listener.py index f6752af7..f5af91f8 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._listener""" import logging diff --git a/tests/test_logger.py b/tests/test_logger.py index 7a9b4867..ecaf9dd0 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for logger.py.""" import logging diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 8f124c17..1feb64c5 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._protocol""" import copy diff --git a/tests/test_services.py b/tests/test_services.py index 7cc075e7..908782c7 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._services.""" import logging diff --git a/tests/test_updates.py b/tests/test_updates.py index 2ebaee89..1af85736 100644 --- a/tests/test_updates.py +++ b/tests/test_updates.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._updates.""" import logging diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index 7b086fbc..f22d85ed 100644 --- a/tests/utils/test_asyncio.py +++ b/tests/utils/test_asyncio.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._utils.asyncio.""" import asyncio diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index 35d11913..c6f63aaf 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ 
-1,5 +1,3 @@ -#!/usr/bin/env python - """Unit tests for zeroconf._utils.ipaddress.""" from zeroconf import const diff --git a/tests/utils/test_name.py b/tests/utils/test_name.py index c814e094..6f2c6b13 100644 --- a/tests/utils/test_name.py +++ b/tests/utils/test_name.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._utils.name.""" import socket diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index a89ea565..17212af2 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - - """Unit tests for zeroconf._utils.net.""" import errno From ebbb2afccabd3841a3cb0a39824b49773cc6258a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 8 Jan 2025 10:29:36 -1000 Subject: [PATCH 296/434] feat: improve performance of processing incoming records (#1467) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- src/zeroconf/_handlers/record_manager.pxd | 4 ++-- src/zeroconf/_handlers/record_manager.py | 8 ++++++-- src/zeroconf/_record_update.pxd | 2 ++ src/zeroconf/_record_update.py | 8 +++++++- 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index 5be2c283..d4e068c2 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -6,12 +6,11 @@ from .._dns cimport DNSQuestion, DNSRecord from .._protocol.incoming cimport DNSIncoming from .._updates cimport RecordUpdateListener from .._utils.time cimport current_time_millis - +from .._record_update cimport RecordUpdate cdef cython.float _DNS_PTR_MIN_TTL cdef cython.uint _TYPE_PTR cdef object _ADDRESS_RECORD_TYPES -cdef object RecordUpdate cdef bint TYPE_CHECKING cdef object _TYPE_PTR @@ -31,6 +30,7 @@ cdef class RecordManager: record=DNSRecord, answers=cython.list, maybe_entry=DNSRecord, + rec_update=RecordUpdate ) cpdef void async_updates_from_response(self, 
DNSIncoming msg) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 0bb04996..5f25ceb1 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -120,11 +120,15 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: address_adds.append(record) else: other_adds.append(record) - updates.append(RecordUpdate(record, maybe_entry)) + rec_update = RecordUpdate.__new__(RecordUpdate) + rec_update._fast_init(record, maybe_entry) + updates.append(rec_update) # This is likely a goodbye since the record is # expired and exists in the cache elif maybe_entry is not None: - updates.append(RecordUpdate(record, maybe_entry)) + rec_update = RecordUpdate.__new__(RecordUpdate) + rec_update._fast_init(record, maybe_entry) + updates.append(rec_update) removes.add(record) if unique_types: diff --git a/src/zeroconf/_record_update.pxd b/src/zeroconf/_record_update.pxd index d1b18cbe..1562299b 100644 --- a/src/zeroconf/_record_update.pxd +++ b/src/zeroconf/_record_update.pxd @@ -8,3 +8,5 @@ cdef class RecordUpdate: cdef public DNSRecord new cdef public DNSRecord old + + cdef void _fast_init(self, object new, object old) diff --git a/src/zeroconf/_record_update.py b/src/zeroconf/_record_update.py index 880b7a1b..912ab6f1 100644 --- a/src/zeroconf/_record_update.py +++ b/src/zeroconf/_record_update.py @@ -24,12 +24,18 @@ from ._dns import DNSRecord +_DNSRecord = DNSRecord + class RecordUpdate: __slots__ = ("new", "old") - def __init__(self, new: DNSRecord, old: Optional[DNSRecord] = None): + def __init__(self, new: DNSRecord, old: Optional[DNSRecord] = None) -> None: """RecordUpdate represents a change in a DNS record.""" + self._fast_init(new, old) + + def _fast_init(self, new: _DNSRecord, old: Optional[_DNSRecord]) -> None: + """Fast init for RecordUpdate.""" self.new = new self.old = old From afd4517f7ca9147a3cfbaef979e01ff81fd639d7 Mon Sep 17 00:00:00 2001 From: semantic-release 
Date: Wed, 8 Jan 2025 20:30:53 +0000 Subject: [PATCH 297/434] 0.138.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d313ef3e..a2610460 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.138.0 (2025-01-08) + +### Features + +* feat: improve performance of processing incoming records (#1467) + +Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> ([`ebbb2af`](https://github.com/python-zeroconf/python-zeroconf/commit/ebbb2afccabd3841a3cb0a39824b49773cc6258a)) + + ## v0.137.2 (2025-01-06) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 8343292d..b2905ec9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.137.2" +version = "0.138.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 6d246715..ffc066bb 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.137.2" +__version__ = "0.138.0" __license__ = "LGPL" From e05055c584ca46080990437b2b385a187bc48458 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 8 Jan 2025 11:48:24 -1000 Subject: [PATCH 298/434] fix: ensure cache does not return stale created and ttl values (#1469) --- src/zeroconf/_cache.pxd | 15 ++++++-- src/zeroconf/_cache.py | 24 +++++++------ tests/test_cache.py | 79 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 105 insertions(+), 13 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index d4417466..a1402c22 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -47,12 +47,13 @@ cdef class DNSCache: ) cpdef list async_all_by_details(self, str name, unsigned int type_, unsigned int class_) - cpdef cython.dict async_entries_with_name(self, str name) + cpdef list async_entries_with_name(self, str name) - cpdef cython.dict async_entries_with_server(self, str name) + cpdef list async_entries_with_server(self, str name) @cython.locals( cached_entry=DNSRecord, + records=dict ) cpdef DNSRecord get_by_details(self, str name, unsigned int type_, unsigned int class_) @@ -79,7 +80,15 @@ cdef class DNSCache: ) cpdef void async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, double now) - cpdef entries_with_name(self, str name) + @cython.locals( + entries=dict + ) + cpdef list entries_with_name(self, str name) + + @cython.locals( + entries=dict + ) + cpdef list entries_with_server(self, str server) @cython.locals( record=DNSRecord, diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index f34c4c16..b6c9b82d 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -149,26 +149,26 @@ def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DN matches: List[DNSRecord] = [] if records is None: return matches - for record in records: + for record in records.values(): if type_ == record.type and class_ == record.class_: matches.append(record) return matches - def async_entries_with_name(self, name: str) -> Dict[DNSRecord, DNSRecord]: + def 
async_entries_with_name(self, name: str) -> List[DNSRecord]: """Returns a dict of entries whose key matches the name. This function is not threadsafe and must be called from the event loop. """ - return self.cache.get(name.lower()) or {} + return self.entries_with_name(name) - def async_entries_with_server(self, name: str) -> Dict[DNSRecord, DNSRecord]: + def async_entries_with_server(self, name: str) -> List[DNSRecord]: """Returns a dict of entries whose key matches the server. This function is not threadsafe and must be called from the event loop. """ - return self.service_cache.get(name.lower()) or {} + return self.entries_with_server(name) # The below functions are threadsafe and do not need to be run in the # event loop, however they all make copies so they significantly @@ -179,7 +179,7 @@ def get(self, entry: DNSEntry) -> Optional[DNSRecord]: matching entry.""" if isinstance(entry, _UNIQUE_RECORD_TYPES): return self.cache.get(entry.key, {}).get(entry) - for cached_entry in reversed(list(self.cache.get(entry.key, []))): + for cached_entry in reversed(list(self.cache.get(entry.key, {}).values())): if entry.__eq__(cached_entry): return cached_entry return None @@ -200,7 +200,7 @@ def get_by_details(self, name: str, type_: _int, class_: _int) -> Optional[DNSRe records = self.cache.get(key) if records is None: return None - for cached_entry in reversed(list(records)): + for cached_entry in reversed(list(records.values())): if type_ == cached_entry.type and class_ == cached_entry.class_: return cached_entry return None @@ -211,15 +211,19 @@ def get_all_by_details(self, name: str, type_: _int, class_: _int) -> List[DNSRe records = self.cache.get(key) if records is None: return [] - return [entry for entry in list(records) if type_ == entry.type and class_ == entry.class_] + return [entry for entry in list(records.values()) if type_ == entry.type and class_ == entry.class_] def entries_with_server(self, server: str) -> List[DNSRecord]: """Returns a list of entries 
whose server matches the name.""" - return list(self.service_cache.get(server.lower(), [])) + if entries := self.service_cache.get(server.lower()): + return list(entries.values()) + return [] def entries_with_name(self, name: str) -> List[DNSRecord]: """Returns a list of entries whose key matches the name.""" - return list(self.cache.get(name.lower(), [])) + if entries := self.cache.get(name.lower()): + return list(entries.values()) + return [] def current_entry_with_name_and_alias(self, name: str, alias: str) -> Optional[DNSRecord]: now = current_time_millis() diff --git a/tests/test_cache.py b/tests/test_cache.py index b39a58c8..63f23373 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -279,3 +279,82 @@ def test_name(self): cache = r.DNSCache() cache.async_add_records([record1, record2]) assert cache.names() == ["irrelevant"] + + +def test_async_entries_with_name_returns_newest_record(): + cache = r.DNSCache() + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=1.0) + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=2.0) + cache.async_add_records([record1]) + cache.async_add_records([record2]) + assert next(iter(cache.async_entries_with_name("a"))) is record2 + + +def test_async_entries_with_server_returns_newest_record(): + cache = r.DNSCache() + record1 = r.DNSService("a", const._TYPE_SRV, const._CLASS_IN, 1, 1, 1, 1, "a", created=1.0) + record2 = r.DNSService("a", const._TYPE_SRV, const._CLASS_IN, 1, 1, 1, 1, "a", created=2.0) + cache.async_add_records([record1]) + cache.async_add_records([record2]) + assert next(iter(cache.async_entries_with_server("a"))) is record2 + + +def test_async_get_returns_newest_record(): + cache = r.DNSCache() + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=1.0) + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=2.0) + cache.async_add_records([record1]) + cache.async_add_records([record2]) + assert 
cache.get(record2) is record2 + + +def test_async_get_returns_newest_nsec_record(): + cache = r.DNSCache() + record1 = r.DNSNsec("a", const._TYPE_NSEC, const._CLASS_IN, 1, "a", [], created=1.0) + record2 = r.DNSNsec("a", const._TYPE_NSEC, const._CLASS_IN, 1, "a", [], created=2.0) + cache.async_add_records([record1]) + cache.async_add_records([record2]) + assert cache.get(record2) is record2 + + +def test_get_by_details_returns_newest_record(): + cache = r.DNSCache() + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=1.0) + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=2.0) + cache.async_add_records([record1]) + cache.async_add_records([record2]) + assert cache.get_by_details("a", const._TYPE_A, const._CLASS_IN) is record2 + + +def test_get_all_by_details_returns_newest_record(): + cache = r.DNSCache() + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=1.0) + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=2.0) + cache.async_add_records([record1]) + cache.async_add_records([record2]) + records = cache.get_all_by_details("a", const._TYPE_A, const._CLASS_IN) + assert len(records) == 1 + assert records[0] is record2 + + +def test_async_get_all_by_details_returns_newest_record(): + cache = r.DNSCache() + record1 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=1.0) + record2 = r.DNSAddress("a", const._TYPE_A, const._CLASS_IN, 1, b"a", created=2.0) + cache.async_add_records([record1]) + cache.async_add_records([record2]) + records = cache.async_all_by_details("a", const._TYPE_A, const._CLASS_IN) + assert len(records) == 1 + assert records[0] is record2 + + +def test_async_get_unique_returns_newest_record(): + cache = r.DNSCache() + record1 = r.DNSPointer("a", const._TYPE_PTR, const._CLASS_IN, 1, "a", created=1.0) + record2 = r.DNSPointer("a", const._TYPE_PTR, const._CLASS_IN, 1, "a", created=2.0) + cache.async_add_records([record1]) + 
cache.async_add_records([record2]) + record = cache.async_get_unique(record1) + assert record is record2 + record = cache.async_get_unique(record2) + assert record is record2 From 6de7bb6315ddd909a60fd0c7a02dadf04b408454 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 8 Jan 2025 21:57:38 +0000 Subject: [PATCH 299/434] 0.138.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a2610460..00c797c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ # CHANGELOG +## v0.138.1 (2025-01-08) + +### Bug Fixes + +* fix: ensure cache does not return stale created and ttl values (#1469) ([`e05055c`](https://github.com/python-zeroconf/python-zeroconf/commit/e05055c584ca46080990437b2b385a187bc48458)) + + ## v0.138.0 (2025-01-08) ### Features diff --git a/pyproject.toml b/pyproject.toml index b2905ec9..61481c40 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.138.0" +version = "0.138.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index ffc066bb..cf146490 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.138.0" +__version__ = "0.138.1" __license__ = "LGPL" From 09db1848957b34415f364b7338e4adce99b57abc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 8 Jan 2025 14:16:02 -1000 Subject: [PATCH 300/434] feat: implement heapq for tracking cache expire times (#1465) --- src/zeroconf/_cache.pxd | 16 ++- src/zeroconf/_cache.py | 62 +++++++++++- src/zeroconf/_dns.pxd | 4 +- src/zeroconf/_dns.py | 12 +-- src/zeroconf/_handlers/record_manager.py | 12 +-- tests/services/test_browser.py | 4 +- tests/services/test_info.py | 2 +- tests/test_cache.py | 123 +++++++++++++++++++++++ tests/test_dns.py | 55 +++++----- tests/test_handlers.py | 34 +++++-- 10 files changed, 263 insertions(+), 61 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index a1402c22..7f78a736 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -11,10 +11,14 @@ from ._dns cimport ( DNSText, ) +cdef object heappop +cdef object heappush +cdef object heapify cdef object _UNIQUE_RECORD_TYPES cdef unsigned int _TYPE_PTR cdef cython.uint _ONE_SECOND +cdef unsigned int _MIN_SCHEDULED_RECORD_EXPIRATION @cython.locals( record_cache=dict, @@ -26,6 +30,8 @@ cdef class DNSCache: cdef public cython.dict cache cdef public cython.dict service_cache + cdef public list _expire_heap + cdef public dict _expirations cpdef bint async_add_records(self, object entries) @@ -65,7 +71,8 @@ cdef class DNSCache: @cython.locals( store=cython.dict, - service_record=DNSService + service_record=DNSService, + when=object ) cdef bint _async_add(self, DNSRecord record) @@ -95,3 +102,10 @@ cdef class DNSCache: now=double ) cpdef current_entry_with_name_and_alias(self, str name, str alias) + + cpdef void _async_set_created_ttl( + self, + DNSRecord record, + double now, + cython.float ttl + ) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index b6c9b82d..a43bdc5c 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -20,6 +20,7 @@ USA """ +from heapq import heapify, heappop, heappush from typing import Dict, Iterable, List, Optional, Set, Tuple, Union, cast from ._dns import ( @@ -43,6 +44,11 @@ 
_float = float _int = int +# The minimum number of scheduled record expirations before we start cleaning up +# the expiration heap. This is a performance optimization to avoid cleaning up the +# heap too often when there are only a few scheduled expirations. +_MIN_SCHEDULED_RECORD_EXPIRATION = 100 + def _remove_key(cache: _DNSRecordCacheType, key: _str, record: _DNSRecord) -> None: """Remove a key from a DNSRecord cache @@ -60,6 +66,8 @@ class DNSCache: def __init__(self) -> None: self.cache: _DNSRecordCacheType = {} + self._expire_heap: List[Tuple[float, DNSRecord]] = [] + self._expirations: Dict[DNSRecord, float] = {} self.service_cache: _DNSRecordCacheType = {} # Functions prefixed with async_ are NOT threadsafe and must @@ -81,6 +89,12 @@ def _async_add(self, record: _DNSRecord) -> bool: store = self.cache.setdefault(record.key, {}) new = record not in store and not isinstance(record, DNSNsec) store[record] = record + when = record.created + (record.ttl * 1000) + if self._expirations.get(record) != when: + # Avoid adding duplicates to the heap + heappush(self._expire_heap, (when, record)) + self._expirations[record] = when + if isinstance(record, DNSService): service_record = record self.service_cache.setdefault(record.server_key, {})[service_record] = service_record @@ -108,6 +122,7 @@ def _async_remove(self, record: _DNSRecord) -> None: service_record = record _remove_key(self.service_cache, service_record.server_key, service_record) _remove_key(self.cache, record.key, record) + self._expirations.pop(record, None) def async_remove_records(self, entries: Iterable[DNSRecord]) -> None: """Remove multiple records. @@ -121,8 +136,44 @@ def async_expire(self, now: _float) -> List[DNSRecord]: """Purge expired entries from the cache. This function must be run in from event loop. + + :param now: The current time in milliseconds. 
""" - expired = [record for records in self.cache.values() for record in records if record.is_expired(now)] + if not (expire_heap_len := len(self._expire_heap)): + return [] + + expired: List[DNSRecord] = [] + # Find any expired records and add them to the to-delete list + while self._expire_heap: + when, record = self._expire_heap[0] + if when > now: + break + heappop(self._expire_heap) + # Check if the record hasn't been re-added to the heap + # with a different expiration time as it will be removed + # later when it reaches the top of the heap and its + # expiration time is met. + if self._expirations.get(record) == when: + expired.append(record) + + # If the expiration heap grows larger than the number expirations + # times two, we clean it up to avoid keeping expired entries in + # the heap and consuming memory. We guard this with a minimum + # threshold to avoid cleaning up the heap too often when there are + # only a few scheduled expirations. + if ( + expire_heap_len > _MIN_SCHEDULED_RECORD_EXPIRATION + and expire_heap_len > len(self._expirations) * 2 + ): + # Remove any expired entries from the expiration heap + # that do not match the expiration time in the expirations + # as it means the record has been re-added to the heap + # with a different expiration time. 
+ self._expire_heap = [ + entry for entry in self._expire_heap if self._expirations.get(entry[1]) == entry[0] + ] + heapify(self._expire_heap) + self.async_remove_records(expired) return expired @@ -256,4 +307,11 @@ def async_mark_unique_records_older_than_1s_to_expire( created_double = record.created if (now - created_double > _ONE_SECOND) and record not in answers_rrset: # Expire in 1s - record.set_created_ttl(now, 1) + self._async_set_created_ttl(record, now, 1) + + def _async_set_created_ttl(self, record: DNSRecord, now: _float, ttl: _float) -> None: + """Set the created time and ttl of a record.""" + # It would be better if we made a copy instead of mutating the record + # in place, but records currently don't have a copy method. + record._set_created_ttl(now, ttl) + self._async_add(record) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 6e432a77..e41ac4c3 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -66,9 +66,7 @@ cdef class DNSRecord(DNSEntry): cpdef bint is_recent(self, double now) - cpdef reset_ttl(self, DNSRecord other) - - cpdef set_created_ttl(self, double now, cython.float ttl) + cdef _set_created_ttl(self, double now, cython.float ttl) cdef class DNSAddress(DNSRecord): diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 471376e9..4fc8d2d6 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -185,6 +185,9 @@ def __eq__(self, other: Any) -> bool: # pylint: disable=no-self-use """Abstract method""" raise AbstractMethodException + def __lt__(self, other: "DNSRecord") -> bool: + return self.ttl < other.ttl + def suppressed_by(self, msg: "DNSIncoming") -> bool: """Returns true if any answer in a message can suffice for the information held in this record.""" @@ -222,13 +225,10 @@ def is_recent(self, now: _float) -> bool: """Returns true if the record more than one quarter of its TTL remaining.""" return self.created + (_RECENT_TIME_MS * self.ttl) > now - def reset_ttl(self, other) -> None: # type: 
ignore[no-untyped-def] - """Sets this record's TTL and created time to that of - another record.""" - self.set_created_ttl(other.created, other.ttl) - - def set_created_ttl(self, created: _float, ttl: Union[float, int]) -> None: + def _set_created_ttl(self, created: _float, ttl: Union[float, int]) -> None: """Set the created and ttl of a record.""" + # It would be better if we made a copy instead of mutating the record + # in place, but records currently don't have a copy method. self.created = created self.ttl = ttl diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index 5f25ceb1..d4e2792c 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -103,7 +103,8 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: record, _DNS_PTR_MIN_TTL, ) - record.set_created_ttl(record.created, _DNS_PTR_MIN_TTL) + # Safe because the record is never in the cache yet + record._set_created_ttl(record.created, _DNS_PTR_MIN_TTL) if record.unique: # https://tools.ietf.org/html/rfc6762#section-10.2 unique_types.add((record.name, record_type, record.class_)) @@ -113,13 +114,10 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: maybe_entry = cache.async_get_unique(record) if not record.is_expired(now): - if maybe_entry is not None: - maybe_entry.reset_ttl(record) + if record_type in _ADDRESS_RECORD_TYPES: + address_adds.append(record) else: - if record_type in _ADDRESS_RECORD_TYPES: - address_adds.append(record) - else: - other_adds.append(record) + other_adds.append(record) rec_update = RecordUpdate.__new__(RecordUpdate) rec_update._fast_init(record, maybe_entry) updates.append(rec_update) diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index f3b977fb..ba5ae52e 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -1509,9 +1509,9 @@ def update_service(self, zc, type_, name) -> None: # type: 
ignore[no-untyped-de ) # Force the ttl to be 1 second now = current_time_millis() - for cache_record in zc.cache.cache.values(): + for cache_record in list(zc.cache.cache.values()): for record in cache_record: - record.set_created_ttl(now, 1) + zc.cache._async_set_created_ttl(record, now, 1) time.sleep(0.3) info.port = 400 diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 5573eed1..1b16fef8 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -242,7 +242,7 @@ def test_service_info_rejects_expired_records(self): ttl, b"\x04ff=0\x04ci=3\x04sf=0\x0bsh=6fLM5A==", ) - expired_record.set_created_ttl(1000, 1) + zc.cache._async_set_created_ttl(expired_record, 1000, 1) info.async_update_records(zc, now, [RecordUpdate(expired_record, None)]) assert info.properties[b"ci"] == b"2" zc.close() diff --git a/tests/test_cache.py b/tests/test_cache.py index 63f23373..99de9827 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -3,6 +3,9 @@ import logging import unittest import unittest.mock +from heapq import heapify, heappop + +import pytest import zeroconf as r from zeroconf import const @@ -358,3 +361,123 @@ def test_async_get_unique_returns_newest_record(): assert record is record2 record = cache.async_get_unique(record2) assert record is record2 + + +@pytest.mark.asyncio +async def test_cache_heap_cleanup() -> None: + """Test that the heap gets cleaned up when there are many old expirations.""" + cache = r.DNSCache() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_records_to_cleanup = 100 + now = r.current_time_millis() + name = "heap.local." 
+ ttl_seconds = 100 + ttl_millis = ttl_seconds * 1000 + + for i in range(min_records_to_cleanup): + record = r.DNSAddress(name, const._TYPE_A, const._CLASS_IN, ttl_seconds, b"1", created=now + i) + cache.async_add_records([record]) + + assert len(cache._expire_heap) == min_records_to_cleanup + assert len(cache.async_entries_with_name(name)) == 1 + + # Now that we reached the minimum number of cookies to cleanup, + # add one more cookie to trigger the cleanup + record = r.DNSAddress( + name, const._TYPE_A, const._CLASS_IN, ttl_seconds, b"1", created=now + min_records_to_cleanup + ) + expected_expire_time = record.created + ttl_millis + cache.async_add_records([record]) + assert len(cache.async_entries_with_name(name)) == 1 + entry = next(iter(cache.async_entries_with_name(name))) + assert (entry.created + ttl_millis) == expected_expire_time + assert entry is record + + # Verify that the heap has been cleaned up + assert len(cache.async_entries_with_name(name)) == 1 + cache.async_expire(now) + + heap_copy = cache._expire_heap.copy() + heapify(heap_copy) + # Ensure heap order is maintained + assert cache._expire_heap == heap_copy + + # The heap should have been cleaned up + assert len(cache._expire_heap) == 1 + assert len(cache.async_entries_with_name(name)) == 1 + + entry = next(iter(cache.async_entries_with_name(name))) + assert entry is record + + assert (entry.created + ttl_millis) == expected_expire_time + + cache.async_expire(expected_expire_time) + assert not cache.async_entries_with_name(name), cache._expire_heap + + +@pytest.mark.asyncio +async def test_cache_heap_multi_name_cleanup() -> None: + """Test cleanup with multiple names.""" + cache = r.DNSCache() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_records_to_cleanup = 100 + now = r.current_time_millis() + name = "heap.local." + name2 = "heap2.local." 
+ ttl_seconds = 100 + ttl_millis = ttl_seconds * 1000 + + for i in range(min_records_to_cleanup): + record = r.DNSAddress(name, const._TYPE_A, const._CLASS_IN, ttl_seconds, b"1", created=now + i) + cache.async_add_records([record]) + expected_expire_time = record.created + ttl_millis + + for i in range(5): + record = r.DNSAddress( + name2, const._TYPE_A, const._CLASS_IN, ttl_seconds, bytes((i,)), created=now + i + ) + cache.async_add_records([record]) + + assert len(cache._expire_heap) == min_records_to_cleanup + 5 + assert len(cache.async_entries_with_name(name)) == 1 + assert len(cache.async_entries_with_name(name2)) == 5 + + cache.async_expire(now) + # The heap and expirations should have been cleaned up + assert len(cache._expire_heap) == 1 + 5 + assert len(cache._expirations) == 1 + 5 + + cache.async_expire(expected_expire_time) + assert not cache.async_entries_with_name(name), cache._expire_heap + + +@pytest.mark.asyncio +async def test_cache_heap_pops_order() -> None: + """Test cache heap is popped in order.""" + cache = r.DNSCache() + # The heap should not be cleaned up when there are less than 100 expiration changes + min_records_to_cleanup = 100 + now = r.current_time_millis() + name = "heap.local." + name2 = "heap2.local." 
+ ttl_seconds = 100 + + for i in range(min_records_to_cleanup): + record = r.DNSAddress(name, const._TYPE_A, const._CLASS_IN, ttl_seconds, b"1", created=now + i) + cache.async_add_records([record]) + + for i in range(5): + record = r.DNSAddress( + name2, const._TYPE_A, const._CLASS_IN, ttl_seconds, bytes((i,)), created=now + i + ) + cache.async_add_records([record]) + + assert len(cache._expire_heap) == min_records_to_cleanup + 5 + assert len(cache.async_entries_with_name(name)) == 1 + assert len(cache.async_entries_with_name(name2)) == 5 + + start_ts = 0.0 + while cache._expire_heap: + ts, _ = heappop(cache._expire_heap) + assert ts >= start_ts + start_ts = ts diff --git a/tests/test_dns.py b/tests/test_dns.py index f44affc8..e9c4dc09 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -96,34 +96,6 @@ def test_dns_record_abc(self): with pytest.raises((r.AbstractMethodException, TypeError)): record.write(None) # type: ignore[arg-type] - def test_dns_record_reset_ttl(self): - start = r.current_time_millis() - record = r.DNSRecord( - "irrelevant", - const._TYPE_SRV, - const._CLASS_IN, - const._DNS_HOST_TTL, - created=start, - ) - later = start + 1000 - record2 = r.DNSRecord( - "irrelevant", - const._TYPE_SRV, - const._CLASS_IN, - const._DNS_HOST_TTL, - created=later, - ) - now = r.current_time_millis() - - assert record.created != record2.created - assert record.get_remaining_ttl(now) != record2.get_remaining_ttl(now) - - record.reset_ttl(record2) - - assert record.ttl == record2.ttl - assert record.created == record2.created - assert record.get_remaining_ttl(now) == record2.get_remaining_ttl(now) - def test_service_info_dunder(self): type_ = "_test-srvc-type._tcp.local." 
name = "xxxyyy" @@ -233,6 +205,33 @@ def test_dns_record_hashablity_does_not_consider_ttl(): assert len(record_set) == 1 +def test_dns_record_hashablity_does_not_consider_created(): + """Test DNSRecord are hashable and created is not considered.""" + + # Verify the TTL is not considered in the hash + record1 = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same", created=1.0 + ) + record2 = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same", created=2.0 + ) + + record_set = {record1, record2} + assert len(record_set) == 1 + + record_set.add(record1) + assert len(record_set) == 1 + + record3_dupe = r.DNSAddress( + "irrelevant", const._TYPE_A, const._CLASS_IN, const._DNS_HOST_TTL, b"same", created=3.0 + ) + assert record2 == record3_dupe + assert record2.__hash__() == record3_dupe.__hash__() + + record_set.add(record3_dupe) + assert len(record_set) == 1 + + def test_dns_record_hashablity_does_not_consider_unique(): """Test DNSRecord are hashable and unique is ignored.""" diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 7b7abcea..8cf5cc9a 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1139,10 +1139,9 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): # Add the A record to the cache with 50% ttl remaining a_record = info.dns_addresses()[0] - a_record.set_created_ttl(current_time_millis() - (a_record.ttl * 1000 / 2), a_record.ttl) + zc.cache._async_set_created_ttl(a_record, current_time_millis() - (a_record.ttl * 1000 / 2), a_record.ttl) assert not a_record.is_recent(current_time_millis()) info._dns_address_cache = None # we are mutating the record so clear the cache - zc.cache.async_add_records([a_record]) # With QU should respond to only unicast when the answer has been recently multicast # even if the additional has not been recently multicast @@ -1190,9 +1189,10 @@ async def 
test_qu_response_only_sends_additionals_if_sends_answer(): # Remove the 100% PTR record and add a 50% PTR record zc.cache.async_remove_records([ptr_record]) - ptr_record.set_created_ttl(current_time_millis() - (ptr_record.ttl * 1000 / 2), ptr_record.ttl) + zc.cache._async_set_created_ttl( + ptr_record, current_time_millis() - (ptr_record.ttl * 1000 / 2), ptr_record.ttl + ) assert not ptr_record.is_recent(current_time_millis()) - zc.cache.async_add_records([ptr_record]) # With QU should respond to only multicast since the has less # than 75% of its ttl remaining query = r.DNSOutgoing(const._FLAGS_QR_QUERY) @@ -1312,10 +1312,13 @@ async def test_cache_flush_bit(): for record in new_records: assert zc.cache.async_get_unique(record) is not None - cached_records = [zc.cache.async_get_unique(record) for record in new_records] - for cached_record in cached_records: - assert cached_record is not None - cached_record.created = current_time_millis() - 1500 + cached_record_group = [ + zc.cache.async_all_by_details(record.name, record.type, record.class_) for record in new_records + ] + for cached_records in cached_record_group: + for cached_record in cached_records: + assert cached_record is not None + cached_record.created = current_time_millis() - 1500 fresh_address = socket.inet_aton("4.4.4.4") info.addresses = [fresh_address] @@ -1325,9 +1328,18 @@ async def test_cache_flush_bit(): out.add_answer_at_time(answer, 0) for packet in out.packets(): zc.record_manager.async_updates_from_response(r.DNSIncoming(packet)) - for cached_record in cached_records: - assert cached_record is not None - assert cached_record.ttl == 1 + + cached_record_group = [ + zc.cache.async_all_by_details(record.name, record.type, record.class_) for record in new_records + ] + for cached_records in cached_record_group: + for cached_record in cached_records: + # the new record should not be set to 1 + if cached_record == answer: + assert cached_record.ttl != 1 + continue + assert cached_record is not 
None + assert cached_record.ttl == 1 for entry in zc.cache.async_all_by_details(server_name, const._TYPE_A, const._CLASS_IN): assert isinstance(entry, r.DNSAddress) From 2d1ffed0a8a7932388943dfe454a65a65cfa420c Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 9 Jan 2025 00:25:23 +0000 Subject: [PATCH 301/434] 0.139.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 7 +++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 00c797c4..b1e28c16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,13 @@ # CHANGELOG +## v0.139.0 (2025-01-09) + +### Features + +* feat: implement heapq for tracking cache expire times (#1465) ([`09db184`](https://github.com/python-zeroconf/python-zeroconf/commit/09db1848957b34415f364b7338e4adce99b57abc)) + + ## v0.138.1 (2025-01-08) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 61481c40..8343c5e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.138.1" +version = "0.139.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index cf146490..2c4004ab 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.138.1" +__version__ = "0.139.0" __license__ = "LGPL" From 35949881fb13057236de788756304c2de8d31ff9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 8 Jan 2025 16:45:48 -1000 Subject: [PATCH 302/434] chore: cleanup unused vars in pxd files (#1470) --- src/zeroconf/_services/info.pxd | 1 - src/zeroconf/_utils/ipaddress.pxd | 1 - 2 files changed, 2 deletions(-) diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 6f1bef71..53abe62a 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -47,7 +47,6 @@ cdef cython.set _ADDRESS_RECORD_TYPES cdef unsigned int _DUPLICATE_QUESTION_INTERVAL cdef bint TYPE_CHECKING -cdef bint IPADDRESS_SUPPORTS_SCOPE_ID cdef object cached_ip_addresses cdef object randint diff --git a/src/zeroconf/_utils/ipaddress.pxd b/src/zeroconf/_utils/ipaddress.pxd index 098c6ff9..01d38164 100644 --- a/src/zeroconf/_utils/ipaddress.pxd +++ b/src/zeroconf/_utils/ipaddress.pxd @@ -1,5 +1,4 @@ cdef bint TYPE_CHECKING -cdef bint IPADDRESS_SUPPORTS_SCOPE_ID from .._dns cimport DNSAddress From 0dad54307d84875a0afef68d0a2f878c76e2e7e7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 10 Jan 2025 11:32:39 -1000 Subject: [PATCH 303/434] chore: add benchmark for sending packets (#1471) --- tests/benchmarks/helpers.py | 153 +++++++++++++++++++++++++++++ tests/benchmarks/test_outgoing.py | 156 +----------------------------- tests/benchmarks/test_send.py | 22 +++++ 3 files changed, 178 insertions(+), 153 deletions(-) create mode 100644 tests/benchmarks/helpers.py create mode 100644 tests/benchmarks/test_send.py diff --git a/tests/benchmarks/helpers.py b/tests/benchmarks/helpers.py new file mode 100644 index 00000000..e701e0b6 --- /dev/null +++ b/tests/benchmarks/helpers.py @@ -0,0 +1,153 @@ +"""Benchmark helpers.""" + +import socket + +from zeroconf import DNSAddress, DNSOutgoing, DNSService, DNSText, const + + +def generate_packets() -> DNSOutgoing: + out = DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) + address = socket.inet_pton(socket.AF_INET, "192.168.208.5") + + additionals = [ + { + "name": "HASS Bridge ZJWH FF5137._hap._tcp.local.", + "address": address, + "port": 51832, + "text": b"\x13md=HASS Bridge" + b" ZJWH\x06pv=1.0\x14id=01:6B:30:FF:51:37\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=L0m/aQ==", + }, + { + "name": "HASS Bridge 3K9A C2582A._hap._tcp.local.", + "address": address, + "port": 51834, + "text": b"\x13md=HASS Bridge" + b" 3K9A\x06pv=1.0\x14id=E2:AA:5B:C2:58:2A\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=b2CnzQ==", + }, + { + "name": "Master Bed TV CEDB27._hap._tcp.local.", + "address": address, + "port": 51830, + "text": b"\x10md=Master Bed" + b" TV\x06pv=1.0\x14id=9E:B7:44:CE:DB:27\x05c#=18\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=CVj1kw==", + }, + { + "name": "Living Room TV 921B77._hap._tcp.local.", + "address": address, + "port": 51833, + "text": b"\x11md=Living Room" + b" TV\x06pv=1.0\x14id=11:61:E7:92:1B:77\x05c#=17\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=qU77SQ==", + }, + { + "name": "HASS Bridge ZC8X FF413D._hap._tcp.local.", + "address": address, + "port": 
51829, + "text": b"\x13md=HASS Bridge" + b" ZC8X\x06pv=1.0\x14id=96:14:45:FF:41:3D\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=b0QZlg==", + }, + { + "name": "HASS Bridge WLTF 4BE61F._hap._tcp.local.", + "address": address, + "port": 51837, + "text": b"\x13md=HASS Bridge" + b" WLTF\x06pv=1.0\x14id=E0:E7:98:4B:E6:1F\x04c#=2\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=ahAISA==", + }, + { + "name": "FrontdoorCamera 8941D1._hap._tcp.local.", + "address": address, + "port": 54898, + "text": b"\x12md=FrontdoorCamera\x06pv=1.0\x14id=9F:B7:DC:89:41:D1\x04c#=2\x04" + b"s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=0+MXmA==", + }, + { + "name": "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", + "address": address, + "port": 51836, + "text": b"\x13md=HASS Bridge" + b" W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=6fLM5A==", + }, + { + "name": "HASS Bridge Y9OO EFF0A7._hap._tcp.local.", + "address": address, + "port": 51838, + "text": b"\x13md=HASS Bridge" + b" Y9OO\x06pv=1.0\x14id=D3:FE:98:EF:F0:A7\x04c#=2\x04s#=1\x04ff=0\x04" + b"ci=2\x04sf=0\x0bsh=u3bdfw==", + }, + { + "name": "Snooze Room TV 6B89B0._hap._tcp.local.", + "address": address, + "port": 51835, + "text": b"\x11md=Snooze Room" + b" TV\x06pv=1.0\x14id=5F:D5:70:6B:89:B0\x05c#=17\x04s#=1\x04ff=0\x05" + b"ci=31\x04sf=0\x0bsh=xNTqsg==", + }, + { + "name": "AlexanderHomeAssistant 74651D._hap._tcp.local.", + "address": address, + "port": 54811, + "text": b"\x19md=AlexanderHomeAssistant\x06pv=1.0\x14id=59:8A:0B:74:65:1D\x05" + b"c#=14\x04s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=ccZLPA==", + }, + { + "name": "HASS Bridge OS95 39C053._hap._tcp.local.", + "address": address, + "port": 51831, + "text": b"\x13md=HASS Bridge" + b" OS95\x06pv=1.0\x14id=7E:8C:E6:39:C0:53\x05c#=12\x04s#=1\x04ff=0\x04ci=2" + b"\x04sf=0\x0bsh=Xfe5LQ==", + }, + ] + + out.add_answer_at_time( + DNSText( + "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + 
const._DNS_OTHER_TTL, + b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" + b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", + ), + 0, + ) + + for record in additionals: + out.add_additional_answer( + DNSService( + record["name"], # type: ignore + const._TYPE_SRV, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_HOST_TTL, + 0, + 0, + record["port"], # type: ignore + record["name"], # type: ignore + ) + ) + out.add_additional_answer( + DNSText( + record["name"], # type: ignore + const._TYPE_TXT, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_OTHER_TTL, + record["text"], # type: ignore + ) + ) + out.add_additional_answer( + DNSAddress( + record["name"], # type: ignore + const._TYPE_A, + const._CLASS_IN | const._CLASS_UNIQUE, + const._DNS_HOST_TTL, + record["address"], # type: ignore + ) + ) + + return out diff --git a/tests/benchmarks/test_outgoing.py b/tests/benchmarks/test_outgoing.py index 5b7ee164..69de540e 100644 --- a/tests/benchmarks/test_outgoing.py +++ b/tests/benchmarks/test_outgoing.py @@ -1,165 +1,15 @@ """Benchmark for DNSOutgoing.""" -import socket - from pytest_codspeed import BenchmarkFixture -from zeroconf import DNSAddress, DNSOutgoing, DNSService, DNSText, const from zeroconf._protocol.outgoing import State - -def generate_packets() -> DNSOutgoing: - out = DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) - address = socket.inet_pton(socket.AF_INET, "192.168.208.5") - - additionals = [ - { - "name": "HASS Bridge ZJWH FF5137._hap._tcp.local.", - "address": address, - "port": 51832, - "text": b"\x13md=HASS Bridge" - b" ZJWH\x06pv=1.0\x14id=01:6B:30:FF:51:37\x05c#=12\x04s#=1\x04ff=0\x04" - b"ci=2\x04sf=0\x0bsh=L0m/aQ==", - }, - { - "name": "HASS Bridge 3K9A C2582A._hap._tcp.local.", - "address": address, - "port": 51834, - "text": b"\x13md=HASS Bridge" - b" 3K9A\x06pv=1.0\x14id=E2:AA:5B:C2:58:2A\x05c#=12\x04s#=1\x04ff=0\x04" - b"ci=2\x04sf=0\x0bsh=b2CnzQ==", - }, - { - "name": "Master Bed TV 
CEDB27._hap._tcp.local.", - "address": address, - "port": 51830, - "text": b"\x10md=Master Bed" - b" TV\x06pv=1.0\x14id=9E:B7:44:CE:DB:27\x05c#=18\x04s#=1\x04ff=0\x05" - b"ci=31\x04sf=0\x0bsh=CVj1kw==", - }, - { - "name": "Living Room TV 921B77._hap._tcp.local.", - "address": address, - "port": 51833, - "text": b"\x11md=Living Room" - b" TV\x06pv=1.0\x14id=11:61:E7:92:1B:77\x05c#=17\x04s#=1\x04ff=0\x05" - b"ci=31\x04sf=0\x0bsh=qU77SQ==", - }, - { - "name": "HASS Bridge ZC8X FF413D._hap._tcp.local.", - "address": address, - "port": 51829, - "text": b"\x13md=HASS Bridge" - b" ZC8X\x06pv=1.0\x14id=96:14:45:FF:41:3D\x05c#=12\x04s#=1\x04ff=0\x04" - b"ci=2\x04sf=0\x0bsh=b0QZlg==", - }, - { - "name": "HASS Bridge WLTF 4BE61F._hap._tcp.local.", - "address": address, - "port": 51837, - "text": b"\x13md=HASS Bridge" - b" WLTF\x06pv=1.0\x14id=E0:E7:98:4B:E6:1F\x04c#=2\x04s#=1\x04ff=0\x04" - b"ci=2\x04sf=0\x0bsh=ahAISA==", - }, - { - "name": "FrontdoorCamera 8941D1._hap._tcp.local.", - "address": address, - "port": 54898, - "text": b"\x12md=FrontdoorCamera\x06pv=1.0\x14id=9F:B7:DC:89:41:D1\x04c#=2\x04" - b"s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=0+MXmA==", - }, - { - "name": "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", - "address": address, - "port": 51836, - "text": b"\x13md=HASS Bridge" - b" W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1\x04ff=0\x04" - b"ci=2\x04sf=0\x0bsh=6fLM5A==", - }, - { - "name": "HASS Bridge Y9OO EFF0A7._hap._tcp.local.", - "address": address, - "port": 51838, - "text": b"\x13md=HASS Bridge" - b" Y9OO\x06pv=1.0\x14id=D3:FE:98:EF:F0:A7\x04c#=2\x04s#=1\x04ff=0\x04" - b"ci=2\x04sf=0\x0bsh=u3bdfw==", - }, - { - "name": "Snooze Room TV 6B89B0._hap._tcp.local.", - "address": address, - "port": 51835, - "text": b"\x11md=Snooze Room" - b" TV\x06pv=1.0\x14id=5F:D5:70:6B:89:B0\x05c#=17\x04s#=1\x04ff=0\x05" - b"ci=31\x04sf=0\x0bsh=xNTqsg==", - }, - { - "name": "AlexanderHomeAssistant 74651D._hap._tcp.local.", - "address": address, - "port": 54811, - "text": 
b"\x19md=AlexanderHomeAssistant\x06pv=1.0\x14id=59:8A:0B:74:65:1D\x05" - b"c#=14\x04s#=1\x04ff=0\x04ci=2\x04sf=0\x0bsh=ccZLPA==", - }, - { - "name": "HASS Bridge OS95 39C053._hap._tcp.local.", - "address": address, - "port": 51831, - "text": b"\x13md=HASS Bridge" - b" OS95\x06pv=1.0\x14id=7E:8C:E6:39:C0:53\x05c#=12\x04s#=1\x04ff=0\x04ci=2" - b"\x04sf=0\x0bsh=Xfe5LQ==", - }, - ] - - out.add_answer_at_time( - DNSText( - "HASS Bridge W9DN 5B5CC5._hap._tcp.local.", - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - const._DNS_OTHER_TTL, - b"\x13md=HASS Bridge W9DN\x06pv=1.0\x14id=11:8E:DB:5B:5C:C5\x05c#=12\x04s#=1" - b"\x04ff=0\x04ci=2\x04sf=0\x0bsh=6fLM5A==", - ), - 0, - ) - - for record in additionals: - out.add_additional_answer( - DNSService( - record["name"], # type: ignore - const._TYPE_SRV, - const._CLASS_IN | const._CLASS_UNIQUE, - const._DNS_HOST_TTL, - 0, - 0, - record["port"], # type: ignore - record["name"], # type: ignore - ) - ) - out.add_additional_answer( - DNSText( - record["name"], # type: ignore - const._TYPE_TXT, - const._CLASS_IN | const._CLASS_UNIQUE, - const._DNS_OTHER_TTL, - record["text"], # type: ignore - ) - ) - out.add_additional_answer( - DNSAddress( - record["name"], # type: ignore - const._TYPE_A, - const._CLASS_IN | const._CLASS_UNIQUE, - const._DNS_HOST_TTL, - record["address"], # type: ignore - ) - ) - - return out - - -out = generate_packets() +from .helpers import generate_packets def test_parse_outgoing_message(benchmark: BenchmarkFixture) -> None: + out = generate_packets() + @benchmark def make_outgoing_message() -> None: out.packets() diff --git a/tests/benchmarks/test_send.py b/tests/benchmarks/test_send.py new file mode 100644 index 00000000..7a6d664b --- /dev/null +++ b/tests/benchmarks/test_send.py @@ -0,0 +1,22 @@ +"""Benchmark for sending packets.""" + +import pytest +from pytest_codspeed import BenchmarkFixture + +from zeroconf.asyncio import AsyncZeroconf + +from .helpers import generate_packets + + 
+@pytest.mark.asyncio +async def test_sending_packets(benchmark: BenchmarkFixture) -> None: + """Benchmark sending packets.""" + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) + await aiozc.zeroconf.async_wait_for_start() + out = generate_packets() + + @benchmark + def _send_packets() -> None: + aiozc.zeroconf.async_send(out) + + await aiozc.async_close() From 2197b9672bb9490a55b5f58b5acf0a5e0ce25837 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 12:40:08 -1000 Subject: [PATCH 304/434] chore(deps-dev): bump setuptools from 75.7.0 to 75.8.0 (#1473) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index e4a5fae5..5ce877e4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
[[package]] name = "async-timeout" @@ -522,13 +522,13 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "75.7.0" +version = "75.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" files = [ - {file = "setuptools-75.7.0-py3-none-any.whl", hash = "sha256:84fb203f278ebcf5cd08f97d3fb96d3fbed4b629d500b29ad60d11e00769b183"}, - {file = "setuptools-75.7.0.tar.gz", hash = "sha256:886ff7b16cd342f1d1defc16fc98c9ce3fde69e087a4e1983d7ab634e5f41f4f"}, + {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, + {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, ] [package.extras] From ed02799ea1209cddac7e2d2d6428b72b95f5906a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 12:40:40 -1000 Subject: [PATCH 305/434] chore(pre-commit.ci): pre-commit autoupdate (#1480) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- src/zeroconf/_utils/net.py | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5c4c252f..8551ee8b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.6 + rev: v0.9.1 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index 4cd50926..0eba9288 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -254,7 +254,7 @@ def new_socket( except OSError as ex: if ex.errno == errno.EADDRNOTAVAIL: log.warning( - "Address not available when binding to %s, " "it is 
expected to happen on some systems", + "Address not available when binding to %s, it is expected to happen on some systems", bind_tup, ) return None @@ -295,8 +295,7 @@ def add_multicast_member( _errno = get_errno(e) if _errno == errno.EADDRINUSE: log.info( - "Address in use when adding %s to multicast group, " - "it is expected to happen on some systems", + "Address in use when adding %s to multicast group, it is expected to happen on some systems", interface, ) return False @@ -309,7 +308,7 @@ def add_multicast_member( return False if _errno in err_einval: log.info( - "Interface of %s does not support multicast, " "it is expected in WSL", + "Interface of %s does not support multicast, it is expected in WSL", interface, ) return False From 430491db91952ad03a058f5932436969fb4b06cf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 12:40:57 -1000 Subject: [PATCH 306/434] chore(deps-dev): bump pytest-codspeed from 3.1.0 to 3.1.2 (#1475) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5ce877e4..939ecd22 100644 --- a/poetry.lock +++ b/poetry.lock @@ -440,22 +440,23 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-codspeed" -version = "3.1.0" +version = "3.1.2" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" files = [ - {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb7c16e5a64cb30bad30f5204c7690f3cbc9ae5b9839ce187ef1727aa5d2d9c"}, - {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23910893c22ceef6efbdf85d80e803b7fb4a231c9e7676ab08f5ddfc228438"}, - {file = 
"pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb1495a633a33e15268a1f97d91a4809c868de06319db50cf97b4e9fa426372c"}, - {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd8a54b99207bd25a4c3f64d9a83ac0f3def91cdd87204ca70a49f822ba919c"}, - {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4d1ac896ebaea5b365e69b41319b4d09b57dab85ec6234f6ff26116b3795f03"}, - {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f0c1857a0a6cce6a23c49f98c588c2eef66db353c76ecbb2fb65c1a2b33a8d5"}, - {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4731a7cf1d8d38f58140d51faa69b7c1401234c59d9759a2507df570c805b11"}, - {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f2e4b63260f65493b8d42c8167f831b8ed90788f81eb4eb95a103ee6aa4294"}, - {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db44099b3f1ec1c9c41f0267c4d57d94e31667f4cb3fb4b71901561e8ab8bc98"}, - {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a533c1ad3cc60f07be432864c83d1769ce2877753ac778e1bfc5a9821f5c6ddf"}, - {file = "pytest_codspeed-3.1.0.tar.gz", hash = "sha256:f29641d27b4ded133b1058a4c859e510a2612ad4217ef9a839ba61750abd2f8a"}, + {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, + {file = 
"pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, + {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c84e591a7a0f67d45e2dc9fd05b276971a3aabcab7478fe43363ebefec1358f4"}, + {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6ae6d094247156407770e6b517af70b98862dd59a3c31034aede11d5f71c32c"}, + {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0f264991de5b5cdc118b96fc671386cca3f0f34e411482939bf2459dc599097"}, + {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0695a4bcd5ff04e8379124dba5d9795ea5e0cadf38be7a0406432fc1467b555"}, + {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc356c8dcaaa883af83310f397ac06c96fac9b8a1146e303d4b374b2cb46a18"}, + {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc8a5d0366322a75cf562f7d8d672d28c1cf6948695c4dddca50331e08f6b3d5"}, + {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c5fe7a19b72f54f217480b3b527102579547b1de9fe3acd9e66cb4629ff46c8"}, + {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b67205755a665593f6521a98317d02a9d07d6fdc593f6634de2c94dea47a3055"}, + {file = "pytest_codspeed-3.1.2-py3-none-any.whl", hash = "sha256:5e7ed0315e33496c5c07dba262b50303b8d0bc4c3d10bf1d422a41e70783f1cb"}, + {file = "pytest_codspeed-3.1.2.tar.gz", hash = 
"sha256:09c1733af3aab35e94a621aa510f2d2114f65591e6f644c42ca3f67547edad4b"}, ] [package.dependencies] From 6f3430f334761092f0ced7d5e5065a3710eb4ad5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 12:41:12 -1000 Subject: [PATCH 307/434] chore(deps-dev): bump pytest-asyncio from 0.24.0 to 0.25.2 (#1476) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 939ecd22..56a2b910 100644 --- a/poetry.lock +++ b/poetry.lock @@ -422,20 +422,20 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.24.0" +version = "0.25.2" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, - {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, + {file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"}, + {file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"}, ] [package.dependencies] pytest = ">=8.2,<9" [package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] @@ -615,4 +615,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "b2255f56e331fb25e626030bf4ad11e7424d28cb1b7dd0310b9c704ee39bb0e1" +content-hash = "f5c250deb75c032aed220cdb67ee2a16316143cec5458a8bb99fd9bafbdbf1ad" 
diff --git a/pyproject.toml b/pyproject.toml index 8343c5e0..ad461890 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,7 +76,7 @@ ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] pytest = ">=7.2,<9.0" pytest-cov = ">=4,<6" -pytest-asyncio = ">=0.20.3,<0.25.0" +pytest-asyncio = ">=0.20.3,<0.26.0" cython = "^3.0.5" setuptools = ">=65.6.3,<76.0.0" pytest-timeout = "^2.1.0" From b170d903868be4b13c1cef7bf5fb4b9e9bffba72 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Thu, 16 Jan 2025 23:41:50 +0100 Subject: [PATCH 308/434] chore(tests): replace `lru_cache` with `cache` (#1477) --- tests/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/__init__.py b/tests/__init__.py index 82c09be7..dc4524fb 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -23,7 +23,7 @@ import asyncio import socket import time -from functools import lru_cache +from functools import cache from typing import List, Optional, Set from unittest import mock @@ -62,7 +62,7 @@ def _wait_for_start(zc: Zeroconf) -> None: asyncio.run_coroutine_threadsafe(zc.async_wait_for_start(), zc.loop).result() -@lru_cache(maxsize=None) +@cache def has_working_ipv6(): """Return True if if the system can bind an IPv6 address.""" if not socket.has_ipv6: From ba2ee5a2b48e9e2a378889f1e46c9ed00349b457 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Thu, 16 Jan 2025 23:42:09 +0100 Subject: [PATCH 309/434] chore: remove outdated requirements-dev.txt (#1478) --- requirements-dev.txt | 16 ---------------- 1 file changed, 16 deletions(-) delete mode 100644 requirements-dev.txt diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index 1054014e..00000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,16 +0,0 @@ -async_timeout>=4.0.1 -autopep8 -black;implementation_name=="cpython" -bump2version -coverage -flake8 -flake8-import-order -ifaddr -mypy;implementation_name=="cpython" -pep8-naming>=0.12.0 -pylint -pytest -pytest-asyncio -pytest-cov 
-pytest-timeout -readme_renderer From d20d8c1b4db2dd2ff3818091a1bbd973caa6acd6 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Thu, 16 Jan 2025 23:42:53 +0100 Subject: [PATCH 310/434] chore: migrate to f-string (#1481) --- examples/async_apple_scanner.py | 12 ++++++------ examples/async_browser.py | 12 ++++++------ examples/async_service_info_request.py | 8 ++++---- examples/browser.py | 10 +++++----- examples/self_test.py | 2 +- pyproject.toml | 1 - src/zeroconf/_dns.py | 2 +- src/zeroconf/_protocol/incoming.py | 26 ++++++++++++++------------ src/zeroconf/_protocol/outgoing.py | 20 +++++++++++--------- src/zeroconf/_utils/net.py | 8 ++++---- tests/services/test_browser.py | 20 ++++++++++---------- tests/services/test_info.py | 26 +++++++++++++------------- tests/test_asyncio.py | 6 +++--- tests/test_core.py | 2 +- tests/test_init.py | 10 +++++----- tests/test_protocol.py | 6 +++--- 16 files changed, 87 insertions(+), 84 deletions(-) diff --git a/examples/async_apple_scanner.py b/examples/async_apple_scanner.py index 1d2c5306..e126e8f9 100755 --- a/examples/async_apple_scanner.py +++ b/examples/async_apple_scanner.py @@ -55,12 +55,12 @@ def async_on_service_state_change( async def _async_show_service_info(zeroconf: Zeroconf, service_type: str, name: str) -> None: info = AsyncServiceInfo(service_type, name) await info.async_request(zeroconf, 3000, question_type=DNSQuestionType.QU) - print("Info from zeroconf.get_service_info: %r" % (info)) + print(f"Info from zeroconf.get_service_info: {info!r}") if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses()] - print(" Name: %s" % name) - print(" Addresses: %s" % ", ".join(addresses)) - print(" Weight: %d, priority: %d" % (info.weight, info.priority)) + addresses = [f"{addr}:{cast(int, info.port)}" for addr in info.parsed_addresses()] + print(f" Name: {name}") + print(f" Addresses: {', '.join(addresses)}") + print(f" Weight: {info.weight}, priority: {info.priority}") print(f" Server: 
{info.server}") if info.properties: print(" Properties are:") @@ -82,7 +82,7 @@ def __init__(self, args: Any) -> None: async def async_run(self) -> None: self.aiozc = AsyncZeroconf(ip_version=ip_version) await self.aiozc.zeroconf.async_wait_for_start() - print("\nBrowsing %s service(s), press Ctrl-C to exit...\n" % ALL_SERVICES) + print(f"\nBrowsing {ALL_SERVICES} service(s), press Ctrl-C to exit...\n") kwargs = { "handlers": [async_on_service_state_change], "question_type": DNSQuestionType.QU, diff --git a/examples/async_browser.py b/examples/async_browser.py index 78be3a4c..31b55e4a 100755 --- a/examples/async_browser.py +++ b/examples/async_browser.py @@ -35,12 +35,12 @@ def async_on_service_state_change( async def async_display_service_info(zeroconf: Zeroconf, service_type: str, name: str) -> None: info = AsyncServiceInfo(service_type, name) await info.async_request(zeroconf, 3000) - print("Info from zeroconf.get_service_info: %r" % (info)) + print(f"Info from zeroconf.get_service_info: {info!r}") if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_scoped_addresses()] - print(" Name: %s" % name) - print(" Addresses: %s" % ", ".join(addresses)) - print(" Weight: %d, priority: %d" % (info.weight, info.priority)) + addresses = [f"{addr}:{cast(int, info.port)}" for addr in info.parsed_scoped_addresses()] + print(f" Name: {name}") + print(f" Addresses: {', '.join(addresses)}") + print(f" Weight: {info.weight}, priority: {info.priority}") print(f" Server: {info.server}") if info.properties: print(" Properties are:") @@ -68,7 +68,7 @@ async def async_run(self) -> None: await AsyncZeroconfServiceTypes.async_find(aiozc=self.aiozc, ip_version=ip_version) ) - print("\nBrowsing %s service(s), press Ctrl-C to exit...\n" % services) + print(f"\nBrowsing {services} service(s), press Ctrl-C to exit...\n") self.aiobrowser = AsyncServiceBrowser( self.aiozc.zeroconf, services, handlers=[async_on_service_state_change] ) diff --git 
a/examples/async_service_info_request.py b/examples/async_service_info_request.py index b904fd89..42df809d 100755 --- a/examples/async_service_info_request.py +++ b/examples/async_service_info_request.py @@ -30,11 +30,11 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: tasks = [info.async_request(aiozc.zeroconf, 3000) for info in infos] await asyncio.gather(*tasks) for info in infos: - print("Info for %s" % (info.name)) + print(f"Info for {info.name}") if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_addresses()] - print(" Addresses: %s" % ", ".join(addresses)) - print(" Weight: %d, priority: %d" % (info.weight, info.priority)) + addresses = [f"{addr}:{cast(int, info.port)}" for addr in info.parsed_addresses()] + print(f" Addresses: {', '.join(addresses)}") + print(f" Weight: {info.weight}, priority: {info.priority}") print(f" Server: {info.server}") if info.properties: print(" Properties are:") diff --git a/examples/browser.py b/examples/browser.py index 4e7b7610..107be452 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -26,12 +26,12 @@ def on_service_state_change( if state_change is ServiceStateChange.Added: info = zeroconf.get_service_info(service_type, name) - print("Info from zeroconf.get_service_info: %r" % (info)) + print(f"Info from zeroconf.get_service_info: {info!r}") if info: - addresses = ["%s:%d" % (addr, cast(int, info.port)) for addr in info.parsed_scoped_addresses()] - print(" Addresses: %s" % ", ".join(addresses)) - print(" Weight: %d, priority: %d" % (info.weight, info.priority)) + addresses = [f"{addr}:{cast(int, info.port)}" for addr in info.parsed_scoped_addresses()] + print(f" Addresses: {', '.join(addresses)}") + print(f" Weight: {info.weight}, priority: {info.priority}") print(f" Server: {info.server}") if info.properties: print(" Properties are:") @@ -75,7 +75,7 @@ def on_service_state_change( if args.find: services = list(ZeroconfServiceTypes.find(zc=zeroconf)) - 
print("\nBrowsing %d service(s), press Ctrl-C to exit...\n" % len(services)) + print(f"\nBrowsing {len(services)} service(s), press Ctrl-C to exit...\n") browser = ServiceBrowser(zeroconf, services, handlers=[on_service_state_change]) try: diff --git a/examples/self_test.py b/examples/self_test.py index 1fec3921..b12a8518 100755 --- a/examples/self_test.py +++ b/examples/self_test.py @@ -34,7 +34,7 @@ r.register_service(info) print(" Registration done.") print("2. Testing query of service information...") - print(" Getting ZOE service: %s" % (r.get_service_info("_http._tcp.local.", "ZOE._http._tcp.local."))) + print(f" Getting ZOE service: {r.get_service_info('_http._tcp.local.', 'ZOE._http._tcp.local.')}") print(" Query done.") print("3. Testing query of own service...") queried_info = r.get_service_info("_http._tcp.local.", "My Service Name._http._tcp.local.") diff --git a/pyproject.toml b/pyproject.toml index ad461890..9da8d87d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,7 +90,6 @@ line-length = 110 ignore = [ "S101", # use of assert "S104", # S104 Possible binding to all interfaces - "UP031", # UP031 use f-strings -- too many to fix right now ] select = [ "B", # flake8-bugbear diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 4fc8d2d6..c22f8b17 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -102,7 +102,7 @@ def entry_to_string(self, hdr: str, other: Optional[Union[bytes, str]]) -> str: self.get_class_(self.class_), "-unique" if self.unique else "", self.name, - "=%s" % cast(Any, other) if other is not None else "", + f"={cast(Any, other)}" if other is not None else "", ) diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 5347f50d..6e009b29 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -208,18 +208,20 @@ def is_probe(self) -> bool: return self._num_authorities > 0 def __repr__(self) -> str: - return "" % ", ".join( - [ - "id=%s" % 
self.id, - "flags=%s" % self.flags, - "truncated=%s" % self.truncated, - "n_q=%s" % self._num_questions, - "n_ans=%s" % self._num_answers, - "n_auth=%s" % self._num_authorities, - "n_add=%s" % self._num_additionals, - "questions=%s" % self._questions, - "answers=%s" % self.answers(), - ] + return "".format( + ", ".join( + [ + f"id={self.id}", + f"flags={self.flags}", + f"truncated={self.truncated}", + f"n_q={self._num_questions}", + f"n_ans={self._num_answers}", + f"n_auth={self._num_authorities}", + f"n_add={self._num_additionals}", + f"questions={self._questions}", + f"answers={self.answers()}", + ] + ) ) def _read_header(self) -> None: diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index b2eb9230..c937350e 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -128,15 +128,17 @@ def _reset_for_next_packet(self) -> None: self.allow_long = True def __repr__(self) -> str: - return "" % ", ".join( - [ - "multicast=%s" % self.multicast, - "flags=%s" % self.flags, - "questions=%s" % self.questions, - "answers=%s" % self.answers, - "authorities=%s" % self.authorities, - "additionals=%s" % self.additionals, - ] + return "".format( + ", ".join( + [ + f"multicast={self.multicast}", + f"flags={self.flags}", + f"questions={self.questions}", + f"answers={self.answers}", + f"authorities={self.authorities}", + f"additionals={self.additionals}", + ] + ) ) def add_question(self, record: DNSQuestion) -> None: diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index 0eba9288..7298bec4 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -95,7 +95,7 @@ def ip6_to_address_and_index(adapters: List[Any], ip: str) -> Tuple[Tuple[str, i cast(int, adapter.index), ) - raise RuntimeError("No adapter found for IP address %s" % ip) + raise RuntimeError(f"No adapter found for IP address {ip}") def interface_index_to_ip6_address(adapters: List[Any], index: int) -> Tuple[str, int, 
int]: @@ -106,7 +106,7 @@ def interface_index_to_ip6_address(adapters: List[Any], index: int) -> Tuple[str if isinstance(adapter_ip.ip, tuple): return cast(Tuple[str, int, int], adapter_ip.ip) - raise RuntimeError("No adapter found for index %s" % index) + raise RuntimeError(f"No adapter found for index {index}") def ip6_addresses_to_indexes( @@ -154,7 +154,7 @@ def normalize_interface_choice( result.extend(get_all_addresses()) if not result: raise RuntimeError( - "No interfaces to listen on, check that any interfaces have IP version %s" % ip_version + f"No interfaces to listen on, check that any interfaces have IP version {ip_version}" ) elif isinstance(choice, list): # First, take IPv4 addresses. @@ -162,7 +162,7 @@ def normalize_interface_choice( # Unlike IP_ADD_MEMBERSHIP, IPV6_JOIN_GROUP requires interface indexes. result += ip6_addresses_to_indexes(choice) else: - raise TypeError("choice must be a list or InterfaceChoice, got %r" % choice) + raise TypeError(f"choice must be a list or InterfaceChoice, got {choice!r}") return result diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index ba5ae52e..f4d750e0 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -562,7 +562,7 @@ async def test_asking_default_is_asking_qm_questions_after_the_first_qu(): got_query = asyncio.Event() type_ = "_http._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" def on_service_state_change(zeroconf, service_type, state_change, name): if name == registration_name: @@ -664,7 +664,7 @@ async def test_ttl_refresh_cancelled_rescue_query(): got_query = asyncio.Event() type_ = "_http._tcp.local." 
- registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" def on_service_state_change(zeroconf, service_type, state_change, name): if name == registration_name: @@ -913,7 +913,7 @@ def test_service_browser_is_aware_of_port_changes(): zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" callbacks = [] @@ -977,7 +977,7 @@ def test_service_browser_listeners_update_service(): zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" callbacks = [] class MyServiceListener(r.ServiceListener): @@ -1042,7 +1042,7 @@ def test_service_browser_listeners_no_update_service(): zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_hap._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" callbacks = [] class MyServiceListener(r.ServiceListener): @@ -1364,9 +1364,9 @@ def test_service_browser_matching(): zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_http._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" not_match_type_ = "_asustor-looksgood_http._tcp.local." - not_match_registration_name = "xxxyyy.%s" % not_match_type_ + not_match_registration_name = f"xxxyyy.{not_match_type_}" callbacks = [] class MyServiceListener(r.ServiceListener): @@ -1457,7 +1457,7 @@ def test_service_browser_expire_callbacks(): zc = Zeroconf(interfaces=["127.0.0.1"]) # start a browser type_ = "_old._tcp.local." 
- registration_name = "uniquezip323.%s" % type_ + registration_name = f"uniquezip323.{type_}" callbacks = [] class MyServiceListener(r.ServiceListener): @@ -1582,7 +1582,7 @@ async def test_close_zeroconf_without_browser_before_start_up_queries(): """Test that we stop sending startup queries if zeroconf is closed out from under the browser.""" service_added = asyncio.Event() type_ = "_http._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" def on_service_state_change(zeroconf, service_type, state_change, name): if name == registration_name: @@ -1651,7 +1651,7 @@ async def test_close_zeroconf_without_browser_after_start_up_queries(): service_added = asyncio.Event() type_ = "_http._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" def on_service_state_change(zeroconf, service_type, state_change, name): if name == registration_name: diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 1b16fef8..7051e6fe 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -666,7 +666,7 @@ def test_service_info_duplicate_properties_txt_records(self): def test_multiple_addresses(): type_ = "_http._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" desc = {"path": "/~paulsm/"} address_parsed = "10.0.1.2" address = socket.inet_aton(address_parsed) @@ -830,7 +830,7 @@ def test_scoped_addresses_from_cache(): async def test_multiple_a_addresses_newest_address_first(): """Test that info.addresses returns the newest seen address first.""" type_ = "_http._tcp.local." 
- registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" desc = {"path": "/~paulsm/"} aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) cache = aiozc.zeroconf.cache @@ -849,7 +849,7 @@ async def test_multiple_a_addresses_newest_address_first(): @pytest.mark.asyncio async def test_invalid_a_addresses(caplog): type_ = "_http._tcp.local." - registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" desc = {"path": "/~paulsm/"} aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) cache = aiozc.zeroconf.cache @@ -1082,7 +1082,7 @@ def async_send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): async def test_release_wait_when_new_recorded_added(): """Test that async_request returns as soon as new matching records are added to the cache.""" type_ = "_http._tcp.local." - registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" desc = {"path": "/~paulsm/"} aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." @@ -1147,7 +1147,7 @@ async def test_release_wait_when_new_recorded_added(): async def test_port_changes_are_seen(): """Test that port changes are seen by async_request.""" type_ = "_http._tcp.local." - registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" desc = {"path": "/~paulsm/"} aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." @@ -1230,7 +1230,7 @@ async def test_port_changes_are_seen(): async def test_port_changes_are_seen_with_directed_request(): """Test that port changes are seen by async_request with a directed request.""" type_ = "_http._tcp.local." - registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" desc = {"path": "/~paulsm/"} aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." 
@@ -1313,7 +1313,7 @@ async def test_port_changes_are_seen_with_directed_request(): async def test_ipv4_changes_are_seen(): """Test that ipv4 changes are seen by async_request.""" type_ = "_http._tcp.local." - registration_name = "multiaipv4rec.%s" % type_ + registration_name = f"multiaipv4rec.{type_}" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." @@ -1401,7 +1401,7 @@ async def test_ipv4_changes_are_seen(): async def test_ipv6_changes_are_seen(): """Test that ipv6 changes are seen by async_request.""" type_ = "_http._tcp.local." - registration_name = "multiaipv6rec.%s" % type_ + registration_name = f"multiaipv6rec.{type_}" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." @@ -1496,7 +1496,7 @@ async def test_ipv6_changes_are_seen(): async def test_bad_ip_addresses_ignored_in_cache(): """Test that bad ip address in the cache are ignored async_request.""" type_ = "_http._tcp.local." - registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." @@ -1550,7 +1550,7 @@ async def test_bad_ip_addresses_ignored_in_cache(): async def test_service_name_change_as_seen_has_ip_in_cache(): """Test that service name changes are seen by async_request when the ip is in the cache.""" type_ = "_http._tcp.local." - registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." @@ -1632,7 +1632,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): async def test_service_name_change_as_seen_ip_not_in_cache(): """Test that service name changes are seen by async_request when the ip is not in the cache.""" type_ = "_http._tcp.local." - registration_name = "multiarec.%s" % type_ + registration_name = f"multiarec.{type_}" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahost.local." 
@@ -1715,7 +1715,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): async def test_release_wait_when_new_recorded_added_concurrency(): """Test that concurrent async_request returns as soon as new matching records are added to the cache.""" type_ = "_http._tcp.local." - registration_name = "multiareccon.%s" % type_ + registration_name = f"multiareccon.{type_}" desc = {"path": "/~paulsm/"} aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) host = "multahostcon.local." @@ -1786,7 +1786,7 @@ async def test_release_wait_when_new_recorded_added_concurrency(): async def test_service_info_nsec_records(): """Test we can generate nsec records from ServiceInfo.""" type_ = "_http._tcp.local." - registration_name = "multiareccon.%s" % type_ + registration_name = f"multiareccon.{type_}" desc = {"path": "/~paulsm/"} host = "multahostcon.local." info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, host) diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 2471733b..86e9e8c7 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -933,7 +933,7 @@ async def test_service_browser_instantiation_generates_add_events_from_cache(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf type_ = "_hap._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" callbacks = [] class MyServiceListener(ServiceListener): @@ -982,7 +982,7 @@ async def test_integration(): got_query = asyncio.Event() type_ = "_http._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" def on_service_state_change(zeroconf, service_type, state_change, name): if name == registration_name: @@ -1184,7 +1184,7 @@ async def test_service_browser_ignores_unrelated_updates(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf type_ = "_veryuniqueone._tcp.local." 
- registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" callbacks = [] class MyServiceListener(ServiceListener): diff --git a/tests/test_core.py b/tests/test_core.py index 82055968..9ccdcc78 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -318,7 +318,7 @@ def test_goodbye_all_services(): out = zc.generate_unregister_all_services() assert out is None type_ = "_http._tcp.local." - registration_name = "xxxyyy.%s" % type_ + registration_name = f"xxxyyy.{type_}" desc = {"path": "/~paulsm/"} info = r.ServiceInfo( type_, diff --git a/tests/test_init.py b/tests/test_init.py index 3ae695c5..080d485e 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -40,20 +40,20 @@ def test_long_name(self): def test_exceedingly_long_name(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - name = "%slocal." % ("part." * 1000) + name = f"{'part.' * 1000}local." question = r.DNSQuestion(name, const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) r.DNSIncoming(generated.packets()[0]) def test_extra_exceedingly_long_name(self): generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) - name = "%slocal." % ("part." * 4000) + name = f"{'part.' * 4000}local." question = r.DNSQuestion(name, const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) r.DNSIncoming(generated.packets()[0]) def test_exceedingly_long_name_part(self): - name = "%s.local." % ("a" * 1000) + name = f"{'a' * 1000}.local." 
generated = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) question = r.DNSQuestion(name, const._TYPE_SRV, const._CLASS_IN) generated.add_question(question) @@ -154,14 +154,14 @@ def verify_name_change(self, zc, type_, name, number_hosts): addresses=[socket.inet_aton("10.0.1.2")], ) zc.register_service(info_service2, allow_name_change=True) - assert info_service2.name.split(".")[0] == "%s-%d" % (name, number_hosts + 1) + assert info_service2.name.split(".")[0] == f"{name}-{number_hosts + 1}" def generate_many_hosts(self, zc, type_, name, number_hosts): block_size = 25 number_hosts = int((number_hosts - 1) / block_size + 1) * block_size out = r.DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) for i in range(1, number_hosts + 1): - next_name = name if i == 1 else "%s-%d" % (name, i) + next_name = name if i == 1 else f"{name}-{i}" self.generate_host(out, next_name, type_) _inject_responses(zc, [r.DNSIncoming(packet) for packet in out.packets()]) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 1feb64c5..e46dbf03 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -707,7 +707,7 @@ def test_tc_bit_in_query_packet(): for i in range(30): out.add_answer_at_time( DNSText( - ("HASS Bridge W9DN %s._hap._tcp.local." % i), + f"HASS Bridge W9DN {i}._hap._tcp.local.", const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, @@ -739,7 +739,7 @@ def test_tc_bit_not_set_in_answer_packet(): for i in range(30): out.add_answer_at_time( DNSText( - ("HASS Bridge W9DN %s._hap._tcp.local." % i), + f"HASS Bridge W9DN {i}._hap._tcp.local.", const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, @@ -813,7 +813,7 @@ def test_records_same_packet_share_fate(): for i in range(30): out.add_answer_at_time( DNSText( - ("HASS Bridge W9DN %s._hap._tcp.local." 
% i), + f"HASS Bridge W9DN {i}._hap._tcp.local.", const._TYPE_TXT, const._CLASS_IN | const._CLASS_UNIQUE, const._DNS_OTHER_TTL, From 46d3f551e561908749afbf296d30537371d88cd3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 13:33:11 -1000 Subject: [PATCH 311/434] chore(deps-dev): bump pytest-cov from 5.0.0 to 6.0.0 (#1474) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 56a2b910..bf39f792 100644 --- a/poetry.lock +++ b/poetry.lock @@ -472,17 +472,17 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] name = "pytest-cov" -version = "5.0.0" +version = "6.0.0" description = "Pytest plugin for measuring coverage." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, - {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, ] [package.dependencies] -coverage = {version = ">=5.2.1", extras = ["toml"]} +coverage = {version = ">=7.5", extras = ["toml"]} pytest = ">=4.6" [package.extras] @@ -615,4 +615,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f5c250deb75c032aed220cdb67ee2a16316143cec5458a8bb99fd9bafbdbf1ad" +content-hash = "748c1d5a24ec0b6c1561daace768193ce87acc53d4cabf06c82551a45c079c94" diff --git a/pyproject.toml b/pyproject.toml index 9da8d87d..450c8aa7 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] pytest = ">=7.2,<9.0" -pytest-cov = ">=4,<6" +pytest-cov = ">=4,<7" pytest-asyncio = ">=0.20.3,<0.26.0" cython = "^3.0.5" setuptools = ">=65.6.3,<76.0.0" From a97d228213350b5fc34ed5c82a635dc1d0f9438d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Jan 2025 13:35:47 -1000 Subject: [PATCH 312/434] chore: use native arm runners for arm wheels (#1484) --- .github/workflows/ci.yml | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 520bf35e..0ec37345 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -165,7 +165,14 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest, windows-2019, macos-13, macos-latest] + os: + [ + ubuntu-24.04-arm, + ubuntu-latest, + windows-2019, + macos-13, + macos-latest, + ] musl: ["", "musllinux"] exclude: - os: windows-2019 @@ -201,12 +208,6 @@ jobs: ref: "${{ steps.release_tag.outputs.newest_release_tag }}" fetch-depth: 0 - - name: Set up QEMU - if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v3 - with: - platforms: arm64 - - name: Build wheels (non-musl) uses: pypa/cibuildwheel@v2.22.0 if: matrix.musl == '' @@ -214,7 +215,7 @@ jobs: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux* CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc - CIBW_ARCHS_LINUX: auto aarch64 + CIBW_ARCHS_LINUX: ${matrix.os == ubuntu-24.04-arm && 'aarch64' || 'auto'} CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 @@ -225,7 +226,7 @@ jobs: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *manylinux* CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc - CIBW_ARCHS_LINUX: auto aarch64 + CIBW_ARCHS_LINUX: ${matrix.os == ubuntu-24.04-arm && 
'aarch64' || 'auto'} CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 From dde26c655a49811c11071b0531e408a188687009 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Fri, 17 Jan 2025 00:35:54 +0100 Subject: [PATCH 313/434] fix(docs): remove repetition of words (#1479) Co-authored-by: J. Nick Koston --- tests/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/__init__.py b/tests/__init__.py index dc4524fb..5b1789a1 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -64,7 +64,7 @@ def _wait_for_start(zc: Zeroconf) -> None: @cache def has_working_ipv6(): - """Return True if if the system can bind an IPv6 address.""" + """Return True if the system can bind an IPv6 address.""" if not socket.has_ipv6: return False From 22a0fb487db27bc2c6448a9167742f3040e910ba Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Fri, 17 Jan 2025 00:36:03 +0100 Subject: [PATCH 314/434] feat: migrate to native types (#1472) Co-authored-by: J. Nick Koston Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- bench/incoming.py | 3 +-- docs/conf.py | 4 ++-- examples/async_apple_scanner.py | 8 +++++--- examples/async_browser.py | 8 +++++--- examples/async_registration.py | 9 +++++---- examples/async_service_info_request.py | 10 ++++++---- tests/__init__.py | 9 +++++---- tests/benchmarks/test_incoming.py | 3 +-- tests/services/test_browser.py | 11 ++++++----- tests/services/test_info.py | 12 +++++++----- tests/test_cache.py | 1 - tests/test_core.py | 10 ++++++---- tests/test_dns.py | 1 - tests/test_engine.py | 3 +-- tests/test_exceptions.py | 1 - tests/test_handlers.py | 10 +++++----- tests/test_history.py | 8 +++----- tests/test_init.py | 1 - tests/test_listener.py | 5 +++-- tests/test_protocol.py | 1 - tests/test_services.py | 4 ++-- tests/utils/test_asyncio.py | 5 +++-- 22 files changed, 66 insertions(+), 61 deletions(-) diff --git a/bench/incoming.py b/bench/incoming.py index 3edcfec2..eb35f8a9 100644 --- a/bench/incoming.py +++ 
b/bench/incoming.py @@ -2,7 +2,6 @@ import socket import timeit -from typing import List from zeroconf import ( DNSAddress, @@ -15,7 +14,7 @@ ) -def generate_packets() -> List[bytes]: +def generate_packets() -> list[bytes]: out = DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) address = socket.inet_pton(socket.AF_INET, "192.168.208.5") diff --git a/docs/conf.py b/docs/conf.py index b3ad57ea..647742e6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -7,7 +7,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. -from typing import Any, Dict +from typing import Any import zeroconf @@ -173,7 +173,7 @@ # -- Options for LaTeX output -------------------------------------------------- -latex_elements: Dict[str, Any] = {} +latex_elements: dict[str, Any] = {} # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). diff --git a/examples/async_apple_scanner.py b/examples/async_apple_scanner.py index e126e8f9..19691662 100755 --- a/examples/async_apple_scanner.py +++ b/examples/async_apple_scanner.py @@ -2,10 +2,12 @@ """Scan for apple devices.""" +from __future__ import annotations + import argparse import asyncio import logging -from typing import Any, Optional, cast +from typing import Any, cast from zeroconf import DNSQuestionType, IPVersion, ServiceStateChange, Zeroconf from zeroconf.asyncio import AsyncServiceBrowser, AsyncServiceInfo, AsyncZeroconf @@ -76,8 +78,8 @@ async def _async_show_service_info(zeroconf: Zeroconf, service_type: str, name: class AsyncAppleScanner: def __init__(self, args: Any) -> None: self.args = args - self.aiobrowser: Optional[AsyncServiceBrowser] = None - self.aiozc: Optional[AsyncZeroconf] = None + self.aiobrowser: AsyncServiceBrowser | None = None + self.aiozc: AsyncZeroconf | None = None async def async_run(self) -> None: self.aiozc = AsyncZeroconf(ip_version=ip_version) diff --git 
a/examples/async_browser.py b/examples/async_browser.py index 31b55e4a..d86cfc5e 100755 --- a/examples/async_browser.py +++ b/examples/async_browser.py @@ -5,10 +5,12 @@ The default is HTTP and HAP; use --find to search for all available services in the network """ +from __future__ import annotations + import argparse import asyncio import logging -from typing import Any, Optional, cast +from typing import Any, cast from zeroconf import IPVersion, ServiceStateChange, Zeroconf from zeroconf.asyncio import ( @@ -56,8 +58,8 @@ async def async_display_service_info(zeroconf: Zeroconf, service_type: str, name class AsyncRunner: def __init__(self, args: Any) -> None: self.args = args - self.aiobrowser: Optional[AsyncServiceBrowser] = None - self.aiozc: Optional[AsyncZeroconf] = None + self.aiobrowser: AsyncServiceBrowser | None = None + self.aiozc: AsyncZeroconf | None = None async def async_run(self) -> None: self.aiozc = AsyncZeroconf(ip_version=ip_version) diff --git a/examples/async_registration.py b/examples/async_registration.py index 56cb91f2..d01b15e1 100755 --- a/examples/async_registration.py +++ b/examples/async_registration.py @@ -2,11 +2,12 @@ """Example of announcing 250 services (in this case, a fake HTTP server).""" +from __future__ import annotations + import argparse import asyncio import logging import socket -from typing import List, Optional from zeroconf import IPVersion from zeroconf.asyncio import AsyncServiceInfo, AsyncZeroconf @@ -15,9 +16,9 @@ class AsyncRunner: def __init__(self, ip_version: IPVersion) -> None: self.ip_version = ip_version - self.aiozc: Optional[AsyncZeroconf] = None + self.aiozc: AsyncZeroconf | None = None - async def register_services(self, infos: List[AsyncServiceInfo]) -> None: + async def register_services(self, infos: list[AsyncServiceInfo]) -> None: self.aiozc = AsyncZeroconf(ip_version=self.ip_version) tasks = [self.aiozc.async_register_service(info) for info in infos] background_tasks = await asyncio.gather(*tasks) @@ 
-26,7 +27,7 @@ async def register_services(self, infos: List[AsyncServiceInfo]) -> None: while True: await asyncio.sleep(1) - async def unregister_services(self, infos: List[AsyncServiceInfo]) -> None: + async def unregister_services(self, infos: list[AsyncServiceInfo]) -> None: assert self.aiozc is not None tasks = [self.aiozc.async_unregister_service(info) for info in infos] background_tasks = await asyncio.gather(*tasks) diff --git a/examples/async_service_info_request.py b/examples/async_service_info_request.py index 42df809d..ca75fc52 100755 --- a/examples/async_service_info_request.py +++ b/examples/async_service_info_request.py @@ -7,10 +7,12 @@ """ +from __future__ import annotations + import argparse import asyncio import logging -from typing import Any, List, Optional, cast +from typing import Any, cast from zeroconf import IPVersion, ServiceBrowser, ServiceStateChange, Zeroconf from zeroconf.asyncio import AsyncServiceInfo, AsyncZeroconf @@ -22,7 +24,7 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: zeroconf = aiozc.zeroconf while True: await asyncio.sleep(5) - infos: List[AsyncServiceInfo] = [] + infos: list[AsyncServiceInfo] = [] for name in zeroconf.cache.names(): if not name.endswith(HAP_TYPE): continue @@ -50,8 +52,8 @@ async def async_watch_services(aiozc: AsyncZeroconf) -> None: class AsyncRunner: def __init__(self, args: Any) -> None: self.args = args - self.threaded_browser: Optional[ServiceBrowser] = None - self.aiozc: Optional[AsyncZeroconf] = None + self.threaded_browser: ServiceBrowser | None = None + self.aiozc: AsyncZeroconf | None = None async def async_run(self) -> None: self.aiozc = AsyncZeroconf(ip_version=ip_version) diff --git a/tests/__init__.py b/tests/__init__.py index 5b1789a1..a70cca60 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -20,11 +20,12 @@ USA """ +from __future__ import annotations + import asyncio import socket import time from functools import cache -from typing import List, Optional, Set 
from unittest import mock import ifaddr @@ -36,11 +37,11 @@ class QuestionHistoryWithoutSuppression(QuestionHistory): - def suppresses(self, question: DNSQuestion, now: float, known_answers: Set[DNSRecord]) -> bool: + def suppresses(self, question: DNSQuestion, now: float, known_answers: set[DNSRecord]) -> bool: return False -def _inject_responses(zc: Zeroconf, msgs: List[DNSIncoming]) -> None: +def _inject_responses(zc: Zeroconf, msgs: list[DNSIncoming]) -> None: """Inject a DNSIncoming response.""" assert zc.loop is not None @@ -90,7 +91,7 @@ def _clear_cache(zc: Zeroconf) -> None: zc.question_history.clear() -def time_changed_millis(millis: Optional[float] = None) -> None: +def time_changed_millis(millis: float | None = None) -> None: """Call all scheduled events for a time.""" loop = asyncio.get_running_loop() loop_time = loop.time() diff --git a/tests/benchmarks/test_incoming.py b/tests/benchmarks/test_incoming.py index 6285c19f..e0552f3a 100644 --- a/tests/benchmarks/test_incoming.py +++ b/tests/benchmarks/test_incoming.py @@ -1,7 +1,6 @@ """Benchmark for DNSIncoming.""" import socket -from typing import List from pytest_codspeed import BenchmarkFixture @@ -16,7 +15,7 @@ ) -def generate_packets() -> List[bytes]: +def generate_packets() -> list[bytes]: out = DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) address = socket.inet_pton(socket.AF_INET, "192.168.208.5") diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index f4d750e0..5268c341 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -6,8 +6,9 @@ import socket import time import unittest +from collections.abc import Iterable from threading import Event -from typing import Iterable, List, Set, cast +from typing import cast from unittest.mock import patch import pytest @@ -580,7 +581,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): old_send = zeroconf_browser.async_send expected_ttl = const._DNS_OTHER_TTL - 
questions: List[List[DNSQuestion]] = [] + questions: list[list[DNSQuestion]] = [] def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" @@ -1151,7 +1152,7 @@ async def test_generate_service_query_suppress_duplicate_questions(): 10000, f"known-to-other.{name}", ) - other_known_answers: Set[r.DNSRecord] = {answer} + other_known_answers: set[r.DNSRecord] = {answer} zc.question_history.add_question_at_time(question, now, other_known_answers) assert zc.question_history.suppresses(question, now, other_known_answers) @@ -1196,7 +1197,7 @@ async def test_query_scheduler(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() zc = aiozc.zeroconf - sends: List[r.DNSIncoming] = [] + sends: list[r.DNSIncoming] = [] def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" @@ -1289,7 +1290,7 @@ async def test_query_scheduler_rescue_records(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.zeroconf.async_wait_for_start() zc = aiozc.zeroconf - sends: List[r.DNSIncoming] = [] + sends: list[r.DNSIncoming] = [] def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 7051e6fe..1f8924a3 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -1,14 +1,16 @@ """Unit tests for zeroconf._services.info.""" +from __future__ import annotations + import asyncio import logging import os import socket import threading import unittest +from collections.abc import Iterable from ipaddress import ip_address from threading import Event -from typing import Iterable, List, Optional from unittest.mock import patch import pytest @@ -264,7 +266,7 @@ def test_get_info_partial(self): send_event = Event() service_info_event = Event() - last_sent: Optional[r.DNSOutgoing] = None + last_sent: 
r.DNSOutgoing | None = None def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" @@ -407,7 +409,7 @@ def test_get_info_suppressed_by_question_history(self): send_event = Event() service_info_event = Event() - last_sent: Optional[r.DNSOutgoing] = None + last_sent: r.DNSOutgoing | None = None def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" @@ -534,7 +536,7 @@ def test_get_info_single(self): send_event = Event() service_info_event = Event() - last_sent = None # type: Optional[r.DNSOutgoing] + last_sent: r.DNSOutgoing | None = None def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): """Sends an outgoing packet.""" @@ -879,7 +881,7 @@ def test_filter_address_by_type_from_service_info(): ipv6 = socket.inet_pton(socket.AF_INET6, "2001:db8::1") info = ServiceInfo(type_, registration_name, 80, 0, 0, desc, "ash-2.local.", addresses=[ipv4, ipv6]) - def dns_addresses_to_addresses(dns_address: List[DNSAddress]) -> List[bytes]: + def dns_addresses_to_addresses(dns_address: list[DNSAddress]) -> list[bytes]: return [address.address for address in dns_address] assert dns_addresses_to_addresses(info.dns_addresses()) == [ipv4, ipv6] diff --git a/tests/test_cache.py b/tests/test_cache.py index 99de9827..f5304cef 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,7 +1,6 @@ """Unit tests for zeroconf._cache.""" import logging -import unittest import unittest.mock from heapq import heapify, heappop diff --git a/tests/test_core.py b/tests/test_core.py index 9ccdcc78..fcfdf424 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._core""" +from __future__ import annotations + import asyncio import logging import os @@ -9,7 +11,7 @@ import time import unittest import unittest.mock -from typing import Tuple, Union, cast +from typing import cast from unittest.mock import AsyncMock, Mock, 
patch import pytest @@ -38,13 +40,13 @@ def teardown_module(): def threadsafe_query( - zc: "Zeroconf", - protocol: "AsyncListener", + zc: Zeroconf, + protocol: AsyncListener, msg: DNSIncoming, addr: str, port: int, transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]], + v6_flow_scope: tuple[()] | tuple[int, int], ) -> None: async def make_query(): protocol.handle_query_or_defer(msg, addr, port, transport, v6_flow_scope) diff --git a/tests/test_dns.py b/tests/test_dns.py index e9c4dc09..491e2ca7 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -3,7 +3,6 @@ import logging import os import socket -import unittest import unittest.mock import pytest diff --git a/tests/test_engine.py b/tests/test_engine.py index 79560d9c..23a03949 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -3,7 +3,6 @@ import asyncio import itertools import logging -from typing import Set from unittest.mock import patch import pytest @@ -41,7 +40,7 @@ async def test_reaper(): zeroconf.cache.async_add_records([record_with_10s_ttl, record_with_1s_ttl]) question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) now = r.current_time_millis() - other_known_answers: Set[r.DNSRecord] = { + other_known_answers: set[r.DNSRecord] = { r.DNSPointer( "_hap._tcp.local.", const._TYPE_PTR, diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index 1f5bd738..cf004d2c 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,7 +1,6 @@ """Unit tests for zeroconf._exceptions""" import logging -import unittest import unittest.mock import zeroconf as r diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 8cf5cc9a..80ee7f40 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -7,7 +7,7 @@ import time import unittest import unittest.mock -from typing import List, cast +from typing import cast from unittest.mock import patch import pytest @@ -1371,7 +1371,7 @@ async def 
test_record_update_manager_add_listener_callsback_existing_records(): class MyListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -1973,7 +1973,7 @@ async def test_add_listener_warns_when_not_using_record_update_listener(caplog): class MyListener: """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -2005,7 +2005,7 @@ async def test_async_updates_iteration_safe(): class OtherListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -2014,7 +2014,7 @@ def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.Recor class ListenerThatAddsListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: List[r.RecordUpdate]) -> None: + def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) zc.async_add_listener(other, None) diff --git a/tests/test_history.py b/tests/test_history.py index c604d383..606362d1 100644 --- 
a/tests/test_history.py +++ b/tests/test_history.py @@ -1,7 +1,5 @@ """Unit tests for _history.py.""" -from typing import Set - import zeroconf as r import zeroconf.const as const from zeroconf._history import QuestionHistory @@ -12,7 +10,7 @@ def test_question_suppression(): question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) now = r.current_time_millis() - other_known_answers: Set[r.DNSRecord] = { + other_known_answers: set[r.DNSRecord] = { r.DNSPointer( "_hap._tcp.local.", const._TYPE_PTR, @@ -21,7 +19,7 @@ def test_question_suppression(): "known-to-other._hap._tcp.local.", ) } - our_known_answers: Set[r.DNSRecord] = { + our_known_answers: set[r.DNSRecord] = { r.DNSPointer( "_hap._tcp.local.", const._TYPE_PTR, @@ -54,7 +52,7 @@ def test_question_expire(): now = r.current_time_millis() question = r.DNSQuestion("_hap._tcp._local.", const._TYPE_PTR, const._CLASS_IN) - other_known_answers: Set[r.DNSRecord] = { + other_known_answers: set[r.DNSRecord] = { r.DNSPointer( "_hap._tcp.local.", const._TYPE_PTR, diff --git a/tests/test_init.py b/tests/test_init.py index 080d485e..78fb1e37 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -3,7 +3,6 @@ import logging import socket import time -import unittest import unittest.mock from unittest.mock import patch diff --git a/tests/test_listener.py b/tests/test_listener.py index f5af91f8..a55fc143 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -1,9 +1,10 @@ """Unit tests for zeroconf._listener""" +from __future__ import annotations + import logging import unittest import unittest.mock -from typing import Tuple, Union from unittest.mock import MagicMock, patch import zeroconf as r @@ -146,7 +147,7 @@ def handle_query_or_defer( addr: str, port: int, transport: _engine._WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + v6_flow_scope: tuple[()] | tuple[int, int] = (), ) -> None: """Handle a query or defer it for later processing.""" 
super().handle_query_or_defer(msg, addr, port, transport, v6_flow_scope) diff --git a/tests/test_protocol.py b/tests/test_protocol.py index e46dbf03..1397c60c 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -5,7 +5,6 @@ import os import socket import struct -import unittest import unittest.mock from typing import cast diff --git a/tests/test_services.py b/tests/test_services.py index 908782c7..992070e2 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -6,7 +6,7 @@ import time import unittest from threading import Event -from typing import Any, Dict +from typing import Any import pytest @@ -91,7 +91,7 @@ def update_service(self, zeroconf, type, name): } zeroconf_registrar = Zeroconf(interfaces=["127.0.0.1"]) - desc: Dict[str, Any] = {"path": "/~paulsm/"} + desc: dict[str, Any] = {"path": "/~paulsm/"} desc.update(properties) addresses = [socket.inet_aton("10.0.1.2")] if has_working_ipv6() and not os.environ.get("SKIP_IPV6"): diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index f22d85ed..09137a71 100644 --- a/tests/utils/test_asyncio.py +++ b/tests/utils/test_asyncio.py @@ -1,11 +1,12 @@ """Unit tests for zeroconf._utils.asyncio.""" +from __future__ import annotations + import asyncio import concurrent.futures import contextlib import threading import time -from typing import Optional from unittest.mock import patch import pytest @@ -120,7 +121,7 @@ def test_cumulative_timeouts_less_than_close_plus_buffer(): async def test_run_coro_with_timeout() -> None: """Test running a coroutine with a timeout raises EventLoopBlocked.""" loop = asyncio.get_event_loop() - task: Optional[asyncio.Task] = None + task: asyncio.Task | None = None async def _saved_sleep_task(): nonlocal task From d9be7155a0ef1ac521e5bbedd3884ddeb9f0b99d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 16 Jan 2025 13:36:11 -1000 Subject: [PATCH 315/434] feat: small performance improvement to writing outgoing packets (#1482) --- src/zeroconf/_dns.pxd | 2 +- src/zeroconf/_protocol/outgoing.pxd | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index e41ac4c3..5ff98a8d 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -134,7 +134,7 @@ cdef class DNSService(DNSRecord): cdef class DNSNsec(DNSRecord): cdef public cython.int _hash - cdef public object next_name + cdef public str next_name cdef public cython.list rdtypes cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, str next_name, cython.list rdtypes, double created) diff --git a/src/zeroconf/_protocol/outgoing.pxd b/src/zeroconf/_protocol/outgoing.pxd index fa1aeebc..bb9730b8 100644 --- a/src/zeroconf/_protocol/outgoing.pxd +++ b/src/zeroconf/_protocol/outgoing.pxd @@ -108,6 +108,8 @@ cdef class DNSOutgoing: cpdef void write_string(self, cython.bytes value) + cpdef void write_character_string(self, cython.bytes value) + @cython.locals(utfstr=bytes) cdef void _write_utf(self, cython.str value) From aaec7c2f612fe7182fba07a8e5f97ac2f2086793 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 16 Jan 2025 13:47:18 -1000 Subject: [PATCH 316/434] chore: add cython linter (#1416) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 5 +++ pyproject.toml | 4 +++ src/zeroconf/_cache.pxd | 50 +++++++----------------------- src/zeroconf/_handlers/answers.pxd | 3 +- src/zeroconf/_listener.pxd | 1 - src/zeroconf/_services/browser.pxd | 6 +++- src/zeroconf/_utils/ipaddress.pxd | 7 +++-- 7 files changed, 32 insertions(+), 44 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8551ee8b..c7603360 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -57,3 +57,8 @@ repos: hooks: - id: mypy additional_dependencies: [] + - repo: https://github.com/MarcoGorelli/cython-lint + rev: v0.16.2 + hooks: + - id: cython-lint + - id: double-quote-cython-strings diff --git a/pyproject.toml b/pyproject.toml index 450c8aa7..c77333ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -193,3 +193,7 @@ build-backend = "poetry.core.masonry.api" [tool.codespell] ignore-words-list = ["additionals", "HASS"] + +[tool.cython-lint] +max-line-length = 88 +ignore = ['E501'] # too many to fix right now diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index 7f78a736..a39ed756 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -20,9 +20,8 @@ cdef unsigned int _TYPE_PTR cdef cython.uint _ONE_SECOND cdef unsigned int _MIN_SCHEDULED_RECORD_EXPIRATION -@cython.locals( - record_cache=dict, -) + +@cython.locals(record_cache=dict) cdef _remove_key(cython.dict cache, object key, DNSRecord record) @@ -37,36 +36,23 @@ cdef class DNSCache: cpdef void async_remove_records(self, object entries) - @cython.locals( - store=cython.dict, - ) + @cython.locals(store=cython.dict) cpdef DNSRecord async_get_unique(self, DNSRecord entry) - @cython.locals( - record=DNSRecord, - ) + @cython.locals(record=DNSRecord) cpdef list async_expire(self, double now) 
- @cython.locals( - records=cython.dict, - record=DNSRecord, - ) + @cython.locals(records=cython.dict, record=DNSRecord) cpdef list async_all_by_details(self, str name, unsigned int type_, unsigned int class_) cpdef list async_entries_with_name(self, str name) cpdef list async_entries_with_server(self, str name) - @cython.locals( - cached_entry=DNSRecord, - records=dict - ) + @cython.locals(cached_entry=DNSRecord, records=dict) cpdef DNSRecord get_by_details(self, str name, unsigned int type_, unsigned int class_) - @cython.locals( - records=cython.dict, - entry=DNSRecord, - ) + @cython.locals(records=cython.dict, entry=DNSRecord) cpdef cython.list get_all_by_details(self, str name, unsigned int type_, unsigned int class_) @cython.locals( @@ -76,31 +62,19 @@ cdef class DNSCache: ) cdef bint _async_add(self, DNSRecord record) - @cython.locals( - service_record=DNSService - ) + @cython.locals(service_record=DNSService) cdef void _async_remove(self, DNSRecord record) - @cython.locals( - record=DNSRecord, - created_double=double, - ) + @cython.locals(record=DNSRecord, created_double=double) cpdef void async_mark_unique_records_older_than_1s_to_expire(self, cython.set unique_types, object answers, double now) - @cython.locals( - entries=dict - ) + @cython.locals(entries=dict) cpdef list entries_with_name(self, str name) - @cython.locals( - entries=dict - ) + @cython.locals(entries=dict) cpdef list entries_with_server(self, str server) - @cython.locals( - record=DNSRecord, - now=double - ) + @cython.locals(record=DNSRecord, now=double) cpdef current_entry_with_name_and_alias(self, str name, str alias) cpdef void _async_set_created_ttl( diff --git a/src/zeroconf/_handlers/answers.pxd b/src/zeroconf/_handlers/answers.pxd index 25b3c1a1..759905f2 100644 --- a/src/zeroconf/_handlers/answers.pxd +++ b/src/zeroconf/_handlers/answers.pxd @@ -20,8 +20,6 @@ cdef class AnswerGroup: cdef public cython.dict answers - - cdef object _FLAGS_QR_RESPONSE_AA cdef object NAME_GETTER @@ 
-31,5 +29,6 @@ cpdef DNSOutgoing construct_outgoing_unicast_answers( cython.dict answers, bint ucast_source, cython.list questions, object id_ ) + @cython.locals(answer=DNSRecord, additionals=cython.set, additional=DNSRecord) cdef void _add_answers_additionals(DNSOutgoing out, cython.dict answers) diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 96f52be0..20084b47 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -16,7 +16,6 @@ cdef cython.uint _MAX_MSG_ABSOLUTE cdef cython.uint _DUPLICATE_PACKET_SUPPRESSION_INTERVAL - cdef class AsyncListener: cdef public object zc diff --git a/src/zeroconf/_services/browser.pxd b/src/zeroconf/_services/browser.pxd index 4649291c..1ea99c82 100644 --- a/src/zeroconf/_services/browser.pxd +++ b/src/zeroconf/_services/browser.pxd @@ -44,6 +44,7 @@ cdef class _DNSPointerOutgoingBucket: cpdef add(self, cython.uint max_compressed_size, DNSQuestion question, cython.set answers) + @cython.locals(cache=DNSCache, question_history=QuestionHistory, record=DNSRecord, qu_question=bint) cpdef list generate_service_query( object zc, @@ -53,9 +54,11 @@ cpdef list generate_service_query( object question_type ) + @cython.locals(answer=DNSPointer, query_buckets=list, question=DNSQuestion, max_compressed_size=cython.uint, max_bucket_size=cython.uint, query_bucket=_DNSPointerOutgoingBucket) cdef list _group_ptr_queries_with_known_answers(double now_millis, bint multicast, cython.dict question_with_known_answers) + cdef class QueryScheduler: cdef object _zc @@ -83,7 +86,7 @@ cdef class QueryScheduler: @cython.locals(current=_ScheduledPTRQuery, expire_time=double) cpdef void reschedule_ptr_first_refresh(self, DNSPointer pointer) - @cython.locals(ttl_millis='unsigned int', additional_wait=double, next_query_time=double) + @cython.locals(ttl_millis="unsigned int", additional_wait=double, next_query_time=double) cpdef void schedule_rescue_query(self, _ScheduledPTRQuery query, double now_millis, float 
additional_percentage) cpdef void _process_startup_queries(self) @@ -93,6 +96,7 @@ cdef class QueryScheduler: cpdef void async_send_ready_queries(self, bint first_request, double now_millis, set ready_types) + cdef class _ServiceBrowserBase(RecordUpdateListener): cdef public cython.set types diff --git a/src/zeroconf/_utils/ipaddress.pxd b/src/zeroconf/_utils/ipaddress.pxd index 01d38164..78bbdfbd 100644 --- a/src/zeroconf/_utils/ipaddress.pxd +++ b/src/zeroconf/_utils/ipaddress.pxd @@ -1,13 +1,16 @@ -cdef bint TYPE_CHECKING - from .._dns cimport DNSAddress +cdef bint TYPE_CHECKING + cpdef get_ip_address_object_from_record(DNSAddress record) + @cython.locals(address_str=str) cpdef str_without_scope_id(object addr) + cpdef ip_bytes_and_scope_to_address(object addr, object scope_id) + cdef object cached_ip_addresses_wrapper From 127338d2ae69fe1fe9b25f0614db7448c427c751 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 16 Jan 2025 23:59:38 +0000 Subject: [PATCH 317/434] 0.140.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 18 ++++++++++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b1e28c16..91ea999d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,24 @@ # CHANGELOG +## v0.140.0 (2025-01-16) + +### Bug Fixes + +* fix(docs): remove repetition of words (#1479) + +Co-authored-by: J. Nick Koston ([`dde26c6`](https://github.com/python-zeroconf/python-zeroconf/commit/dde26c655a49811c11071b0531e408a188687009)) + +### Features + +* feat: small performance improvement to writing outgoing packets (#1482) ([`d9be715`](https://github.com/python-zeroconf/python-zeroconf/commit/d9be7155a0ef1ac521e5bbedd3884ddeb9f0b99d)) + +* feat: migrate to native types (#1472) + +Co-authored-by: J. 
Nick Koston +Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> ([`22a0fb4`](https://github.com/python-zeroconf/python-zeroconf/commit/22a0fb487db27bc2c6448a9167742f3040e910ba)) + + ## v0.139.0 (2025-01-09) ### Features diff --git a/pyproject.toml b/pyproject.toml index c77333ee..934838fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.139.0" +version = "0.140.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 2c4004ab..d13d7e3f 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.139.0" +__version__ = "0.140.0" __license__ = "LGPL" From 9d228e28eead1561deda696e8837d59896cbc98d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 16 Jan 2025 14:20:35 -1000 Subject: [PATCH 318/434] fix: wheel builds for aarch64 (#1485) --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0ec37345..b5d21d40 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -215,7 +215,7 @@ jobs: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux* CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc - CIBW_ARCHS_LINUX: ${matrix.os == ubuntu-24.04-arm && 'aarch64' || 'auto'} + CIBW_ARCHS_LINUX: ${{ matrix.os == 'ubuntu-24.04-arm' && 'aarch64' || 'auto' }} CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 @@ -226,7 +226,7 @@ jobs: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *manylinux* CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc - CIBW_ARCHS_LINUX: ${matrix.os == ubuntu-24.04-arm && 'aarch64' || 'auto'} + CIBW_ARCHS_LINUX: ${{ matrix.os == 'ubuntu-24.04-arm' && 'aarch64' || 'auto' }} CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 From 9af848b42f002bded7a2aef343bdd423ed4745d4 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 17 Jan 2025 00:30:33 +0000 Subject: [PATCH 319/434] 0.140.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 ++++++++- pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 91ea999d..ef8f5deb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,14 @@ # CHANGELOG -## v0.140.0 (2025-01-16) +## v0.140.1 (2025-01-17) + +### Bug Fixes + +* fix: wheel builds for aarch64 (#1485) ([`9d228e2`](https://github.com/python-zeroconf/python-zeroconf/commit/9d228e28eead1561deda696e8837d59896cbc98d)) + + +## v0.140.0 (2025-01-17) ### Bug Fixes diff --git a/pyproject.toml 
b/pyproject.toml index 934838fc..9ea7f9cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.140.0" +version = "0.140.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index d13d7e3f..22434e47 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.140.0" +__version__ = "0.140.1" __license__ = "LGPL" From dbc5d11c9f46d4d705d0d6557e7876d8fcff0a11 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 17:52:44 -1000 Subject: [PATCH 320/434] chore(pre-commit.ci): pre-commit autoupdate (#1486) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c7603360..87c38083 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,7 +39,7 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.1 + rev: v0.9.2 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] @@ -58,7 +58,7 @@ repos: - id: mypy additional_dependencies: [] - repo: https://github.com/MarcoGorelli/cython-lint - rev: v0.16.2 + rev: v0.16.6 hooks: - id: cython-lint - id: double-quote-cython-strings From 7db643687199e7383ef18419824e5fbba69d6b51 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 21 Jan 2025 17:52:58 -1000 Subject: [PATCH 321/434] chore: bump upload/download artifact to v4 (#1487) --- .github/workflows/ci.yml | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b5d21d40..e16ed4da 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -161,7 +161,7 @@ jobs: needs: [release] if: needs.release.outputs.released == 'true' - name: Build wheels on ${{ matrix.os }} + name: Build wheels on ${{ matrix.os }} (${{ matrix.musl }}) runs-on: ${{ matrix.os }} strategy: matrix: @@ -208,31 +208,20 @@ jobs: ref: "${{ steps.release_tag.outputs.newest_release_tag }}" fetch-depth: 0 - - name: Build wheels (non-musl) + - name: Build wheels ${{ matrix.musl }} uses: pypa/cibuildwheel@v2.22.0 - if: matrix.musl == '' # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *musllinux* + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc CIBW_ARCHS_LINUX: ${{ matrix.os == 'ubuntu-24.04-arm' && 'aarch64' || 'auto' }} CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 - - name: Build wheels (musl) - uses: pypa/cibuildwheel@v2.22.0 - if: matrix.musl == 'musllinux' - # to supply options, put them in 'env', like: - env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 *manylinux* - CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc - CIBW_ARCHS_LINUX: ${{ matrix.os == 'ubuntu-24.04-arm' && 'aarch64' || 'auto' }} - CIBW_BUILD_VERBOSITY: 3 - REQUIRE_CYTHON: 1 - - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: path: ./wheelhouse/*.whl + name: wheels-${{ matrix.os 
}}-${{ matrix.musl }} upload_pypi: needs: [build_wheels] @@ -240,12 +229,13 @@ jobs: environment: release steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: # unpacks default artifact into dist/ # if `name: artifact` is omitted, the action will create extra parent dir - name: artifact + pattern: wheels-* path: dist + merge-multiple: true - uses: pypa/gh-action-pypi-publish@v1.5.0 with: From 8f86b35deca40cbc05451e758010d946884a917a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 18:13:12 -1000 Subject: [PATCH 322/434] chore(ci): bump the github-actions group across 1 directory with 7 updates (#1488) --- .github/workflows/ci.yml | 22 +++++++++++----------- commitlint.config.mjs | 8 ++++++++ 2 files changed, 19 insertions(+), 11 deletions(-) create mode 100644 commitlint.config.mjs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e16ed4da..e43c63b8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,10 +26,10 @@ jobs: name: Lint Commit Messages runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: wagoid/commitlint-github-action@v5 + - uses: wagoid/commitlint-github-action@v6 test: strategy: @@ -65,7 +65,7 @@ jobs: python-version: "pypy-3.10" runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install poetry run: pipx install poetry - name: Set up Python @@ -87,7 +87,7 @@ jobs: - name: Test with Pytest run: poetry run pytest --durations=20 --timeout=60 -v --cov=zeroconf --cov-branch --cov-report xml --cov-report html --cov-report term-missing tests - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} @@ -96,10 +96,10 @@ jobs: steps: - uses: actions/checkout@v4 - name: Setup Python 3.12 - uses: 
actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.12 - - uses: snok/install-poetry@v1.3.4 + - uses: snok/install-poetry@v1.4.1 - name: Install Dependencies run: | REQUIRE_CYTHON=1 poetry install --only=main,dev @@ -134,14 +134,14 @@ jobs: # Do a dry run of PSR - name: Test release - uses: python-semantic-release/python-semantic-release@v9.12.0 + uses: python-semantic-release/python-semantic-release@v9.16.1 if: github.ref_name != 'master' with: root_options: --noop # On main branch: actual PSR + upload to PyPI & GitHub - name: Release - uses: python-semantic-release/python-semantic-release@v9.12.0 + uses: python-semantic-release/python-semantic-release@v9.16.1 id: release if: github.ref_name == 'master' with: @@ -183,7 +183,7 @@ jobs: musl: "musllinux" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 ref: "master" @@ -203,7 +203,7 @@ jobs: run: | echo "::set-output name=newest_release_tag::$(semantic-release print-version --current)" - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: "${{ steps.release_tag.outputs.newest_release_tag }}" fetch-depth: 0 @@ -237,7 +237,7 @@ jobs: path: dist merge-multiple: true - - uses: pypa/gh-action-pypi-publish@v1.5.0 + - uses: pypa/gh-action-pypi-publish@v1.12.3 with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} diff --git a/commitlint.config.mjs b/commitlint.config.mjs new file mode 100644 index 00000000..deb029ab --- /dev/null +++ b/commitlint.config.mjs @@ -0,0 +1,8 @@ +export default { + extends: ["@commitlint/config-conventional"], + rules: { + "header-max-length": [0, "always", Infinity], + "body-max-line-length": [0, "always", Infinity], + "footer-max-line-length": [0, "always", Infinity], + }, +}; From d6f1fda45ec1de6d1a34c7ccc62dbf87bbd5164e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Jan 2025 11:29:15 -1000 Subject: [PATCH 323/434] chore: add benchmarks for adding and expiring records (#1489) --- tests/benchmarks/test_cache.py | 48 ++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 tests/benchmarks/test_cache.py diff --git a/tests/benchmarks/test_cache.py b/tests/benchmarks/test_cache.py new file mode 100644 index 00000000..051c1e47 --- /dev/null +++ b/tests/benchmarks/test_cache.py @@ -0,0 +1,48 @@ +from pytest_codspeed import BenchmarkFixture + +from zeroconf import DNSCache, DNSPointer, current_time_millis +from zeroconf.const import _CLASS_IN, _TYPE_PTR + + +def test_add_expire_1000_records(benchmark: BenchmarkFixture) -> None: + """Benchmark for DNSCache to expire 10000 records.""" + cache = DNSCache() + now = current_time_millis() + records = [ + DNSPointer( + name=f"test{id}.local.", + type_=_TYPE_PTR, + class_=_CLASS_IN, + ttl=60, + alias=f"test{id}.local.", + created=now, + ) + for id in range(1000) + ] + + @benchmark + def _expire_records() -> None: + cache.async_add_records(records) + cache.async_expire(now + 61_000) + + +def test_expire_no_records_to_expire(benchmark: BenchmarkFixture) -> None: + """Benchmark for DNSCache with 1000 records none to expire.""" + cache = DNSCache() + now = current_time_millis() + cache.async_add_records( + DNSPointer( + name=f"test{id}.local.", + type_=_TYPE_PTR, + class_=_CLASS_IN, + ttl=60, + alias=f"test{id}.local.", + created=now, + ) + for id in range(1000) + ) + cache.async_expire(now) + + @benchmark + def _expire_records() -> None: + cache.async_expire(now) From 854fef637c370dd1cd55300402417193e29777ef Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Jan 2025 11:52:29 -1000 Subject: [PATCH 324/434] chore: adjust cache benchmark to better reflect real cache data (#1491) --- tests/benchmarks/test_cache.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/benchmarks/test_cache.py b/tests/benchmarks/test_cache.py index 051c1e47..6fde9438 100644 --- a/tests/benchmarks/test_cache.py +++ b/tests/benchmarks/test_cache.py @@ -15,7 +15,7 @@ def test_add_expire_1000_records(benchmark: BenchmarkFixture) -> None: class_=_CLASS_IN, ttl=60, alias=f"test{id}.local.", - created=now, + created=now + id, ) for id in range(1000) ] @@ -23,7 +23,7 @@ def test_add_expire_1000_records(benchmark: BenchmarkFixture) -> None: @benchmark def _expire_records() -> None: cache.async_add_records(records) - cache.async_expire(now + 61_000) + cache.async_expire(now + 100_000) def test_expire_no_records_to_expire(benchmark: BenchmarkFixture) -> None: @@ -37,7 +37,7 @@ def test_expire_no_records_to_expire(benchmark: BenchmarkFixture) -> None: class_=_CLASS_IN, ttl=60, alias=f"test{id}.local.", - created=now, + created=now + id, ) for id in range(1000) ) From 628b13670d04327dd8d4908842f31b476598c7e8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 22 Jan 2025 12:23:54 -1000 Subject: [PATCH 325/434] feat: speed up adding and expiring records in the DNSCache (#1490) --- src/zeroconf/_cache.pxd | 6 ++++-- src/zeroconf/_cache.py | 11 ++++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index a39ed756..273d46c3 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -39,7 +39,7 @@ cdef class DNSCache: @cython.locals(store=cython.dict) cpdef DNSRecord async_get_unique(self, DNSRecord entry) - @cython.locals(record=DNSRecord) + @cython.locals(record=DNSRecord, when_record=tuple, when=double) cpdef list async_expire(self, double now) @cython.locals(records=cython.dict, record=DNSRecord) @@ -57,8 +57,10 @@ cdef class DNSCache: @cython.locals( store=cython.dict, + service_store=cython.dict, service_record=DNSService, - when=object + when=object, + new=bint ) cdef bint _async_add(self, DNSRecord record) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index a43bdc5c..1b7aae38 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -86,7 +86,8 @@ def _async_add(self, record: _DNSRecord) -> bool: # replaces any existing records that are __eq__ to each other which # removes the risk that accessing the cache from the wrong # direction would return the old incorrect entry. 
- store = self.cache.setdefault(record.key, {}) + if (store := self.cache.get(record.key)) is None: + store = self.cache[record.key] = {} new = record not in store and not isinstance(record, DNSNsec) store[record] = record when = record.created + (record.ttl * 1000) @@ -97,7 +98,9 @@ def _async_add(self, record: _DNSRecord) -> bool: if isinstance(record, DNSService): service_record = record - self.service_cache.setdefault(record.server_key, {})[service_record] = service_record + if (service_store := self.service_cache.get(service_record.server_key)) is None: + service_store = self.service_cache[service_record.server_key] = {} + service_store[service_record] = service_record return new def async_add_records(self, entries: Iterable[DNSRecord]) -> bool: @@ -145,7 +148,8 @@ def async_expire(self, now: _float) -> List[DNSRecord]: expired: List[DNSRecord] = [] # Find any expired records and add them to the to-delete list while self._expire_heap: - when, record = self._expire_heap[0] + when_record = self._expire_heap[0] + when = when_record[0] if when > now: break heappop(self._expire_heap) @@ -153,6 +157,7 @@ def async_expire(self, now: _float) -> List[DNSRecord]: # with a different expiration time as it will be removed # later when it reaches the top of the heap and its # expiration time is met. 
+ record = when_record[1] if self._expirations.get(record) == when: expired.append(record) From eae89d8ed000311b5950cab1a1bb0f67f30b0b94 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 22 Jan 2025 22:33:08 +0000 Subject: [PATCH 326/434] 0.141.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 5072 +++++--------------------------------- pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 650 insertions(+), 4426 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ef8f5deb..e5874ebb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5566 +1,1790 @@ # CHANGELOG +## v0.141.0 (2025-01-22) + +### Features + +- Speed up adding and expiring records in the DNSCache + ([#1490](https://github.com/python-zeroconf/python-zeroconf/pull/1490), + [`628b136`](https://github.com/python-zeroconf/python-zeroconf/commit/628b13670d04327dd8d4908842f31b476598c7e8)) + + ## v0.140.1 (2025-01-17) ### Bug Fixes -* fix: wheel builds for aarch64 (#1485) ([`9d228e2`](https://github.com/python-zeroconf/python-zeroconf/commit/9d228e28eead1561deda696e8837d59896cbc98d)) +- Wheel builds for aarch64 ([#1485](https://github.com/python-zeroconf/python-zeroconf/pull/1485), + [`9d228e2`](https://github.com/python-zeroconf/python-zeroconf/commit/9d228e28eead1561deda696e8837d59896cbc98d)) ## v0.140.0 (2025-01-17) ### Bug Fixes -* fix(docs): remove repetition of words (#1479) +- **docs**: Remove repetition of words + ([#1479](https://github.com/python-zeroconf/python-zeroconf/pull/1479), + [`dde26c6`](https://github.com/python-zeroconf/python-zeroconf/commit/dde26c655a49811c11071b0531e408a188687009)) -Co-authored-by: J. Nick Koston ([`dde26c6`](https://github.com/python-zeroconf/python-zeroconf/commit/dde26c655a49811c11071b0531e408a188687009)) +Co-authored-by: J. 
Nick Koston ### Features -* feat: small performance improvement to writing outgoing packets (#1482) ([`d9be715`](https://github.com/python-zeroconf/python-zeroconf/commit/d9be7155a0ef1ac521e5bbedd3884ddeb9f0b99d)) - -* feat: migrate to native types (#1472) +- Migrate to native types ([#1472](https://github.com/python-zeroconf/python-zeroconf/pull/1472), + [`22a0fb4`](https://github.com/python-zeroconf/python-zeroconf/commit/22a0fb487db27bc2c6448a9167742f3040e910ba)) Co-authored-by: J. Nick Koston -Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> ([`22a0fb4`](https://github.com/python-zeroconf/python-zeroconf/commit/22a0fb487db27bc2c6448a9167742f3040e910ba)) + +Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> + +- Small performance improvement to writing outgoing packets + ([#1482](https://github.com/python-zeroconf/python-zeroconf/pull/1482), + [`d9be715`](https://github.com/python-zeroconf/python-zeroconf/commit/d9be7155a0ef1ac521e5bbedd3884ddeb9f0b99d)) ## v0.139.0 (2025-01-09) ### Features -* feat: implement heapq for tracking cache expire times (#1465) ([`09db184`](https://github.com/python-zeroconf/python-zeroconf/commit/09db1848957b34415f364b7338e4adce99b57abc)) +- Implement heapq for tracking cache expire times + ([#1465](https://github.com/python-zeroconf/python-zeroconf/pull/1465), + [`09db184`](https://github.com/python-zeroconf/python-zeroconf/commit/09db1848957b34415f364b7338e4adce99b57abc)) ## v0.138.1 (2025-01-08) ### Bug Fixes -* fix: ensure cache does not return stale created and ttl values (#1469) ([`e05055c`](https://github.com/python-zeroconf/python-zeroconf/commit/e05055c584ca46080990437b2b385a187bc48458)) +- Ensure cache does not return stale created and ttl values + ([#1469](https://github.com/python-zeroconf/python-zeroconf/pull/1469), + [`e05055c`](https://github.com/python-zeroconf/python-zeroconf/commit/e05055c584ca46080990437b2b385a187bc48458)) ## v0.138.0 
(2025-01-08) ### Features -* feat: improve performance of processing incoming records (#1467) +- Improve performance of processing incoming records + ([#1467](https://github.com/python-zeroconf/python-zeroconf/pull/1467), + [`ebbb2af`](https://github.com/python-zeroconf/python-zeroconf/commit/ebbb2afccabd3841a3cb0a39824b49773cc6258a)) -Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> ([`ebbb2af`](https://github.com/python-zeroconf/python-zeroconf/commit/ebbb2afccabd3841a3cb0a39824b49773cc6258a)) +Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> ## v0.137.2 (2025-01-06) ### Bug Fixes -* fix: split wheel builds to avoid timeout (#1461) ([`be05f0d`](https://github.com/python-zeroconf/python-zeroconf/commit/be05f0dc4f6b2431606031a7bb24585728d15f01)) +- Split wheel builds to avoid timeout + ([#1461](https://github.com/python-zeroconf/python-zeroconf/pull/1461), + [`be05f0d`](https://github.com/python-zeroconf/python-zeroconf/commit/be05f0dc4f6b2431606031a7bb24585728d15f01)) ## v0.137.1 (2025-01-06) ### Bug Fixes -* fix: move wheel builds to macos-13 (#1459) ([`4ff48a0`](https://github.com/python-zeroconf/python-zeroconf/commit/4ff48a01bc76c82e5710aafaf6cf6e79c069cd85)) +- Move wheel builds to macos-13 + ([#1459](https://github.com/python-zeroconf/python-zeroconf/pull/1459), + [`4ff48a0`](https://github.com/python-zeroconf/python-zeroconf/commit/4ff48a01bc76c82e5710aafaf6cf6e79c069cd85)) ## v0.137.0 (2025-01-06) ### Features -* feat: speed up parsing incoming records (#1458) ([`783c1b3`](https://github.com/python-zeroconf/python-zeroconf/commit/783c1b37d1372c90dfce658c66d03aa753afbf49)) +- Speed up parsing incoming records + ([#1458](https://github.com/python-zeroconf/python-zeroconf/pull/1458), + [`783c1b3`](https://github.com/python-zeroconf/python-zeroconf/commit/783c1b37d1372c90dfce658c66d03aa753afbf49)) ## v0.136.2 (2024-11-21) ### Bug Fixes -* fix: retrigger release from failed github 
workflow (#1443) ([`2ea705d`](https://github.com/python-zeroconf/python-zeroconf/commit/2ea705d850c1cb096c87372d5ec855f684603d01)) +- Retrigger release from failed github workflow + ([#1443](https://github.com/python-zeroconf/python-zeroconf/pull/1443), + [`2ea705d`](https://github.com/python-zeroconf/python-zeroconf/commit/2ea705d850c1cb096c87372d5ec855f684603d01)) ## v0.136.1 (2024-11-21) ### Bug Fixes -* fix(ci): run release workflow only on main repository (#1441) ([`f637c75`](https://github.com/python-zeroconf/python-zeroconf/commit/f637c75f638ba20c193e58ff63c073a4003430b9)) +- **ci**: Run release workflow only on main repository + ([#1441](https://github.com/python-zeroconf/python-zeroconf/pull/1441), + [`f637c75`](https://github.com/python-zeroconf/python-zeroconf/commit/f637c75f638ba20c193e58ff63c073a4003430b9)) -* fix(docs): update python to 3.8 (#1430) ([`483d067`](https://github.com/python-zeroconf/python-zeroconf/commit/483d0673d4ae3eec37840452723fc1839a6cc95c)) +- **docs**: Update python to 3.8 + ([#1430](https://github.com/python-zeroconf/python-zeroconf/pull/1430), + [`483d067`](https://github.com/python-zeroconf/python-zeroconf/commit/483d0673d4ae3eec37840452723fc1839a6cc95c)) ## v0.136.0 (2024-10-26) ### Bug Fixes -* fix: update python-semantic-release to fix release process (#1426) ([`2f20155`](https://github.com/python-zeroconf/python-zeroconf/commit/2f201558d0ab089cdfebb18d2d7bb5785b2cce16)) +- Add ignore for .c file for wheels + ([#1424](https://github.com/python-zeroconf/python-zeroconf/pull/1424), + [`6535963`](https://github.com/python-zeroconf/python-zeroconf/commit/6535963b5b789ce445e77bb728a5b7ee4263e582)) -* fix: add ignore for .c file for wheels (#1424) ([`6535963`](https://github.com/python-zeroconf/python-zeroconf/commit/6535963b5b789ce445e77bb728a5b7ee4263e582)) +- Correct typos ([#1422](https://github.com/python-zeroconf/python-zeroconf/pull/1422), + 
[`3991b42`](https://github.com/python-zeroconf/python-zeroconf/commit/3991b4256b8de5b37db7a6144e5112f711b2efef)) -* fix: correct typos (#1422) ([`3991b42`](https://github.com/python-zeroconf/python-zeroconf/commit/3991b4256b8de5b37db7a6144e5112f711b2efef)) +- Update python-semantic-release to fix release process + ([#1426](https://github.com/python-zeroconf/python-zeroconf/pull/1426), + [`2f20155`](https://github.com/python-zeroconf/python-zeroconf/commit/2f201558d0ab089cdfebb18d2d7bb5785b2cce16)) ### Features -* feat: use SPDX license identifier (#1425) ([`1596145`](https://github.com/python-zeroconf/python-zeroconf/commit/1596145452721e0de4e2a724b055e8e290792d3e)) +- Use SPDX license identifier + ([#1425](https://github.com/python-zeroconf/python-zeroconf/pull/1425), + [`1596145`](https://github.com/python-zeroconf/python-zeroconf/commit/1596145452721e0de4e2a724b055e8e290792d3e)) ## v0.135.0 (2024-09-24) ### Features -* feat: improve performance of DNSCache backend (#1415) ([`1df2e69`](https://github.com/python-zeroconf/python-zeroconf/commit/1df2e691ff11c9592e1cdad5599fb6601eb1aa3f)) +- Improve performance of DNSCache backend + ([#1415](https://github.com/python-zeroconf/python-zeroconf/pull/1415), + [`1df2e69`](https://github.com/python-zeroconf/python-zeroconf/commit/1df2e691ff11c9592e1cdad5599fb6601eb1aa3f)) ## v0.134.0 (2024-09-08) ### Bug Fixes -* fix: improve helpfulness of ServiceInfo.request assertions (#1408) ([`9262626`](https://github.com/python-zeroconf/python-zeroconf/commit/9262626895d354ed7376aa567043b793c37a985e)) +- Improve helpfulness of ServiceInfo.request assertions + ([#1408](https://github.com/python-zeroconf/python-zeroconf/pull/1408), + [`9262626`](https://github.com/python-zeroconf/python-zeroconf/commit/9262626895d354ed7376aa567043b793c37a985e)) ### Features -* feat: improve performance when IP addresses change frequently (#1407) 
([`111c91a`](https://github.com/python-zeroconf/python-zeroconf/commit/111c91ab395a7520e477eb0e75d5924fba3c64c7)) +- Improve performance when IP addresses change frequently + ([#1407](https://github.com/python-zeroconf/python-zeroconf/pull/1407), + [`111c91a`](https://github.com/python-zeroconf/python-zeroconf/commit/111c91ab395a7520e477eb0e75d5924fba3c64c7)) ## v0.133.0 (2024-08-27) ### Features -* feat: improve performance of ip address caching (#1392) ([`f7c7708`](https://github.com/python-zeroconf/python-zeroconf/commit/f7c77081b2f8c70b1ed6a9b9751a86cf91f9aae2)) +- Add classifier for python 3.13 + ([#1393](https://github.com/python-zeroconf/python-zeroconf/pull/1393), + [`7fb2bb2`](https://github.com/python-zeroconf/python-zeroconf/commit/7fb2bb21421c70db0eb288fa7e73d955f58b0f5d)) -* feat: enable building of arm64 macOS builds (#1384) +- Enable building of arm64 macOS builds + ([#1384](https://github.com/python-zeroconf/python-zeroconf/pull/1384), + [`0df2ce0`](https://github.com/python-zeroconf/python-zeroconf/commit/0df2ce0e6f7313831da6a63d477019982d5df55c)) Co-authored-by: Alex Ciobanu -Co-authored-by: J. Nick Koston ([`0df2ce0`](https://github.com/python-zeroconf/python-zeroconf/commit/0df2ce0e6f7313831da6a63d477019982d5df55c)) -* feat: add classifier for python 3.13 (#1393) ([`7fb2bb2`](https://github.com/python-zeroconf/python-zeroconf/commit/7fb2bb21421c70db0eb288fa7e73d955f58b0f5d)) +Co-authored-by: J. 
Nick Koston + +- Improve performance of ip address caching + ([#1392](https://github.com/python-zeroconf/python-zeroconf/pull/1392), + [`f7c7708`](https://github.com/python-zeroconf/python-zeroconf/commit/f7c77081b2f8c70b1ed6a9b9751a86cf91f9aae2)) -* feat: python 3.13 support (#1390) ([`98cfa83`](https://github.com/python-zeroconf/python-zeroconf/commit/98cfa83710e43880698353821bae61108b08cb2f)) +- Python 3.13 support ([#1390](https://github.com/python-zeroconf/python-zeroconf/pull/1390), + [`98cfa83`](https://github.com/python-zeroconf/python-zeroconf/commit/98cfa83710e43880698353821bae61108b08cb2f)) ## v0.132.2 (2024-04-13) ### Bug Fixes -* fix: update references to minimum-supported python version of 3.8 (#1369) ([`599524a`](https://github.com/python-zeroconf/python-zeroconf/commit/599524a5ce1e4c1731519dd89377c2a852e59935)) +- Bump cibuildwheel to fix wheel builds + ([#1371](https://github.com/python-zeroconf/python-zeroconf/pull/1371), + [`83e4ce3`](https://github.com/python-zeroconf/python-zeroconf/commit/83e4ce3e31ddd4ae9aec2f8c9d84d7a93f8be210)) -* fix: bump cibuildwheel to fix wheel builds (#1371) ([`83e4ce3`](https://github.com/python-zeroconf/python-zeroconf/commit/83e4ce3e31ddd4ae9aec2f8c9d84d7a93f8be210)) +- Update references to minimum-supported python version of 3.8 + ([#1369](https://github.com/python-zeroconf/python-zeroconf/pull/1369), + [`599524a`](https://github.com/python-zeroconf/python-zeroconf/commit/599524a5ce1e4c1731519dd89377c2a852e59935)) ## v0.132.1 (2024-04-12) ### Bug Fixes -* fix: set change during iteration when dispatching listeners (#1370) ([`e9f8aa5`](https://github.com/python-zeroconf/python-zeroconf/commit/e9f8aa5741ae2d490c33a562b459f0af1014dbb0)) +- Set change during iteration when dispatching listeners + ([#1370](https://github.com/python-zeroconf/python-zeroconf/pull/1370), + [`e9f8aa5`](https://github.com/python-zeroconf/python-zeroconf/commit/e9f8aa5741ae2d490c33a562b459f0af1014dbb0)) ## v0.132.0 (2024-04-01) ### Bug Fixes 
-* fix: avoid including scope_id in IPv6Address object if its zero (#1367) ([`edc4a55`](https://github.com/python-zeroconf/python-zeroconf/commit/edc4a556819956c238a11332052000dcbcb07e3d)) +- Avoid including scope_id in IPv6Address object if its zero + ([#1367](https://github.com/python-zeroconf/python-zeroconf/pull/1367), + [`edc4a55`](https://github.com/python-zeroconf/python-zeroconf/commit/edc4a556819956c238a11332052000dcbcb07e3d)) ### Features -* feat: make async_get_service_info available on the Zeroconf object (#1366) ([`c4c2dee`](https://github.com/python-zeroconf/python-zeroconf/commit/c4c2deeb05279ddbb0eba1330c7ae58795fea001)) +- Drop python 3.7 support ([#1359](https://github.com/python-zeroconf/python-zeroconf/pull/1359), + [`4877829`](https://github.com/python-zeroconf/python-zeroconf/commit/4877829e6442de5426db152d11827b1ba85dbf59)) -* feat: drop python 3.7 support (#1359) ([`4877829`](https://github.com/python-zeroconf/python-zeroconf/commit/4877829e6442de5426db152d11827b1ba85dbf59)) +- Make async_get_service_info available on the Zeroconf object + ([#1366](https://github.com/python-zeroconf/python-zeroconf/pull/1366), + [`c4c2dee`](https://github.com/python-zeroconf/python-zeroconf/commit/c4c2deeb05279ddbb0eba1330c7ae58795fea001)) ## v0.131.0 (2023-12-19) ### Features -* feat: small speed up to constructing outgoing packets (#1354) ([`517d7d0`](https://github.com/python-zeroconf/python-zeroconf/commit/517d7d00ca7738c770077738125aec0e4824c000)) +- Small speed up to constructing outgoing packets + ([#1354](https://github.com/python-zeroconf/python-zeroconf/pull/1354), + [`517d7d0`](https://github.com/python-zeroconf/python-zeroconf/commit/517d7d00ca7738c770077738125aec0e4824c000)) -* feat: speed up processing incoming packets (#1352) ([`6c15325`](https://github.com/python-zeroconf/python-zeroconf/commit/6c153258a995cf9459a6f23267b7e379b5e2550f)) +- Speed up processing incoming packets + 
([#1352](https://github.com/python-zeroconf/python-zeroconf/pull/1352), + [`6c15325`](https://github.com/python-zeroconf/python-zeroconf/commit/6c153258a995cf9459a6f23267b7e379b5e2550f)) -* feat: speed up the query handler (#1350) ([`9eac0a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9eac0a122f28a7a4fa76cbfdda21d9a3571d7abb)) +- Speed up the query handler ([#1350](https://github.com/python-zeroconf/python-zeroconf/pull/1350), + [`9eac0a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9eac0a122f28a7a4fa76cbfdda21d9a3571d7abb)) ## v0.130.0 (2023-12-16) ### Bug Fixes -* fix: scheduling race with the QueryScheduler (#1347) ([`cf40470`](https://github.com/python-zeroconf/python-zeroconf/commit/cf40470b89f918d3c24d7889d3536f3ffa44846c)) +- Ensure IPv6 scoped address construction uses the string cache + ([#1336](https://github.com/python-zeroconf/python-zeroconf/pull/1336), + [`f78a196`](https://github.com/python-zeroconf/python-zeroconf/commit/f78a196db632c4fe017a34f1af8a58903c15a575)) -* fix: ensure question history suppresses duplicates (#1338) ([`6f23656`](https://github.com/python-zeroconf/python-zeroconf/commit/6f23656576daa04e3de44e100f3ddd60ee4c560d)) +- Ensure question history suppresses duplicates + ([#1338](https://github.com/python-zeroconf/python-zeroconf/pull/1338), + [`6f23656`](https://github.com/python-zeroconf/python-zeroconf/commit/6f23656576daa04e3de44e100f3ddd60ee4c560d)) -* fix: microsecond precision loss in the query handler (#1339) ([`6560fad`](https://github.com/python-zeroconf/python-zeroconf/commit/6560fad584e0d392962c9a9248759f17c416620e)) +- Microsecond precision loss in the query handler + ([#1339](https://github.com/python-zeroconf/python-zeroconf/pull/1339), + [`6560fad`](https://github.com/python-zeroconf/python-zeroconf/commit/6560fad584e0d392962c9a9248759f17c416620e)) -* fix: ensure IPv6 scoped address construction uses the string cache (#1336) 
([`f78a196`](https://github.com/python-zeroconf/python-zeroconf/commit/f78a196db632c4fe017a34f1af8a58903c15a575)) +- Scheduling race with the QueryScheduler + ([#1347](https://github.com/python-zeroconf/python-zeroconf/pull/1347), + [`cf40470`](https://github.com/python-zeroconf/python-zeroconf/commit/cf40470b89f918d3c24d7889d3536f3ffa44846c)) ### Features -* feat: make ServiceInfo aware of question history (#1348) ([`b9aae1d`](https://github.com/python-zeroconf/python-zeroconf/commit/b9aae1de07bf1491e873bc314f8a1d7996127ad3)) +- Make ServiceInfo aware of question history + ([#1348](https://github.com/python-zeroconf/python-zeroconf/pull/1348), + [`b9aae1d`](https://github.com/python-zeroconf/python-zeroconf/commit/b9aae1de07bf1491e873bc314f8a1d7996127ad3)) -* feat: small speed up to ServiceInfo construction (#1346) ([`b329d99`](https://github.com/python-zeroconf/python-zeroconf/commit/b329d99917bb731b4c70bf20c7c010eeb85ad9fd)) +- Significantly improve efficiency of the ServiceBrowser scheduler + ([#1335](https://github.com/python-zeroconf/python-zeroconf/pull/1335), + [`c65d869`](https://github.com/python-zeroconf/python-zeroconf/commit/c65d869aec731b803484871e9d242a984f9f5848)) -* feat: significantly improve efficiency of the ServiceBrowser scheduler (#1335) ([`c65d869`](https://github.com/python-zeroconf/python-zeroconf/commit/c65d869aec731b803484871e9d242a984f9f5848)) +- Small performance improvement constructing outgoing questions + ([#1340](https://github.com/python-zeroconf/python-zeroconf/pull/1340), + [`157185f`](https://github.com/python-zeroconf/python-zeroconf/commit/157185f28bf1e83e6811e2a5cd1fa9b38966f780)) -* feat: small speed up to processing incoming records (#1345) ([`7de655b`](https://github.com/python-zeroconf/python-zeroconf/commit/7de655b6f05012f20a3671e0bcdd44a1913d7b52)) +- Small performance improvement for converting time + ([#1342](https://github.com/python-zeroconf/python-zeroconf/pull/1342), + 
[`73d3ab9`](https://github.com/python-zeroconf/python-zeroconf/commit/73d3ab90dd3b59caab771235dd6dbedf05bfe0b3)) -* feat: small performance improvement for converting time (#1342) ([`73d3ab9`](https://github.com/python-zeroconf/python-zeroconf/commit/73d3ab90dd3b59caab771235dd6dbedf05bfe0b3)) +- Small performance improvement for ServiceInfo asking questions + ([#1341](https://github.com/python-zeroconf/python-zeroconf/pull/1341), + [`810a309`](https://github.com/python-zeroconf/python-zeroconf/commit/810a3093c5a9411ee97740b468bd706bdf4a95de)) -* feat: small performance improvement for ServiceInfo asking questions (#1341) ([`810a309`](https://github.com/python-zeroconf/python-zeroconf/commit/810a3093c5a9411ee97740b468bd706bdf4a95de)) +- Small speed up to processing incoming records + ([#1345](https://github.com/python-zeroconf/python-zeroconf/pull/1345), + [`7de655b`](https://github.com/python-zeroconf/python-zeroconf/commit/7de655b6f05012f20a3671e0bcdd44a1913d7b52)) -* feat: small performance improvement constructing outgoing questions (#1340) ([`157185f`](https://github.com/python-zeroconf/python-zeroconf/commit/157185f28bf1e83e6811e2a5cd1fa9b38966f780)) +- Small speed up to ServiceInfo construction + ([#1346](https://github.com/python-zeroconf/python-zeroconf/pull/1346), + [`b329d99`](https://github.com/python-zeroconf/python-zeroconf/commit/b329d99917bb731b4c70bf20c7c010eeb85ad9fd)) ## v0.129.0 (2023-12-13) ### Features -* feat: add decoded_properties method to ServiceInfo (#1332) ([`9b595a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9b595a1dcacf109c699953219d70fe36296c7318)) +- Add decoded_properties method to ServiceInfo + ([#1332](https://github.com/python-zeroconf/python-zeroconf/pull/1332), + [`9b595a1`](https://github.com/python-zeroconf/python-zeroconf/commit/9b595a1dcacf109c699953219d70fe36296c7318)) -* feat: ensure ServiceInfo.properties always returns bytes (#1333) 
([`d29553a`](https://github.com/python-zeroconf/python-zeroconf/commit/d29553ab7de6b7af70769ddb804fe2aaf492f320)) +- Cache is_unspecified for zeroconf ip address objects + ([#1331](https://github.com/python-zeroconf/python-zeroconf/pull/1331), + [`a1c84dc`](https://github.com/python-zeroconf/python-zeroconf/commit/a1c84dc6adeebd155faec1a647c0f70d70de2945)) -* feat: cache is_unspecified for zeroconf ip address objects (#1331) ([`a1c84dc`](https://github.com/python-zeroconf/python-zeroconf/commit/a1c84dc6adeebd155faec1a647c0f70d70de2945)) +- Ensure ServiceInfo.properties always returns bytes + ([#1333](https://github.com/python-zeroconf/python-zeroconf/pull/1333), + [`d29553a`](https://github.com/python-zeroconf/python-zeroconf/commit/d29553ab7de6b7af70769ddb804fe2aaf492f320)) ## v0.128.5 (2023-12-13) ### Bug Fixes -* fix: performance regression with ServiceInfo IPv6Addresses (#1330) ([`e2f9f81`](https://github.com/python-zeroconf/python-zeroconf/commit/e2f9f81dbc54c3dd527eeb3298897d63f99d33f4)) +- Performance regression with ServiceInfo IPv6Addresses + ([#1330](https://github.com/python-zeroconf/python-zeroconf/pull/1330), + [`e2f9f81`](https://github.com/python-zeroconf/python-zeroconf/commit/e2f9f81dbc54c3dd527eeb3298897d63f99d33f4)) ## v0.128.4 (2023-12-10) ### Bug Fixes -* fix: re-expose ServiceInfo._set_properties for backwards compat (#1327) ([`39c4005`](https://github.com/python-zeroconf/python-zeroconf/commit/39c40051d7a63bdc63a3e2dfa20bd944fee4e761)) +- Re-expose ServiceInfo._set_properties for backwards compat + ([#1327](https://github.com/python-zeroconf/python-zeroconf/pull/1327), + [`39c4005`](https://github.com/python-zeroconf/python-zeroconf/commit/39c40051d7a63bdc63a3e2dfa20bd944fee4e761)) ## v0.128.3 (2023-12-10) ### Bug Fixes -* fix: correct nsec record writing (#1326) ([`cd7a16a`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7a16a32c37b2f7a2e90d3c749525a5393bad57)) +- Correct nsec record writing + 
([#1326](https://github.com/python-zeroconf/python-zeroconf/pull/1326), + [`cd7a16a`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7a16a32c37b2f7a2e90d3c749525a5393bad57)) ## v0.128.2 (2023-12-10) ### Bug Fixes -* fix: timestamps missing double precision (#1324) ([`ecea4e4`](https://github.com/python-zeroconf/python-zeroconf/commit/ecea4e4217892ca8cf763074ac3e5d1b898acd21)) +- Match cython version for dev deps to build deps + ([#1325](https://github.com/python-zeroconf/python-zeroconf/pull/1325), + [`a0dac46`](https://github.com/python-zeroconf/python-zeroconf/commit/a0dac46c01202b3d5a0823ac1928fc1d75332522)) -* fix: match cython version for dev deps to build deps (#1325) ([`a0dac46`](https://github.com/python-zeroconf/python-zeroconf/commit/a0dac46c01202b3d5a0823ac1928fc1d75332522)) +- Timestamps missing double precision + ([#1324](https://github.com/python-zeroconf/python-zeroconf/pull/1324), + [`ecea4e4`](https://github.com/python-zeroconf/python-zeroconf/commit/ecea4e4217892ca8cf763074ac3e5d1b898acd21)) ## v0.128.1 (2023-12-10) ### Bug Fixes -* fix: correct handling of IPv6 addresses with scope_id in ServiceInfo (#1322) ([`1682991`](https://github.com/python-zeroconf/python-zeroconf/commit/1682991b985b1f7b2bf0cff1a7eb7793070e7cb1)) +- Correct handling of IPv6 addresses with scope_id in ServiceInfo + ([#1322](https://github.com/python-zeroconf/python-zeroconf/pull/1322), + [`1682991`](https://github.com/python-zeroconf/python-zeroconf/commit/1682991b985b1f7b2bf0cff1a7eb7793070e7cb1)) ## v0.128.0 (2023-12-02) ### Features -* feat: speed up unpacking TXT record data in ServiceInfo (#1318) ([`a200842`](https://github.com/python-zeroconf/python-zeroconf/commit/a20084281e66bdb9c37183a5eb992435f5b866ac)) +- Speed up unpacking TXT record data in ServiceInfo + ([#1318](https://github.com/python-zeroconf/python-zeroconf/pull/1318), + [`a200842`](https://github.com/python-zeroconf/python-zeroconf/commit/a20084281e66bdb9c37183a5eb992435f5b866ac)) ## v0.127.0 
(2023-11-15) ### Features -* feat: small speed up to writing outgoing packets (#1316) ([`cd28476`](https://github.com/python-zeroconf/python-zeroconf/commit/cd28476f6b0a6c2c733273fb24ddaac6c7bbdf65)) +- Small speed up to processing incoming dns records + ([#1315](https://github.com/python-zeroconf/python-zeroconf/pull/1315), + [`bfe4c24`](https://github.com/python-zeroconf/python-zeroconf/commit/bfe4c24881a7259713425df5ab00ffe487518841)) -* feat: speed up incoming packet reader (#1314) ([`0d60b61`](https://github.com/python-zeroconf/python-zeroconf/commit/0d60b61538a5d4b6f44b2369333b6e916a0a55b4)) +- Small speed up to writing outgoing packets + ([#1316](https://github.com/python-zeroconf/python-zeroconf/pull/1316), + [`cd28476`](https://github.com/python-zeroconf/python-zeroconf/commit/cd28476f6b0a6c2c733273fb24ddaac6c7bbdf65)) -* feat: small speed up to processing incoming dns records (#1315) ([`bfe4c24`](https://github.com/python-zeroconf/python-zeroconf/commit/bfe4c24881a7259713425df5ab00ffe487518841)) +- Speed up incoming packet reader + ([#1314](https://github.com/python-zeroconf/python-zeroconf/pull/1314), + [`0d60b61`](https://github.com/python-zeroconf/python-zeroconf/commit/0d60b61538a5d4b6f44b2369333b6e916a0a55b4)) ## v0.126.0 (2023-11-13) ### Features -* feat: speed up outgoing packet writer (#1313) ([`55cf4cc`](https://github.com/python-zeroconf/python-zeroconf/commit/55cf4ccdff886a136db4e2133d3e6cdd001a8bd6)) +- Speed up outgoing packet writer + ([#1313](https://github.com/python-zeroconf/python-zeroconf/pull/1313), + [`55cf4cc`](https://github.com/python-zeroconf/python-zeroconf/commit/55cf4ccdff886a136db4e2133d3e6cdd001a8bd6)) -* feat: speed up writing name compression for outgoing packets (#1312) ([`9caeabb`](https://github.com/python-zeroconf/python-zeroconf/commit/9caeabb6d4659a25ea1251c1ee7bb824e05f3d8b)) +- Speed up writing name compression for outgoing packets + ([#1312](https://github.com/python-zeroconf/python-zeroconf/pull/1312), + 
[`9caeabb`](https://github.com/python-zeroconf/python-zeroconf/commit/9caeabb6d4659a25ea1251c1ee7bb824e05f3d8b)) ## v0.125.0 (2023-11-12) ### Features -* feat: speed up service browser queries when browsing many types (#1311) ([`d192d33`](https://github.com/python-zeroconf/python-zeroconf/commit/d192d33b1f05aa95a89965e86210aec086673a17)) +- Speed up service browser queries when browsing many types + ([#1311](https://github.com/python-zeroconf/python-zeroconf/pull/1311), + [`d192d33`](https://github.com/python-zeroconf/python-zeroconf/commit/d192d33b1f05aa95a89965e86210aec086673a17)) ## v0.124.0 (2023-11-12) ### Features -* feat: avoid decoding known answers if we have no answers to give (#1308) ([`605dc9c`](https://github.com/python-zeroconf/python-zeroconf/commit/605dc9ccd843a535802031f051b3d93310186ad1)) +- Avoid decoding known answers if we have no answers to give + ([#1308](https://github.com/python-zeroconf/python-zeroconf/pull/1308), + [`605dc9c`](https://github.com/python-zeroconf/python-zeroconf/commit/605dc9ccd843a535802031f051b3d93310186ad1)) -* feat: small speed up to process incoming packets (#1309) ([`56ef908`](https://github.com/python-zeroconf/python-zeroconf/commit/56ef90865189c01d2207abcc5e2efe3a7a022fa1)) +- Small speed up to process incoming packets + ([#1309](https://github.com/python-zeroconf/python-zeroconf/pull/1309), + [`56ef908`](https://github.com/python-zeroconf/python-zeroconf/commit/56ef90865189c01d2207abcc5e2efe3a7a022fa1)) ## v0.123.0 (2023-11-12) ### Features -* feat: speed up instances only used to lookup answers (#1307) ([`0701b8a`](https://github.com/python-zeroconf/python-zeroconf/commit/0701b8ab6009891cbaddaa1d17116d31fd1b2f78)) +- Speed up instances only used to lookup answers + ([#1307](https://github.com/python-zeroconf/python-zeroconf/pull/1307), + [`0701b8a`](https://github.com/python-zeroconf/python-zeroconf/commit/0701b8ab6009891cbaddaa1d17116d31fd1b2f78)) ## v0.122.3 (2023-11-09) ### Bug Fixes -* fix: do not build 
musllinux aarch64 wheels to reduce release time (#1306) ([`79aafb0`](https://github.com/python-zeroconf/python-zeroconf/commit/79aafb0acf7ca6b17976be7ede748008deada27b)) +- Do not build musllinux aarch64 wheels to reduce release time + ([#1306](https://github.com/python-zeroconf/python-zeroconf/pull/1306), + [`79aafb0`](https://github.com/python-zeroconf/python-zeroconf/commit/79aafb0acf7ca6b17976be7ede748008deada27b)) ## v0.122.2 (2023-11-09) ### Bug Fixes -* fix: do not build aarch64 wheels for PyPy (#1305) ([`7e884db`](https://github.com/python-zeroconf/python-zeroconf/commit/7e884db4d958459e64257aba860dba2450db0687)) +- Do not build aarch64 wheels for PyPy + ([#1305](https://github.com/python-zeroconf/python-zeroconf/pull/1305), + [`7e884db`](https://github.com/python-zeroconf/python-zeroconf/commit/7e884db4d958459e64257aba860dba2450db0687)) ## v0.122.1 (2023-11-09) ### Bug Fixes -* fix: skip wheel builds for eol python and older python with aarch64 (#1304) ([`6c8f5a5`](https://github.com/python-zeroconf/python-zeroconf/commit/6c8f5a5dec2072aa6a8f889c5d8a4623ab392234)) +- Skip wheel builds for eol python and older python with aarch64 + ([#1304](https://github.com/python-zeroconf/python-zeroconf/pull/1304), + [`6c8f5a5`](https://github.com/python-zeroconf/python-zeroconf/commit/6c8f5a5dec2072aa6a8f889c5d8a4623ab392234)) ## v0.122.0 (2023-11-08) ### Features -* feat: build aarch64 wheels (#1302) ([`4fe58e2`](https://github.com/python-zeroconf/python-zeroconf/commit/4fe58e2edc6da64a8ece0e2b16ec9ebfc5b3cd83)) +- Build aarch64 wheels ([#1302](https://github.com/python-zeroconf/python-zeroconf/pull/1302), + [`4fe58e2`](https://github.com/python-zeroconf/python-zeroconf/commit/4fe58e2edc6da64a8ece0e2b16ec9ebfc5b3cd83)) ## v0.121.0 (2023-11-08) ### Features -* feat: speed up record updates (#1301) ([`d2af6a0`](https://github.com/python-zeroconf/python-zeroconf/commit/d2af6a0978f5abe4f8bb70d3e29d9836d0fd77c4)) +- Speed up record updates 
([#1301](https://github.com/python-zeroconf/python-zeroconf/pull/1301), + [`d2af6a0`](https://github.com/python-zeroconf/python-zeroconf/commit/d2af6a0978f5abe4f8bb70d3e29d9836d0fd77c4)) ## v0.120.0 (2023-11-05) ### Features -* feat: speed up incoming packet processing with a memory view (#1290) ([`f1f0a25`](https://github.com/python-zeroconf/python-zeroconf/commit/f1f0a2504afd4d29bc6b7cf715cd3cb81b9049f7)) +- Speed up decoding labels from incoming data + ([#1291](https://github.com/python-zeroconf/python-zeroconf/pull/1291), + [`c37ead4`](https://github.com/python-zeroconf/python-zeroconf/commit/c37ead4d7000607e81706a97b4cdffd80cf8cf99)) -* feat: speed up decoding labels from incoming data (#1291) ([`c37ead4`](https://github.com/python-zeroconf/python-zeroconf/commit/c37ead4d7000607e81706a97b4cdffd80cf8cf99)) +- Speed up incoming packet processing with a memory view + ([#1290](https://github.com/python-zeroconf/python-zeroconf/pull/1290), + [`f1f0a25`](https://github.com/python-zeroconf/python-zeroconf/commit/f1f0a2504afd4d29bc6b7cf715cd3cb81b9049f7)) -* feat: speed up ServiceBrowsers with a pxd for the signal interface (#1289) ([`8a17f20`](https://github.com/python-zeroconf/python-zeroconf/commit/8a17f2053a89db4beca9e8c1de4640faf27726b4)) +- Speed up ServiceBrowsers with a pxd for the signal interface + ([#1289](https://github.com/python-zeroconf/python-zeroconf/pull/1289), + [`8a17f20`](https://github.com/python-zeroconf/python-zeroconf/commit/8a17f2053a89db4beca9e8c1de4640faf27726b4)) ## v0.119.0 (2023-10-18) ### Features -* feat: update cibuildwheel to build wheels on latest cython final release (#1285) ([`e8c9083`](https://github.com/python-zeroconf/python-zeroconf/commit/e8c9083bb118764a85b12fac9055152a2f62a212)) +- Update cibuildwheel to build wheels on latest cython final release + ([#1285](https://github.com/python-zeroconf/python-zeroconf/pull/1285), + 
[`e8c9083`](https://github.com/python-zeroconf/python-zeroconf/commit/e8c9083bb118764a85b12fac9055152a2f62a212)) ## v0.118.1 (2023-10-18) ### Bug Fixes -* fix: reduce size of wheels by excluding generated .c files (#1284) ([`b6afa4b`](https://github.com/python-zeroconf/python-zeroconf/commit/b6afa4b2775a1fdb090145eccdc5711c98e7147a)) +- Reduce size of wheels by excluding generated .c files + ([#1284](https://github.com/python-zeroconf/python-zeroconf/pull/1284), + [`b6afa4b`](https://github.com/python-zeroconf/python-zeroconf/commit/b6afa4b2775a1fdb090145eccdc5711c98e7147a)) ## v0.118.0 (2023-10-14) ### Features -* feat: small improvements to ServiceBrowser performance (#1283) ([`0fc031b`](https://github.com/python-zeroconf/python-zeroconf/commit/0fc031b1e7bf1766d5a1d39d70d300b86e36715e)) +- Small improvements to ServiceBrowser performance + ([#1283](https://github.com/python-zeroconf/python-zeroconf/pull/1283), + [`0fc031b`](https://github.com/python-zeroconf/python-zeroconf/commit/0fc031b1e7bf1766d5a1d39d70d300b86e36715e)) ## v0.117.0 (2023-10-14) ### Features -* feat: small cleanups to incoming data handlers (#1282) ([`4f4bd9f`](https://github.com/python-zeroconf/python-zeroconf/commit/4f4bd9ff7c1e575046e5ea213d9b8c91ac7a24a9)) +- Small cleanups to incoming data handlers + ([#1282](https://github.com/python-zeroconf/python-zeroconf/pull/1282), + [`4f4bd9f`](https://github.com/python-zeroconf/python-zeroconf/commit/4f4bd9ff7c1e575046e5ea213d9b8c91ac7a24a9)) ## v0.116.0 (2023-10-13) ### Features -* feat: reduce type checking overhead at run time (#1281) ([`8f30099`](https://github.com/python-zeroconf/python-zeroconf/commit/8f300996e5bd4316b2237f0502791dd0d6a855fe)) +- Reduce type checking overhead at run time + ([#1281](https://github.com/python-zeroconf/python-zeroconf/pull/1281), + [`8f30099`](https://github.com/python-zeroconf/python-zeroconf/commit/8f300996e5bd4316b2237f0502791dd0d6a855fe)) ## v0.115.2 (2023-10-05) ### Bug Fixes -* fix: ensure ServiceInfo 
cache is cleared when adding to the registry (#1279) +- Ensure ServiceInfo cache is cleared when adding to the registry + ([#1279](https://github.com/python-zeroconf/python-zeroconf/pull/1279), + [`2060eb2`](https://github.com/python-zeroconf/python-zeroconf/commit/2060eb2cc43489c34bea08924c3f40b875d5a498)) -* There were production use cases that mutated the service info and re-registered it that need to be accounted for ([`2060eb2`](https://github.com/python-zeroconf/python-zeroconf/commit/2060eb2cc43489c34bea08924c3f40b875d5a498)) +* There were production use cases that mutated the service info and re-registered it that need to be + accounted for ## v0.115.1 (2023-10-01) ### Bug Fixes -* fix: add missing python definition for addresses_by_version (#1278) ([`52ee02b`](https://github.com/python-zeroconf/python-zeroconf/commit/52ee02b16860e344c402124f4b2e2869536ec839)) +- Add missing python definition for addresses_by_version + ([#1278](https://github.com/python-zeroconf/python-zeroconf/pull/1278), + [`52ee02b`](https://github.com/python-zeroconf/python-zeroconf/commit/52ee02b16860e344c402124f4b2e2869536ec839)) ## v0.115.0 (2023-09-26) ### Features -* feat: speed up outgoing multicast queue (#1277) ([`a13fd49`](https://github.com/python-zeroconf/python-zeroconf/commit/a13fd49d77474fd5858de809e48cbab1ccf89173)) +- Speed up outgoing multicast queue + ([#1277](https://github.com/python-zeroconf/python-zeroconf/pull/1277), + [`a13fd49`](https://github.com/python-zeroconf/python-zeroconf/commit/a13fd49d77474fd5858de809e48cbab1ccf89173)) ## v0.114.0 (2023-09-25) ### Features -* feat: speed up responding to queries (#1275) ([`3c6b18c`](https://github.com/python-zeroconf/python-zeroconf/commit/3c6b18cdf4c94773ad6f4497df98feb337939ee9)) +- Speed up responding to queries + ([#1275](https://github.com/python-zeroconf/python-zeroconf/pull/1275), + [`3c6b18c`](https://github.com/python-zeroconf/python-zeroconf/commit/3c6b18cdf4c94773ad6f4497df98feb337939ee9)) ## v0.113.0 
(2023-09-24) ### Features -* feat: improve performance of loading records from cache in ServiceInfo (#1274) ([`6257d49`](https://github.com/python-zeroconf/python-zeroconf/commit/6257d49952e02107f800f4ad4894716508edfcda)) +- Improve performance of loading records from cache in ServiceInfo + ([#1274](https://github.com/python-zeroconf/python-zeroconf/pull/1274), + [`6257d49`](https://github.com/python-zeroconf/python-zeroconf/commit/6257d49952e02107f800f4ad4894716508edfcda)) ## v0.112.0 (2023-09-14) ### Features -* feat: improve AsyncServiceBrowser performance (#1273) ([`0c88ecf`](https://github.com/python-zeroconf/python-zeroconf/commit/0c88ecf5ef6b9b256f991e7a630048de640999a6)) +- Improve AsyncServiceBrowser performance + ([#1273](https://github.com/python-zeroconf/python-zeroconf/pull/1273), + [`0c88ecf`](https://github.com/python-zeroconf/python-zeroconf/commit/0c88ecf5ef6b9b256f991e7a630048de640999a6)) ## v0.111.0 (2023-09-14) ### Features -* feat: speed up question and answer internals (#1272) ([`d24722b`](https://github.com/python-zeroconf/python-zeroconf/commit/d24722bfa4201d48ab482d35b0ef004f070ada80)) +- Speed up question and answer internals + ([#1272](https://github.com/python-zeroconf/python-zeroconf/pull/1272), + [`d24722b`](https://github.com/python-zeroconf/python-zeroconf/commit/d24722bfa4201d48ab482d35b0ef004f070ada80)) ## v0.110.0 (2023-09-14) ### Features -* feat: small speed ups to ServiceBrowser (#1271) ([`22c433d`](https://github.com/python-zeroconf/python-zeroconf/commit/22c433ddaea3049ac49933325ba938fd87a529c0)) +- Small speed ups to ServiceBrowser + ([#1271](https://github.com/python-zeroconf/python-zeroconf/pull/1271), + [`22c433d`](https://github.com/python-zeroconf/python-zeroconf/commit/22c433ddaea3049ac49933325ba938fd87a529c0)) ## v0.109.0 (2023-09-14) ### Features -* feat: speed up ServiceBrowsers with a cython pxd (#1270) ([`4837876`](https://github.com/python-zeroconf/python-zeroconf/commit/48378769c3887b5746ca00de30067a4c0851765c)) 
+- Speed up ServiceBrowsers with a cython pxd + ([#1270](https://github.com/python-zeroconf/python-zeroconf/pull/1270), + [`4837876`](https://github.com/python-zeroconf/python-zeroconf/commit/48378769c3887b5746ca00de30067a4c0851765c)) ## v0.108.0 (2023-09-11) ### Features -* feat: improve performance of constructing outgoing queries (#1267) ([`00c439a`](https://github.com/python-zeroconf/python-zeroconf/commit/00c439a6400b7850ef9fdd75bc8d82d4e64b1da0)) +- Improve performance of constructing outgoing queries + ([#1267](https://github.com/python-zeroconf/python-zeroconf/pull/1267), + [`00c439a`](https://github.com/python-zeroconf/python-zeroconf/commit/00c439a6400b7850ef9fdd75bc8d82d4e64b1da0)) ## v0.107.0 (2023-09-11) ### Features -* feat: speed up responding to queries (#1266) ([`24a0a00`](https://github.com/python-zeroconf/python-zeroconf/commit/24a0a00b3e457979e279a2eeadc8fad2ab09e125)) +- Speed up responding to queries + ([#1266](https://github.com/python-zeroconf/python-zeroconf/pull/1266), + [`24a0a00`](https://github.com/python-zeroconf/python-zeroconf/commit/24a0a00b3e457979e279a2eeadc8fad2ab09e125)) ## v0.106.0 (2023-09-11) ### Features -* feat: speed up answering questions (#1265) ([`37bfaf2`](https://github.com/python-zeroconf/python-zeroconf/commit/37bfaf2f630358e8c68652f3b3120931a6f94910)) +- Speed up answering questions + ([#1265](https://github.com/python-zeroconf/python-zeroconf/pull/1265), + [`37bfaf2`](https://github.com/python-zeroconf/python-zeroconf/commit/37bfaf2f630358e8c68652f3b3120931a6f94910)) ## v0.105.0 (2023-09-10) ### Features -* feat: speed up ServiceInfo with a cython pxd (#1264) ([`7ca690a`](https://github.com/python-zeroconf/python-zeroconf/commit/7ca690ac3fa75e7474d3412944bbd5056cb313dd)) +- Speed up ServiceInfo with a cython pxd + ([#1264](https://github.com/python-zeroconf/python-zeroconf/pull/1264), + [`7ca690a`](https://github.com/python-zeroconf/python-zeroconf/commit/7ca690ac3fa75e7474d3412944bbd5056cb313dd)) ## v0.104.0 
(2023-09-10) ### Features -* feat: speed up generating answers (#1262) ([`50a8f06`](https://github.com/python-zeroconf/python-zeroconf/commit/50a8f066b6ab90bc9e3300f81cf9332550b720df)) +- Speed up generating answers + ([#1262](https://github.com/python-zeroconf/python-zeroconf/pull/1262), + [`50a8f06`](https://github.com/python-zeroconf/python-zeroconf/commit/50a8f066b6ab90bc9e3300f81cf9332550b720df)) ## v0.103.0 (2023-09-09) ### Features -* feat: avoid calling get_running_loop when resolving ServiceInfo (#1261) ([`33a2714`](https://github.com/python-zeroconf/python-zeroconf/commit/33a2714cadff96edf016b869cc63b0661d16ef2c)) +- Avoid calling get_running_loop when resolving ServiceInfo + ([#1261](https://github.com/python-zeroconf/python-zeroconf/pull/1261), + [`33a2714`](https://github.com/python-zeroconf/python-zeroconf/commit/33a2714cadff96edf016b869cc63b0661d16ef2c)) ## v0.102.0 (2023-09-07) ### Features -* feat: significantly speed up writing outgoing dns records (#1260) ([`bf2f366`](https://github.com/python-zeroconf/python-zeroconf/commit/bf2f3660a1f341e50ab0ae586dfbacbc5ddcc077)) +- Significantly speed up writing outgoing dns records + ([#1260](https://github.com/python-zeroconf/python-zeroconf/pull/1260), + [`bf2f366`](https://github.com/python-zeroconf/python-zeroconf/commit/bf2f3660a1f341e50ab0ae586dfbacbc5ddcc077)) ## v0.101.0 (2023-09-07) ### Features -* feat: speed up writing outgoing dns records (#1259) ([`248655f`](https://github.com/python-zeroconf/python-zeroconf/commit/248655f0276223b089373c70ec13a0385dfaa4d6)) +- Speed up writing outgoing dns records + ([#1259](https://github.com/python-zeroconf/python-zeroconf/pull/1259), + [`248655f`](https://github.com/python-zeroconf/python-zeroconf/commit/248655f0276223b089373c70ec13a0385dfaa4d6)) ## v0.100.0 (2023-09-07) ### Features -* feat: small speed up to writing outgoing dns records (#1258) ([`1ed6bd2`](https://github.com/python-zeroconf/python-zeroconf/commit/1ed6bd2ec4db0612b71384f923ffff1efd3ce878)) 
+- Small speed up to writing outgoing dns records + ([#1258](https://github.com/python-zeroconf/python-zeroconf/pull/1258), + [`1ed6bd2`](https://github.com/python-zeroconf/python-zeroconf/commit/1ed6bd2ec4db0612b71384f923ffff1efd3ce878)) ## v0.99.0 (2023-09-06) ### Features -* feat: reduce IP Address parsing overhead in ServiceInfo (#1257) ([`83d0b7f`](https://github.com/python-zeroconf/python-zeroconf/commit/83d0b7fda2eb09c9c6e18b85f329d1ddc701e3fb)) +- Reduce IP Address parsing overhead in ServiceInfo + ([#1257](https://github.com/python-zeroconf/python-zeroconf/pull/1257), + [`83d0b7f`](https://github.com/python-zeroconf/python-zeroconf/commit/83d0b7fda2eb09c9c6e18b85f329d1ddc701e3fb)) ## v0.98.0 (2023-09-06) ### Features -* feat: speed up decoding incoming packets (#1256) ([`ac081cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac081cf00addde1ceea2c076f73905fdb293de3a)) +- Speed up decoding incoming packets + ([#1256](https://github.com/python-zeroconf/python-zeroconf/pull/1256), + [`ac081cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac081cf00addde1ceea2c076f73905fdb293de3a)) ## v0.97.0 (2023-09-03) ### Features -* feat: speed up answering queries (#1255) ([`2d3aed3`](https://github.com/python-zeroconf/python-zeroconf/commit/2d3aed36e24c73013fcf4acc90803fc1737d0917)) +- Speed up answering queries ([#1255](https://github.com/python-zeroconf/python-zeroconf/pull/1255), + [`2d3aed3`](https://github.com/python-zeroconf/python-zeroconf/commit/2d3aed36e24c73013fcf4acc90803fc1737d0917)) ## v0.96.0 (2023-09-03) ### Features -* feat: optimize DNSCache.get_by_details (#1254) +- Optimize DNSCache.get_by_details + ([#1254](https://github.com/python-zeroconf/python-zeroconf/pull/1254), + [`ce59787`](https://github.com/python-zeroconf/python-zeroconf/commit/ce59787a170781ffdaa22425018d288b395ac081)) * feat: optimize DNSCache.get_by_details -This is one of the most called functions since ServiceInfo.load_from_cache calls -it +This is one of the 
most called functions since ServiceInfo.load_from_cache calls it * fix: make get_all_by_details thread-safe -* fix: remove unneeded key checks ([`ce59787`](https://github.com/python-zeroconf/python-zeroconf/commit/ce59787a170781ffdaa22425018d288b395ac081)) +* fix: remove unneeded key checks ## v0.95.0 (2023-09-03) ### Features -* feat: speed up adding and removing RecordUpdateListeners (#1253) ([`22e4a29`](https://github.com/python-zeroconf/python-zeroconf/commit/22e4a296d440b3038c0ff5ed6fc8878304ec4937)) +- Speed up adding and removing RecordUpdateListeners + ([#1253](https://github.com/python-zeroconf/python-zeroconf/pull/1253), + [`22e4a29`](https://github.com/python-zeroconf/python-zeroconf/commit/22e4a296d440b3038c0ff5ed6fc8878304ec4937)) ## v0.94.0 (2023-09-03) ### Features -* feat: optimize cache implementation (#1252) ([`8d3ec79`](https://github.com/python-zeroconf/python-zeroconf/commit/8d3ec792277aaf7ef790318b5b35ab00839ca3b3)) +- Optimize cache implementation + ([#1252](https://github.com/python-zeroconf/python-zeroconf/pull/1252), + [`8d3ec79`](https://github.com/python-zeroconf/python-zeroconf/commit/8d3ec792277aaf7ef790318b5b35ab00839ca3b3)) ## v0.93.1 (2023-09-03) ### Bug Fixes -* fix: no change re-release due to unrecoverable failed CI run (#1251) ([`730921b`](https://github.com/python-zeroconf/python-zeroconf/commit/730921b155dfb9c62251c8c643b1302e807aff3b)) +- No change re-release due to unrecoverable failed CI run + ([#1251](https://github.com/python-zeroconf/python-zeroconf/pull/1251), + [`730921b`](https://github.com/python-zeroconf/python-zeroconf/commit/730921b155dfb9c62251c8c643b1302e807aff3b)) ## v0.93.0 (2023-09-02) ### Features -* feat: reduce overhead to answer questions (#1250) ([`7cb8da0`](https://github.com/python-zeroconf/python-zeroconf/commit/7cb8da0c6c5c944588009fe36012c1197c422668)) +- Reduce overhead to answer questions + ([#1250](https://github.com/python-zeroconf/python-zeroconf/pull/1250), + 
[`7cb8da0`](https://github.com/python-zeroconf/python-zeroconf/commit/7cb8da0c6c5c944588009fe36012c1197c422668)) ## v0.92.0 (2023-09-02) ### Features -* feat: cache construction of records used to answer queries from the service registry (#1243) ([`0890f62`](https://github.com/python-zeroconf/python-zeroconf/commit/0890f628dbbd577fb77d3e6f2e267052b2b2b515)) +- Cache construction of records used to answer queries from the service registry + ([#1243](https://github.com/python-zeroconf/python-zeroconf/pull/1243), + [`0890f62`](https://github.com/python-zeroconf/python-zeroconf/commit/0890f628dbbd577fb77d3e6f2e267052b2b2b515)) ## v0.91.1 (2023-09-02) ### Bug Fixes -* fix: remove useless calls in ServiceInfo (#1248) ([`4e40fae`](https://github.com/python-zeroconf/python-zeroconf/commit/4e40fae20bf50b4608e28fad4a360c4ed48ac86b)) +- Remove useless calls in ServiceInfo + ([#1248](https://github.com/python-zeroconf/python-zeroconf/pull/1248), + [`4e40fae`](https://github.com/python-zeroconf/python-zeroconf/commit/4e40fae20bf50b4608e28fad4a360c4ed48ac86b)) ## v0.91.0 (2023-09-02) ### Features -* feat: reduce overhead to process incoming updates by avoiding the handle_response shim (#1247) ([`5e31f0a`](https://github.com/python-zeroconf/python-zeroconf/commit/5e31f0afe4c341fbdbbbe50348a829ea553cbda0)) +- Reduce overhead to process incoming updates by avoiding the handle_response shim + ([#1247](https://github.com/python-zeroconf/python-zeroconf/pull/1247), + [`5e31f0a`](https://github.com/python-zeroconf/python-zeroconf/commit/5e31f0afe4c341fbdbbbe50348a829ea553cbda0)) ## v0.90.0 (2023-09-02) ### Features -* feat: avoid python float conversion in listener hot path (#1245) ([`816ad4d`](https://github.com/python-zeroconf/python-zeroconf/commit/816ad4dceb3859bad4bb136bdb1d1ee2daa0bf5a)) +- Avoid python float conversion in listener hot path + ([#1245](https://github.com/python-zeroconf/python-zeroconf/pull/1245), + 
[`816ad4d`](https://github.com/python-zeroconf/python-zeroconf/commit/816ad4dceb3859bad4bb136bdb1d1ee2daa0bf5a)) ### Refactoring -* refactor: reduce duplicate code in engine.py (#1246) ([`36ae505`](https://github.com/python-zeroconf/python-zeroconf/commit/36ae505dc9f95b59fdfb632960845a45ba8575b8)) +- Reduce duplicate code in engine.py + ([#1246](https://github.com/python-zeroconf/python-zeroconf/pull/1246), + [`36ae505`](https://github.com/python-zeroconf/python-zeroconf/commit/36ae505dc9f95b59fdfb632960845a45ba8575b8)) ## v0.89.0 (2023-09-02) ### Features -* feat: reduce overhead to process incoming questions (#1244) ([`18b65d1`](https://github.com/python-zeroconf/python-zeroconf/commit/18b65d1c75622869b0c29258215d3db3ae520d6c)) +- Reduce overhead to process incoming questions + ([#1244](https://github.com/python-zeroconf/python-zeroconf/pull/1244), + [`18b65d1`](https://github.com/python-zeroconf/python-zeroconf/commit/18b65d1c75622869b0c29258215d3db3ae520d6c)) ## v0.88.0 (2023-08-29) ### Features -* feat: speed up RecordManager with additional cython defs (#1242) ([`5a76fc5`](https://github.com/python-zeroconf/python-zeroconf/commit/5a76fc5ff74f2941ffbf7570e45390f35e0b7e01)) +- Speed up RecordManager with additional cython defs + ([#1242](https://github.com/python-zeroconf/python-zeroconf/pull/1242), + [`5a76fc5`](https://github.com/python-zeroconf/python-zeroconf/commit/5a76fc5ff74f2941ffbf7570e45390f35e0b7e01)) ## v0.87.0 (2023-08-29) ### Features -* feat: improve performance by adding cython pxd for RecordManager (#1241) ([`a7dad3d`](https://github.com/python-zeroconf/python-zeroconf/commit/a7dad3d9743586f352e21eea1e129c6875f9a713)) +- Improve performance by adding cython pxd for RecordManager + ([#1241](https://github.com/python-zeroconf/python-zeroconf/pull/1241), + [`a7dad3d`](https://github.com/python-zeroconf/python-zeroconf/commit/a7dad3d9743586f352e21eea1e129c6875f9a713)) ## v0.86.0 (2023-08-28) ### Features -* feat: build wheels for cpython 3.12 
(#1239) ([`58bc154`](https://github.com/python-zeroconf/python-zeroconf/commit/58bc154f55b06b4ddfc4a141592488abe76f062a)) +- Build wheels for cpython 3.12 + ([#1239](https://github.com/python-zeroconf/python-zeroconf/pull/1239), + [`58bc154`](https://github.com/python-zeroconf/python-zeroconf/commit/58bc154f55b06b4ddfc4a141592488abe76f062a)) -* feat: use server_key when processing DNSService records (#1238) ([`cc8feb1`](https://github.com/python-zeroconf/python-zeroconf/commit/cc8feb110fefc3fb714fd482a52f16e2b620e8c4)) +- Use server_key when processing DNSService records + ([#1238](https://github.com/python-zeroconf/python-zeroconf/pull/1238), + [`cc8feb1`](https://github.com/python-zeroconf/python-zeroconf/commit/cc8feb110fefc3fb714fd482a52f16e2b620e8c4)) ## v0.85.0 (2023-08-27) ### Features -* feat: simplify code to unpack properties (#1237) ([`68d9998`](https://github.com/python-zeroconf/python-zeroconf/commit/68d99985a0e9d2c72ff670b2e2af92271a6fe934)) +- Simplify code to unpack properties + ([#1237](https://github.com/python-zeroconf/python-zeroconf/pull/1237), + [`68d9998`](https://github.com/python-zeroconf/python-zeroconf/commit/68d99985a0e9d2c72ff670b2e2af92271a6fe934)) ## v0.84.0 (2023-08-27) ### Features -* feat: context managers in ServiceBrowser and AsyncServiceBrowser (#1233) +- Context managers in ServiceBrowser and AsyncServiceBrowser + ([#1233](https://github.com/python-zeroconf/python-zeroconf/pull/1233), + [`bd8d846`](https://github.com/python-zeroconf/python-zeroconf/commit/bd8d8467dec2a39a0b525043ea1051259100fded)) -Co-authored-by: J. Nick Koston ([`bd8d846`](https://github.com/python-zeroconf/python-zeroconf/commit/bd8d8467dec2a39a0b525043ea1051259100fded)) +Co-authored-by: J. 
Nick Koston ## v0.83.1 (2023-08-27) ### Bug Fixes -* fix: rebuild wheels with cython 3.0.2 (#1236) ([`dd637fb`](https://github.com/python-zeroconf/python-zeroconf/commit/dd637fb2e5a87ba283750e69d116e124bef54e7c)) +- Rebuild wheels with cython 3.0.2 + ([#1236](https://github.com/python-zeroconf/python-zeroconf/pull/1236), + [`dd637fb`](https://github.com/python-zeroconf/python-zeroconf/commit/dd637fb2e5a87ba283750e69d116e124bef54e7c)) ## v0.83.0 (2023-08-26) ### Features -* feat: speed up question and answer history with a cython pxd (#1234) ([`703ecb2`](https://github.com/python-zeroconf/python-zeroconf/commit/703ecb2901b2150fb72fac3deed61d7302561298)) +- Speed up question and answer history with a cython pxd + ([#1234](https://github.com/python-zeroconf/python-zeroconf/pull/1234), + [`703ecb2`](https://github.com/python-zeroconf/python-zeroconf/commit/703ecb2901b2150fb72fac3deed61d7302561298)) ## v0.82.1 (2023-08-22) ### Bug Fixes -* fix: build failures with older cython 0.29 series (#1232) ([`30c3ad9`](https://github.com/python-zeroconf/python-zeroconf/commit/30c3ad9d1bc6b589e1ca6675fea21907ebcd1ced)) +- Build failures with older cython 0.29 series + ([#1232](https://github.com/python-zeroconf/python-zeroconf/pull/1232), + [`30c3ad9`](https://github.com/python-zeroconf/python-zeroconf/commit/30c3ad9d1bc6b589e1ca6675fea21907ebcd1ced)) ## v0.82.0 (2023-08-22) ### Features -* feat: optimize processing of records in RecordUpdateListener subclasses (#1231) ([`3e89294`](https://github.com/python-zeroconf/python-zeroconf/commit/3e89294ea0ecee1122e1c1ffdc78925add8ca40e)) +- Optimize processing of records in RecordUpdateListener subclasses + ([#1231](https://github.com/python-zeroconf/python-zeroconf/pull/1231), + [`3e89294`](https://github.com/python-zeroconf/python-zeroconf/commit/3e89294ea0ecee1122e1c1ffdc78925add8ca40e)) ## v0.81.0 (2023-08-22) ### Features -* feat: speed up the service registry with a cython pxd (#1226) 
([`47d3c7a`](https://github.com/python-zeroconf/python-zeroconf/commit/47d3c7ad4bc5f2247631c3ad5e6b6156d45a0a4e)) +- Optimizing sending answers to questions + ([#1227](https://github.com/python-zeroconf/python-zeroconf/pull/1227), + [`cd7b56b`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7b56b2aa0c8ee429da430e9a36abd515512011)) -* feat: optimizing sending answers to questions (#1227) ([`cd7b56b`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7b56b2aa0c8ee429da430e9a36abd515512011)) +- Speed up the service registry with a cython pxd + ([#1226](https://github.com/python-zeroconf/python-zeroconf/pull/1226), + [`47d3c7a`](https://github.com/python-zeroconf/python-zeroconf/commit/47d3c7ad4bc5f2247631c3ad5e6b6156d45a0a4e)) ## v0.80.0 (2023-08-15) ### Features -* feat: optimize unpacking properties in ServiceInfo (#1225) ([`1492e41`](https://github.com/python-zeroconf/python-zeroconf/commit/1492e41b3d5cba5598cc9dd6bd2bc7d238f13555)) +- Optimize unpacking properties in ServiceInfo + ([#1225](https://github.com/python-zeroconf/python-zeroconf/pull/1225), + [`1492e41`](https://github.com/python-zeroconf/python-zeroconf/commit/1492e41b3d5cba5598cc9dd6bd2bc7d238f13555)) ## v0.79.0 (2023-08-14) ### Features -* feat: refactor notify implementation to reduce overhead of adding and removing listeners (#1224) ([`ceb92cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb92cfe42d885dbb38cee7aaeebf685d97627a9)) +- Refactor notify implementation to reduce overhead of adding and removing listeners + ([#1224](https://github.com/python-zeroconf/python-zeroconf/pull/1224), + [`ceb92cf`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb92cfe42d885dbb38cee7aaeebf685d97627a9)) ## v0.78.0 (2023-08-14) ### Features -* feat: add cython pxd file for _listener.py to improve incoming message processing performance (#1221) ([`f459856`](https://github.com/python-zeroconf/python-zeroconf/commit/f459856a0a61b8afa8a541926d7e15d51f8e4aea)) +- Add 
cython pxd file for _listener.py to improve incoming message processing performance + ([#1221](https://github.com/python-zeroconf/python-zeroconf/pull/1221), + [`f459856`](https://github.com/python-zeroconf/python-zeroconf/commit/f459856a0a61b8afa8a541926d7e15d51f8e4aea)) ## v0.77.0 (2023-08-14) ### Features -* feat: cythonize _listener.py to improve incoming message processing performance (#1220) ([`9efde8c`](https://github.com/python-zeroconf/python-zeroconf/commit/9efde8c8c1ed14c5d3c162f185b49212fcfcb5c9)) +- Cythonize _listener.py to improve incoming message processing performance + ([#1220](https://github.com/python-zeroconf/python-zeroconf/pull/1220), + [`9efde8c`](https://github.com/python-zeroconf/python-zeroconf/commit/9efde8c8c1ed14c5d3c162f185b49212fcfcb5c9)) ## v0.76.0 (2023-08-14) ### Features -* feat: improve performance responding to queries (#1217) ([`69b33be`](https://github.com/python-zeroconf/python-zeroconf/commit/69b33be3b2f9d4a27ef5154cae94afca048efffa)) +- Improve performance responding to queries + ([#1217](https://github.com/python-zeroconf/python-zeroconf/pull/1217), + [`69b33be`](https://github.com/python-zeroconf/python-zeroconf/commit/69b33be3b2f9d4a27ef5154cae94afca048efffa)) ## v0.75.0 (2023-08-13) ### Features -* feat: expose flag to disable strict name checking in service registration (#1215) ([`5df8a57`](https://github.com/python-zeroconf/python-zeroconf/commit/5df8a57a14d59687a3c22ea8ee063e265031e278)) +- Expose flag to disable strict name checking in service registration + ([#1215](https://github.com/python-zeroconf/python-zeroconf/pull/1215), + [`5df8a57`](https://github.com/python-zeroconf/python-zeroconf/commit/5df8a57a14d59687a3c22ea8ee063e265031e278)) -* feat: speed up processing incoming records (#1216) ([`aff625d`](https://github.com/python-zeroconf/python-zeroconf/commit/aff625dc6a5e816dad519644c4adac4f96980c04)) +- Speed up processing incoming records + 
([#1216](https://github.com/python-zeroconf/python-zeroconf/pull/1216), + [`aff625d`](https://github.com/python-zeroconf/python-zeroconf/commit/aff625dc6a5e816dad519644c4adac4f96980c04)) ## v0.74.0 (2023-08-04) ### Bug Fixes -* fix: remove typing on reset_ttl for cython compat (#1213) ([`0094e26`](https://github.com/python-zeroconf/python-zeroconf/commit/0094e2684344c6b7edd7948924f093f1b4c19901)) +- Remove typing on reset_ttl for cython compat + ([#1213](https://github.com/python-zeroconf/python-zeroconf/pull/1213), + [`0094e26`](https://github.com/python-zeroconf/python-zeroconf/commit/0094e2684344c6b7edd7948924f093f1b4c19901)) ### Features -* feat: speed up unpacking text records in ServiceInfo (#1212) ([`99a6f98`](https://github.com/python-zeroconf/python-zeroconf/commit/99a6f98e44a1287ba537eabb852b1b69923402f0)) +- Speed up unpacking text records in ServiceInfo + ([#1212](https://github.com/python-zeroconf/python-zeroconf/pull/1212), + [`99a6f98`](https://github.com/python-zeroconf/python-zeroconf/commit/99a6f98e44a1287ba537eabb852b1b69923402f0)) ## v0.73.0 (2023-08-03) ### Features -* feat: add a cache to service_type_name (#1211) ([`53a694f`](https://github.com/python-zeroconf/python-zeroconf/commit/53a694f60e675ae0560e727be6b721b401c2b68f)) +- Add a cache to service_type_name + ([#1211](https://github.com/python-zeroconf/python-zeroconf/pull/1211), + [`53a694f`](https://github.com/python-zeroconf/python-zeroconf/commit/53a694f60e675ae0560e727be6b721b401c2b68f)) ## v0.72.3 (2023-08-03) ### Bug Fixes -* fix: revert adding typing to DNSRecord.suppressed_by (#1210) ([`3dba5ae`](https://github.com/python-zeroconf/python-zeroconf/commit/3dba5ae0c0e9473b7b20fd6fc79fa1a3b298dc5a)) +- Revert adding typing to DNSRecord.suppressed_by + ([#1210](https://github.com/python-zeroconf/python-zeroconf/pull/1210), + [`3dba5ae`](https://github.com/python-zeroconf/python-zeroconf/commit/3dba5ae0c0e9473b7b20fd6fc79fa1a3b298dc5a)) ## v0.72.2 (2023-08-03) ### Bug Fixes -* fix: 
revert DNSIncoming cimport in _dns.pxd (#1209) ([`5f14b6d`](https://github.com/python-zeroconf/python-zeroconf/commit/5f14b6dc687b3a0716d0ca7f61ccf1e93dfe5fa1)) +- Revert DNSIncoming cimport in _dns.pxd + ([#1209](https://github.com/python-zeroconf/python-zeroconf/pull/1209), + [`5f14b6d`](https://github.com/python-zeroconf/python-zeroconf/commit/5f14b6dc687b3a0716d0ca7f61ccf1e93dfe5fa1)) ## v0.72.1 (2023-08-03) ### Bug Fixes -* fix: race with InvalidStateError when async_request times out (#1208) ([`2233b6b`](https://github.com/python-zeroconf/python-zeroconf/commit/2233b6bc4ceeee5524d2ee88ecae8234173feb5f)) +- Race with InvalidStateError when async_request times out + ([#1208](https://github.com/python-zeroconf/python-zeroconf/pull/1208), + [`2233b6b`](https://github.com/python-zeroconf/python-zeroconf/commit/2233b6bc4ceeee5524d2ee88ecae8234173feb5f)) ## v0.72.0 (2023-08-02) ### Features -* feat: speed up processing incoming records (#1206) ([`126849c`](https://github.com/python-zeroconf/python-zeroconf/commit/126849c92be8cec9253fba9faa591029d992fcc3)) +- Speed up processing incoming records + ([#1206](https://github.com/python-zeroconf/python-zeroconf/pull/1206), + [`126849c`](https://github.com/python-zeroconf/python-zeroconf/commit/126849c92be8cec9253fba9faa591029d992fcc3)) ## v0.71.5 (2023-08-02) ### Bug Fixes -* fix: improve performance of ServiceInfo.async_request (#1205) ([`8019a73`](https://github.com/python-zeroconf/python-zeroconf/commit/8019a73c952f2fc4c88d849aab970fafedb316d8)) +- Improve performance of ServiceInfo.async_request + ([#1205](https://github.com/python-zeroconf/python-zeroconf/pull/1205), + [`8019a73`](https://github.com/python-zeroconf/python-zeroconf/commit/8019a73c952f2fc4c88d849aab970fafedb316d8)) ## v0.71.4 (2023-07-24) ### Bug Fixes -* fix: cleanup naming from previous refactoring in ServiceInfo (#1202) ([`b272d75`](https://github.com/python-zeroconf/python-zeroconf/commit/b272d75abd982f3be1f4b20f683cac38011cc6f4)) +- Cleanup naming 
from previous refactoring in ServiceInfo + ([#1202](https://github.com/python-zeroconf/python-zeroconf/pull/1202), + [`b272d75`](https://github.com/python-zeroconf/python-zeroconf/commit/b272d75abd982f3be1f4b20f683cac38011cc6f4)) ## v0.71.3 (2023-07-23) ### Bug Fixes -* fix: pin python-semantic-release to fix release process (#1200) ([`c145a23`](https://github.com/python-zeroconf/python-zeroconf/commit/c145a238d768aa17c3aebe120c20a46bfbec6b99)) +- Pin python-semantic-release to fix release process + ([#1200](https://github.com/python-zeroconf/python-zeroconf/pull/1200), + [`c145a23`](https://github.com/python-zeroconf/python-zeroconf/commit/c145a238d768aa17c3aebe120c20a46bfbec6b99)) ## v0.71.2 (2023-07-23) ### Bug Fixes -* fix: no change re-release to fix wheel builds (#1199) ([`8c3a4c8`](https://github.com/python-zeroconf/python-zeroconf/commit/8c3a4c80c221bea7401c12e1c6a525e75b7ffea2)) +- No change re-release to fix wheel builds + ([#1199](https://github.com/python-zeroconf/python-zeroconf/pull/1199), + [`8c3a4c8`](https://github.com/python-zeroconf/python-zeroconf/commit/8c3a4c80c221bea7401c12e1c6a525e75b7ffea2)) ## v0.71.1 (2023-07-23) ### Bug Fixes -* fix: add missing if TYPE_CHECKING guard to generate_service_query (#1198) ([`ac53adf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac53adf7e71db14c1a0f9adbfd1d74033df36898)) +- Add missing if TYPE_CHECKING guard to generate_service_query + ([#1198](https://github.com/python-zeroconf/python-zeroconf/pull/1198), + [`ac53adf`](https://github.com/python-zeroconf/python-zeroconf/commit/ac53adf7e71db14c1a0f9adbfd1d74033df36898)) ## v0.71.0 (2023-07-08) ### Features -* feat: improve incoming data processing performance (#1194) ([`a56c776`](https://github.com/python-zeroconf/python-zeroconf/commit/a56c776008ef86f99db78f5997e45a57551be725)) +- Improve incoming data processing performance + ([#1194](https://github.com/python-zeroconf/python-zeroconf/pull/1194), + 
[`a56c776`](https://github.com/python-zeroconf/python-zeroconf/commit/a56c776008ef86f99db78f5997e45a57551be725)) ## v0.70.0 (2023-07-02) ### Features -* feat: add support for sending to a specific `addr` and `port` with `ServiceInfo.async_request` and `ServiceInfo.request` (#1192) ([`405f547`](https://github.com/python-zeroconf/python-zeroconf/commit/405f54762d3f61e97de9c1787e837e953de31412)) +- Add support for sending to a specific `addr` and `port` with `ServiceInfo.async_request` and + `ServiceInfo.request` ([#1192](https://github.com/python-zeroconf/python-zeroconf/pull/1192), + [`405f547`](https://github.com/python-zeroconf/python-zeroconf/commit/405f54762d3f61e97de9c1787e837e953de31412)) ## v0.69.0 (2023-06-18) ### Features -* feat: cython3 support (#1190) ([`8ae8ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/8ae8ba1af324b0c8c2da3bd12c264a5c0f3dcc3d)) +- Cython3 support ([#1190](https://github.com/python-zeroconf/python-zeroconf/pull/1190), + [`8ae8ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/8ae8ba1af324b0c8c2da3bd12c264a5c0f3dcc3d)) -* feat: reorder incoming data handler to reduce overhead (#1189) ([`32756ff`](https://github.com/python-zeroconf/python-zeroconf/commit/32756ff113f675b7a9cf16d3c0ab840ba733e5e4)) +- Reorder incoming data handler to reduce overhead + ([#1189](https://github.com/python-zeroconf/python-zeroconf/pull/1189), + [`32756ff`](https://github.com/python-zeroconf/python-zeroconf/commit/32756ff113f675b7a9cf16d3c0ab840ba733e5e4)) ## v0.68.1 (2023-06-18) ### Bug Fixes -* fix: reduce debug logging overhead by adding missing checks to datagram_received (#1188) ([`ac5c50a`](https://github.com/python-zeroconf/python-zeroconf/commit/ac5c50afc70aaa33fcd20bf02222ff4f0c596fa3)) +- Reduce debug logging overhead by adding missing checks to datagram_received + ([#1188](https://github.com/python-zeroconf/python-zeroconf/pull/1188), + 
[`ac5c50a`](https://github.com/python-zeroconf/python-zeroconf/commit/ac5c50afc70aaa33fcd20bf02222ff4f0c596fa3)) ## v0.68.0 (2023-06-17) ### Features -* feat: reduce overhead to handle queries and responses (#1184) +- Reduce overhead to handle queries and responses + ([#1184](https://github.com/python-zeroconf/python-zeroconf/pull/1184), + [`81126b7`](https://github.com/python-zeroconf/python-zeroconf/commit/81126b7600f94848ef8c58b70bac0c6ab993c6ae)) - adds slots to handler classes -- avoid any expression overhead and inline instead ([`81126b7`](https://github.com/python-zeroconf/python-zeroconf/commit/81126b7600f94848ef8c58b70bac0c6ab993c6ae)) +- avoid any expression overhead and inline instead ## v0.67.0 (2023-06-17) ### Features -* feat: speed up answering incoming questions (#1186) ([`8f37665`](https://github.com/python-zeroconf/python-zeroconf/commit/8f376658d2a3bef0353646e6fddfda15626b73a9)) +- Speed up answering incoming questions + ([#1186](https://github.com/python-zeroconf/python-zeroconf/pull/1186), + [`8f37665`](https://github.com/python-zeroconf/python-zeroconf/commit/8f376658d2a3bef0353646e6fddfda15626b73a9)) ## v0.66.0 (2023-06-13) ### Features -* feat: optimize construction of outgoing dns records (#1182) ([`fc0341f`](https://github.com/python-zeroconf/python-zeroconf/commit/fc0341f281cdb71428c0f1cf90c12d34cbb4acae)) +- Optimize construction of outgoing dns records + ([#1182](https://github.com/python-zeroconf/python-zeroconf/pull/1182), + [`fc0341f`](https://github.com/python-zeroconf/python-zeroconf/commit/fc0341f281cdb71428c0f1cf90c12d34cbb4acae)) ## v0.65.0 (2023-06-13) ### Features -* feat: reduce overhead to enumerate ip addresses in ServiceInfo (#1181) ([`6a85cbf`](https://github.com/python-zeroconf/python-zeroconf/commit/6a85cbf2b872cb0abd184c2dd728d9ae3eb8115c)) +- Reduce overhead to enumerate ip addresses in ServiceInfo + ([#1181](https://github.com/python-zeroconf/python-zeroconf/pull/1181), + 
[`6a85cbf`](https://github.com/python-zeroconf/python-zeroconf/commit/6a85cbf2b872cb0abd184c2dd728d9ae3eb8115c)) ## v0.64.1 (2023-06-05) ### Bug Fixes -* fix: small internal typing cleanups (#1180) ([`f03e511`](https://github.com/python-zeroconf/python-zeroconf/commit/f03e511f7aae72c5ccd4f7514d89e168847bd7a2)) +- Small internal typing cleanups + ([#1180](https://github.com/python-zeroconf/python-zeroconf/pull/1180), + [`f03e511`](https://github.com/python-zeroconf/python-zeroconf/commit/f03e511f7aae72c5ccd4f7514d89e168847bd7a2)) ## v0.64.0 (2023-06-05) ### Bug Fixes -* fix: always answer QU questions when the exact same packet is received from different sources in sequence (#1178) +- Always answer QU questions when the exact same packet is received from different sources in + sequence ([#1178](https://github.com/python-zeroconf/python-zeroconf/pull/1178), + [`74d7ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/74d7ba1aeeae56be087ee8142ee6ca1219744baa)) -If the exact same packet with a QU question is asked from two different sources in a 1s window we end up ignoring the second one as a duplicate. We should still respond in this case because the client wants a unicast response and the question may not be answered by the previous packet since the response may not be multicast. +If the exact same packet with a QU question is asked from two different sources in a 1s window we + end up ignoring the second one as a duplicate. We should still respond in this case because the + client wants a unicast response and the question may not be answered by the previous packet since + the response may not be multicast. fix: include NSEC records in initial broadcast when registering a new service -This also revealed that we do not send NSEC records in the initial broadcast. This needed to be fixed in this PR as well for everything to work as expected since all the tests would fail with 2 updates otherwise. 
([`74d7ba1`](https://github.com/python-zeroconf/python-zeroconf/commit/74d7ba1aeeae56be087ee8142ee6ca1219744baa)) +This also revealed that we do not send NSEC records in the initial broadcast. This needed to be + fixed in this PR as well for everything to work as expected since all the tests would fail with 2 + updates otherwise. ### Features -* feat: speed up processing incoming records (#1179) ([`d919316`](https://github.com/python-zeroconf/python-zeroconf/commit/d9193160b05beeca3755e19fd377ba13fe37b071)) +- Speed up processing incoming records + ([#1179](https://github.com/python-zeroconf/python-zeroconf/pull/1179), + [`d919316`](https://github.com/python-zeroconf/python-zeroconf/commit/d9193160b05beeca3755e19fd377ba13fe37b071)) ## v0.63.0 (2023-05-25) ### Features -* feat: small speed up to fetch dns addresses from ServiceInfo (#1176) ([`4deaa6e`](https://github.com/python-zeroconf/python-zeroconf/commit/4deaa6ed7c9161db55bf16ec068ab7260bbd4976)) +- Improve dns cache performance + ([#1172](https://github.com/python-zeroconf/python-zeroconf/pull/1172), + [`bb496a1`](https://github.com/python-zeroconf/python-zeroconf/commit/bb496a1dd5fa3562c0412cb064d14639a542592e)) -* feat: speed up the service registry (#1174) ([`360ceb2`](https://github.com/python-zeroconf/python-zeroconf/commit/360ceb2548c4c4974ff798aac43a6fff9803ea0e)) +- Small speed up to fetch dns addresses from ServiceInfo + ([#1176](https://github.com/python-zeroconf/python-zeroconf/pull/1176), + [`4deaa6e`](https://github.com/python-zeroconf/python-zeroconf/commit/4deaa6ed7c9161db55bf16ec068ab7260bbd4976)) -* feat: improve dns cache performance (#1172) ([`bb496a1`](https://github.com/python-zeroconf/python-zeroconf/commit/bb496a1dd5fa3562c0412cb064d14639a542592e)) +- Speed up the service registry + ([#1174](https://github.com/python-zeroconf/python-zeroconf/pull/1174), + [`360ceb2`](https://github.com/python-zeroconf/python-zeroconf/commit/360ceb2548c4c4974ff798aac43a6fff9803ea0e)) ## v0.62.0 
(2023-05-04) ### Features -* feat: improve performance of ServiceBrowser outgoing query scheduler (#1170) ([`963d022`](https://github.com/python-zeroconf/python-zeroconf/commit/963d022ef82b615540fa7521d164a98a6c6f5209)) +- Improve performance of ServiceBrowser outgoing query scheduler + ([#1170](https://github.com/python-zeroconf/python-zeroconf/pull/1170), + [`963d022`](https://github.com/python-zeroconf/python-zeroconf/commit/963d022ef82b615540fa7521d164a98a6c6f5209)) ## v0.61.0 (2023-05-03) ### Features -* feat: speed up parsing NSEC records (#1169) ([`06fa94d`](https://github.com/python-zeroconf/python-zeroconf/commit/06fa94d87b4f0451cb475a921ce1d8e9562e0f26)) +- Speed up parsing NSEC records + ([#1169](https://github.com/python-zeroconf/python-zeroconf/pull/1169), + [`06fa94d`](https://github.com/python-zeroconf/python-zeroconf/commit/06fa94d87b4f0451cb475a921ce1d8e9562e0f26)) ## v0.60.0 (2023-05-01) ### Features -* feat: speed up processing incoming data (#1167) ([`fbaaf7b`](https://github.com/python-zeroconf/python-zeroconf/commit/fbaaf7bb6ff985bdabb85feb6cba144f12d4f1d6)) +- Speed up processing incoming data + ([#1167](https://github.com/python-zeroconf/python-zeroconf/pull/1167), + [`fbaaf7b`](https://github.com/python-zeroconf/python-zeroconf/commit/fbaaf7bb6ff985bdabb85feb6cba144f12d4f1d6)) ## v0.59.0 (2023-05-01) ### Features -* feat: speed up decoding dns questions when processing incoming data (#1168) ([`f927190`](https://github.com/python-zeroconf/python-zeroconf/commit/f927190cb24f70fd7c825c6e12151fcc0daf3973)) +- Speed up decoding dns questions when processing incoming data + ([#1168](https://github.com/python-zeroconf/python-zeroconf/pull/1168), + [`f927190`](https://github.com/python-zeroconf/python-zeroconf/commit/f927190cb24f70fd7c825c6e12151fcc0daf3973)) ## v0.58.2 (2023-04-26) ### Bug Fixes -* fix: re-release to rebuild failed wheels (#1165) 
([`4986271`](https://github.com/python-zeroconf/python-zeroconf/commit/498627166a4976f1d9d8cd1f3654b0d50272d266)) +- Re-release to rebuild failed wheels + ([#1165](https://github.com/python-zeroconf/python-zeroconf/pull/1165), + [`4986271`](https://github.com/python-zeroconf/python-zeroconf/commit/498627166a4976f1d9d8cd1f3654b0d50272d266)) ## v0.58.1 (2023-04-26) ### Bug Fixes -* fix: reduce cast calls in service browser (#1164) ([`c0d65ae`](https://github.com/python-zeroconf/python-zeroconf/commit/c0d65aeae7037a18ed1149336f5e7bdb8b2dd8cf)) +- Reduce cast calls in service browser + ([#1164](https://github.com/python-zeroconf/python-zeroconf/pull/1164), + [`c0d65ae`](https://github.com/python-zeroconf/python-zeroconf/commit/c0d65aeae7037a18ed1149336f5e7bdb8b2dd8cf)) ## v0.58.0 (2023-04-23) ### Features -* feat: speed up incoming parser (#1163) ([`4626399`](https://github.com/python-zeroconf/python-zeroconf/commit/46263999c0c7ea5176885f1eadd2c8498834b70e)) +- Speed up incoming parser ([#1163](https://github.com/python-zeroconf/python-zeroconf/pull/1163), + [`4626399`](https://github.com/python-zeroconf/python-zeroconf/commit/46263999c0c7ea5176885f1eadd2c8498834b70e)) ## v0.57.0 (2023-04-23) ### Features -* feat: speed up incoming data parser (#1161) ([`cb4c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/cb4c3b2b80ca3b88b8de6e87062a45e03e8805a6)) +- Speed up incoming data parser + ([#1161](https://github.com/python-zeroconf/python-zeroconf/pull/1161), + [`cb4c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/cb4c3b2b80ca3b88b8de6e87062a45e03e8805a6)) ## v0.56.0 (2023-04-07) ### Features -* feat: reduce denial of service protection overhead (#1157) ([`2c2f26a`](https://github.com/python-zeroconf/python-zeroconf/commit/2c2f26a87d0aac81a77205b06bc9ba499caa2321)) +- Reduce denial of service protection overhead + ([#1157](https://github.com/python-zeroconf/python-zeroconf/pull/1157), + 
[`2c2f26a`](https://github.com/python-zeroconf/python-zeroconf/commit/2c2f26a87d0aac81a77205b06bc9ba499caa2321)) ## v0.55.0 (2023-04-07) ### Features -* feat: improve performance of processing incoming records (#1155) ([`b65e279`](https://github.com/python-zeroconf/python-zeroconf/commit/b65e2792751c44e0fafe9ad3a55dadc5d8ee9d46)) +- Improve performance of processing incoming records + ([#1155](https://github.com/python-zeroconf/python-zeroconf/pull/1155), + [`b65e279`](https://github.com/python-zeroconf/python-zeroconf/commit/b65e2792751c44e0fafe9ad3a55dadc5d8ee9d46)) ## v0.54.0 (2023-04-03) ### Features -* feat: avoid waking async_request when record updates are not relevant (#1153) ([`a3f970c`](https://github.com/python-zeroconf/python-zeroconf/commit/a3f970c7f66067cf2c302c49ed6ad8286f19b679)) +- Avoid waking async_request when record updates are not relevant + ([#1153](https://github.com/python-zeroconf/python-zeroconf/pull/1153), + [`a3f970c`](https://github.com/python-zeroconf/python-zeroconf/commit/a3f970c7f66067cf2c302c49ed6ad8286f19b679)) ## v0.53.1 (2023-04-03) ### Bug Fixes -* fix: addresses incorrect after server name change (#1154) ([`41ea06a`](https://github.com/python-zeroconf/python-zeroconf/commit/41ea06a0192c0d186e678009285759eb37d880d5)) +- Addresses incorrect after server name change + ([#1154](https://github.com/python-zeroconf/python-zeroconf/pull/1154), + [`41ea06a`](https://github.com/python-zeroconf/python-zeroconf/commit/41ea06a0192c0d186e678009285759eb37d880d5)) ## v0.53.0 (2023-04-02) ### Bug Fixes -* fix: make parsed_scoped_addresses return addresses in the same order as all other methods (#1150) ([`9b6adcf`](https://github.com/python-zeroconf/python-zeroconf/commit/9b6adcf5c04a469632ee866c32f5898c5cbf810a)) +- Make parsed_scoped_addresses return addresses in the same order as all other methods + ([#1150](https://github.com/python-zeroconf/python-zeroconf/pull/1150), + 
[`9b6adcf`](https://github.com/python-zeroconf/python-zeroconf/commit/9b6adcf5c04a469632ee866c32f5898c5cbf810a)) ### Features -* feat: improve ServiceBrowser performance by removing OrderedDict (#1148) ([`9a16be5`](https://github.com/python-zeroconf/python-zeroconf/commit/9a16be56a9f69a5d0f7cde13dc1337b6d93c1433)) +- Improve ServiceBrowser performance by removing OrderedDict + ([#1148](https://github.com/python-zeroconf/python-zeroconf/pull/1148), + [`9a16be5`](https://github.com/python-zeroconf/python-zeroconf/commit/9a16be56a9f69a5d0f7cde13dc1337b6d93c1433)) ## v0.52.0 (2023-04-02) ### Features -* feat: small cleanups to cache cleanup interval (#1146) ([`b434b60`](https://github.com/python-zeroconf/python-zeroconf/commit/b434b60f14ebe8f114b7b19bb4f54081c8ae0173)) +- Add ip_addresses_by_version to ServiceInfo + ([#1145](https://github.com/python-zeroconf/python-zeroconf/pull/1145), + [`524494e`](https://github.com/python-zeroconf/python-zeroconf/commit/524494edd49bd049726b19ae8ac8f6eea69a3943)) -* feat: add ip_addresses_by_version to ServiceInfo (#1145) ([`524494e`](https://github.com/python-zeroconf/python-zeroconf/commit/524494edd49bd049726b19ae8ac8f6eea69a3943)) +- Include tests and docs in sdist archives + ([#1142](https://github.com/python-zeroconf/python-zeroconf/pull/1142), + [`da10a3b`](https://github.com/python-zeroconf/python-zeroconf/commit/da10a3b2827cee0719d3bb9152ae897f061c6e2e)) -* feat: speed up processing records in the ServiceBrowser (#1143) ([`6a327d0`](https://github.com/python-zeroconf/python-zeroconf/commit/6a327d00ffb81de55b7c5b599893c789996680c1)) +feat: Include tests and docs in sdist archives -* feat: speed up matching types in the ServiceBrowser (#1144) ([`68871c3`](https://github.com/python-zeroconf/python-zeroconf/commit/68871c3b5569e41740a66b7d3d7fa5cc41514ea5)) +Include documentation and test files in source distributions, in order to make them more useful for + packagers (Linux distributions, Conda). 
Testing is an important part of packaging process, and at + least Gentoo users have requested offline documentation for Python packages. Furthermore, the + COPYING file was missing from sdist, even though it was referenced in README. -* feat: include tests and docs in sdist archives (#1142) +- Small cleanups to cache cleanup interval + ([#1146](https://github.com/python-zeroconf/python-zeroconf/pull/1146), + [`b434b60`](https://github.com/python-zeroconf/python-zeroconf/commit/b434b60f14ebe8f114b7b19bb4f54081c8ae0173)) -feat: Include tests and docs in sdist archives +- Speed up matching types in the ServiceBrowser + ([#1144](https://github.com/python-zeroconf/python-zeroconf/pull/1144), + [`68871c3`](https://github.com/python-zeroconf/python-zeroconf/commit/68871c3b5569e41740a66b7d3d7fa5cc41514ea5)) -Include documentation and test files in source distributions, in order -to make them more useful for packagers (Linux distributions, Conda). -Testing is an important part of packaging process, and at least Gentoo -users have requested offline documentation for Python packages. -Furthermore, the COPYING file was missing from sdist, even though it was -referenced in README. 
([`da10a3b`](https://github.com/python-zeroconf/python-zeroconf/commit/da10a3b2827cee0719d3bb9152ae897f061c6e2e)) +- Speed up processing records in the ServiceBrowser + ([#1143](https://github.com/python-zeroconf/python-zeroconf/pull/1143), + [`6a327d0`](https://github.com/python-zeroconf/python-zeroconf/commit/6a327d00ffb81de55b7c5b599893c789996680c1)) ## v0.51.0 (2023-04-01) ### Features -* feat: improve performance of constructing ServiceInfo (#1141) ([`36d5b45`](https://github.com/python-zeroconf/python-zeroconf/commit/36d5b45a4ece1dca902e9c3c79b5a63b8d9ae41f)) +- Improve performance of constructing ServiceInfo + ([#1141](https://github.com/python-zeroconf/python-zeroconf/pull/1141), + [`36d5b45`](https://github.com/python-zeroconf/python-zeroconf/commit/36d5b45a4ece1dca902e9c3c79b5a63b8d9ae41f)) ## v0.50.0 (2023-04-01) ### Features -* feat: small speed up to handler dispatch (#1140) ([`5bd1b6e`](https://github.com/python-zeroconf/python-zeroconf/commit/5bd1b6e7b4dd796069461c737ded956305096307)) +- Small speed up to handler dispatch + ([#1140](https://github.com/python-zeroconf/python-zeroconf/pull/1140), + [`5bd1b6e`](https://github.com/python-zeroconf/python-zeroconf/commit/5bd1b6e7b4dd796069461c737ded956305096307)) ## v0.49.0 (2023-04-01) ### Features -* feat: speed up processing incoming records (#1139) ([`7246a34`](https://github.com/python-zeroconf/python-zeroconf/commit/7246a344b6c0543871b40715c95c9435db4c7f81)) +- Speed up processing incoming records + ([#1139](https://github.com/python-zeroconf/python-zeroconf/pull/1139), + [`7246a34`](https://github.com/python-zeroconf/python-zeroconf/commit/7246a344b6c0543871b40715c95c9435db4c7f81)) ## v0.48.0 (2023-04-01) ### Features -* feat: reduce overhead to send responses (#1135) ([`c4077dd`](https://github.com/python-zeroconf/python-zeroconf/commit/c4077dde6dfde9e2598eb63daa03c36063a3e7b0)) +- Reduce overhead to send responses + ([#1135](https://github.com/python-zeroconf/python-zeroconf/pull/1135), + 
[`c4077dd`](https://github.com/python-zeroconf/python-zeroconf/commit/c4077dde6dfde9e2598eb63daa03c36063a3e7b0)) ## v0.47.4 (2023-03-20) ### Bug Fixes -* fix: correct duplicate record entries in windows wheels by updating poetry-core (#1134) ([`a43055d`](https://github.com/python-zeroconf/python-zeroconf/commit/a43055d3fa258cd762c3e9394b01f8bdcb24f97e)) +- Correct duplicate record entries in windows wheels by updating poetry-core + ([#1134](https://github.com/python-zeroconf/python-zeroconf/pull/1134), + [`a43055d`](https://github.com/python-zeroconf/python-zeroconf/commit/a43055d3fa258cd762c3e9394b01f8bdcb24f97e)) ## v0.47.3 (2023-02-14) ### Bug Fixes -* fix: hold a strong reference to the query sender start task (#1128) ([`808c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/808c3b2194a7f499a469a9893102d328ccee83db)) +- Hold a strong reference to the query sender start task + ([#1128](https://github.com/python-zeroconf/python-zeroconf/pull/1128), + [`808c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/808c3b2194a7f499a469a9893102d328ccee83db)) ## v0.47.2 (2023-02-14) ### Bug Fixes -* fix: missing c extensions with newer poetry (#1129) ([`44d7fc6`](https://github.com/python-zeroconf/python-zeroconf/commit/44d7fc6483485102f60c91d591d0d697872f8865)) +- Missing c extensions with newer poetry + ([#1129](https://github.com/python-zeroconf/python-zeroconf/pull/1129), + [`44d7fc6`](https://github.com/python-zeroconf/python-zeroconf/commit/44d7fc6483485102f60c91d591d0d697872f8865)) ## v0.47.1 (2022-12-24) ### Bug Fixes -* fix: the equality checks for DNSPointer and DNSService should be case insensitive (#1122) ([`48ae77f`](https://github.com/python-zeroconf/python-zeroconf/commit/48ae77f026a96e2ca475b0ff80cb6d22207ce52f)) +- The equality checks for DNSPointer and DNSService should be case insensitive + ([#1122](https://github.com/python-zeroconf/python-zeroconf/pull/1122), + 
[`48ae77f`](https://github.com/python-zeroconf/python-zeroconf/commit/48ae77f026a96e2ca475b0ff80cb6d22207ce52f)) ## v0.47.0 (2022-12-22) ### Features -* feat: optimize equality checks for DNS records (#1120) ([`3a25ff7`](https://github.com/python-zeroconf/python-zeroconf/commit/3a25ff74bea83cd7d50888ce1ebfd7650d704bfa)) +- Optimize equality checks for DNS records + ([#1120](https://github.com/python-zeroconf/python-zeroconf/pull/1120), + [`3a25ff7`](https://github.com/python-zeroconf/python-zeroconf/commit/3a25ff74bea83cd7d50888ce1ebfd7650d704bfa)) ## v0.46.0 (2022-12-21) ### Features -* feat: optimize the dns cache (#1119) ([`e80fcef`](https://github.com/python-zeroconf/python-zeroconf/commit/e80fcef967024f8e846e44b464a82a25f5550edf)) +- Optimize the dns cache ([#1119](https://github.com/python-zeroconf/python-zeroconf/pull/1119), + [`e80fcef`](https://github.com/python-zeroconf/python-zeroconf/commit/e80fcef967024f8e846e44b464a82a25f5550edf)) ## v0.45.0 (2022-12-20) ### Features -* feat: optimize construction of outgoing packets (#1118) ([`81e186d`](https://github.com/python-zeroconf/python-zeroconf/commit/81e186d365c018381f9b486a4dbe4e2e4b8bacbf)) +- Optimize construction of outgoing packets + ([#1118](https://github.com/python-zeroconf/python-zeroconf/pull/1118), + [`81e186d`](https://github.com/python-zeroconf/python-zeroconf/commit/81e186d365c018381f9b486a4dbe4e2e4b8bacbf)) ## v0.44.0 (2022-12-18) ### Features -* feat: optimize dns objects by adding pxd files (#1113) ([`919d4d8`](https://github.com/python-zeroconf/python-zeroconf/commit/919d4d875747b4fa68e25bccd5aae7f304d8a36d)) +- Optimize dns objects by adding pxd files + ([#1113](https://github.com/python-zeroconf/python-zeroconf/pull/1113), + [`919d4d8`](https://github.com/python-zeroconf/python-zeroconf/commit/919d4d875747b4fa68e25bccd5aae7f304d8a36d)) ## v0.43.0 (2022-12-18) ### Features -* feat: optimize incoming parser by reducing call stack (#1116) 
([`11f3f0e`](https://github.com/python-zeroconf/python-zeroconf/commit/11f3f0e699e00c1ee3d6d8ab5e30f62525510589)) +- Optimize incoming parser by reducing call stack + ([#1116](https://github.com/python-zeroconf/python-zeroconf/pull/1116), + [`11f3f0e`](https://github.com/python-zeroconf/python-zeroconf/commit/11f3f0e699e00c1ee3d6d8ab5e30f62525510589)) ## v0.42.0 (2022-12-18) ### Features -* feat: optimize incoming parser by using unpack_from (#1115) ([`a7d50ba`](https://github.com/python-zeroconf/python-zeroconf/commit/a7d50baab362eadd2d292df08a39de6836b41ea7)) +- Optimize incoming parser by using unpack_from + ([#1115](https://github.com/python-zeroconf/python-zeroconf/pull/1115), + [`a7d50ba`](https://github.com/python-zeroconf/python-zeroconf/commit/a7d50baab362eadd2d292df08a39de6836b41ea7)) ## v0.41.0 (2022-12-18) ### Features -* feat: optimize incoming parser by adding pxd files (#1111) ([`26efeb0`](https://github.com/python-zeroconf/python-zeroconf/commit/26efeb09783050266242542228f34eb4dd83e30c)) +- Optimize incoming parser by adding pxd files + ([#1111](https://github.com/python-zeroconf/python-zeroconf/pull/1111), + [`26efeb0`](https://github.com/python-zeroconf/python-zeroconf/commit/26efeb09783050266242542228f34eb4dd83e30c)) ## v0.40.1 (2022-12-18) ### Bug Fixes -* fix: fix project name in pyproject.toml (#1112) ([`a330f62`](https://github.com/python-zeroconf/python-zeroconf/commit/a330f62040475257c4a983044e1675aeb95e030a)) +- Fix project name in pyproject.toml + ([#1112](https://github.com/python-zeroconf/python-zeroconf/pull/1112), + [`a330f62`](https://github.com/python-zeroconf/python-zeroconf/commit/a330f62040475257c4a983044e1675aeb95e030a)) ## v0.40.0 (2022-12-17) ### Features -* feat: drop async_timeout requirement for python 3.11+ (#1107) ([`1f4224e`](https://github.com/python-zeroconf/python-zeroconf/commit/1f4224ef122299235013cb81b501f8ff9a30dea1)) +- Drop async_timeout requirement for python 3.11+ + 
([#1107](https://github.com/python-zeroconf/python-zeroconf/pull/1107), + [`1f4224e`](https://github.com/python-zeroconf/python-zeroconf/commit/1f4224ef122299235013cb81b501f8ff9a30dea1)) ## v0.39.5 (2022-12-17) -### Unknown - -* 0.39.5 ([`2be6fbf`](https://github.com/python-zeroconf/python-zeroconf/commit/2be6fbfe3d10b185096814d2d0de322733d273cf)) - ## v0.39.4 (2022-10-31) -### Unknown - -* Bump version: 0.39.3 → 0.39.4 ([`e620f2a`](https://github.com/python-zeroconf/python-zeroconf/commit/e620f2a1d4f381feb99b639c6ab17845396ba7ea)) - -* Update changelog for 0.39.4 (#1103) ([`03821b6`](https://github.com/python-zeroconf/python-zeroconf/commit/03821b6f4d9fdc40d94d1070f69553649d18909b)) - -* Fix IP changes being missed by ServiceInfo (#1102) ([`524ae89`](https://github.com/python-zeroconf/python-zeroconf/commit/524ae89966d9300e78642a91434ad55643277a48)) - ## v0.39.3 (2022-10-26) -### Unknown - -* Bump version: 0.39.2 → 0.39.3 ([`aee3165`](https://github.com/python-zeroconf/python-zeroconf/commit/aee316539b0778eaf2b8878f78d9ead373760cfb)) - -* Update changelog for 0.39.3 (#1101) ([`39c9842`](https://github.com/python-zeroconf/python-zeroconf/commit/39c9842b80ac7d978e8c7ffef0ad836b3b4700f6)) - -* Fix port changes not being seen by ServiceInfo (#1100) ([`c96f5f6`](https://github.com/python-zeroconf/python-zeroconf/commit/c96f5f69d8e68672bb6760b1e40a0de51b62efd6)) - -* Update CI to use released python 3.11 (#1099) ([`6976980`](https://github.com/python-zeroconf/python-zeroconf/commit/6976980b4874dd65ee533d43be57694bb3b7d0fc)) - ## v0.39.2 (2022-10-20) -### Unknown - -* Bump version: 0.39.1 → 0.39.2 ([`785e475`](https://github.com/python-zeroconf/python-zeroconf/commit/785e475467225ddc4930d5302f130781223fd298)) - -* Update changelog for 0.39.2 (#1098) ([`b197344`](https://github.com/python-zeroconf/python-zeroconf/commit/b19734484b4c5eebb86fe6897a26ad082b07bed5)) - -* Improve cache of decode labels at offset (#1097) 
([`d3c475f`](https://github.com/python-zeroconf/python-zeroconf/commit/d3c475f3e2590ae5a3056d85c29a66dc71ae3bdf)) - -* Only reprocess address records if the server changes (#1095) ([`0989336`](https://github.com/python-zeroconf/python-zeroconf/commit/0989336d79bc4dd0ef3b26e8d0f9529fca81c1fb)) - -* Prepare for python 3.11 support by adding rc2 to the CI (#1085) ([`7430ce1`](https://github.com/python-zeroconf/python-zeroconf/commit/7430ce1c462be0dd210712b4f7b3675efd3a6963)) - ## v0.39.1 (2022-09-05) -### Unknown - -* Bump version: 0.39.0 → 0.39.1 ([`6f90896`](https://github.com/python-zeroconf/python-zeroconf/commit/6f90896a590d6d60db75688a1ba753c333c8faab)) - -* Update changelog for 0.39.1 (#1091) ([`cad3963`](https://github.com/python-zeroconf/python-zeroconf/commit/cad3963e566a7bb2dd188088c11e7a0abb6b3924)) - -* Replace pack with to_bytes (#1090) ([`5968b76`](https://github.com/python-zeroconf/python-zeroconf/commit/5968b76ac2ffe6e41b8961c59bdcc5a48ba410eb)) - ## v0.39.0 (2022-08-05) -### Unknown - -* Bump version: 0.38.7 → 0.39.0 ([`60167b0`](https://github.com/python-zeroconf/python-zeroconf/commit/60167b05227ec33668aac5b960a8bc5ba5b833de)) - -* 0.39.0 changelog (#1087) ([`946890a`](https://github.com/python-zeroconf/python-zeroconf/commit/946890aca540bbae95abe8a6ffe66db56fa9e986)) - -* Remove coveralls from dev requirements (#1086) ([`087914d`](https://github.com/python-zeroconf/python-zeroconf/commit/087914da2e914275dd0fff1e4466b3c51ae0c6d3)) - -* Fix run_coro_with_timeout test not running in the CI (#1082) ([`b7a24fe`](https://github.com/python-zeroconf/python-zeroconf/commit/b7a24fef05fc6c166b25cfd4235e59c5cbb96a4c)) - -* Fix flakey service_browser_expire_callbacks test (#1084) ([`d5032b7`](https://github.com/python-zeroconf/python-zeroconf/commit/d5032b70b6ebc5c221a43f778f4d897a1d891f91)) - -* Fix flakey test_sending_unicast on windows (#1083) ([`389658d`](https://github.com/python-zeroconf/python-zeroconf/commit/389658d998a23deecd96023794d3672e51189a35)) - 
-* Replace wait_event_or_timeout internals with async_timeout (#1081) - -Its unlikely that https://bugs.python.org/issue39032 and -https://github.com/python/cpython/issues/83213 will be fixed -soon. While we moved away from an asyncio.Condition, we still -has a similar problem with waiting for an asyncio.Event which -wait_event_or_timeout played well with. async_timeout avoids -creating a task so its a bit more efficient. Since we call -these when resolving ServiceInfo, avoiding task creation -will resolve a performance problem when ServiceBrowsers -startup as they tend to create task storms when coupled -with ServiceInfo lookups. ([`7ffea9f`](https://github.com/python-zeroconf/python-zeroconf/commit/7ffea9f93e758f75a0eeb9997ff8d9c9d47ec31a)) - -* Update stale docstrings in AsyncZeroconf (#1079) ([`88323d0`](https://github.com/python-zeroconf/python-zeroconf/commit/88323d0c7866f78edde063080c63a72c6e875772)) - ## v0.38.7 (2022-06-14) -### Unknown - -* Bump version: 0.38.6 → 0.38.7 ([`f3a9f80`](https://github.com/python-zeroconf/python-zeroconf/commit/f3a9f804914fec37e961f80f347c4e706c4bae33)) - -* Update changelog for 0.38.7 (#1078) ([`5f7ba0d`](https://github.com/python-zeroconf/python-zeroconf/commit/5f7ba0d7dc9a5a6b2cf3a321b7b2f448d4332de9)) - -* Speed up unpacking incoming packet data (#1076) ([`533ad10`](https://github.com/python-zeroconf/python-zeroconf/commit/533ad10121739997a4925d90792cbe9e00a5ac4f)) - ## v0.38.6 (2022-05-06) -### Unknown - -* Bump version: 0.38.5 → 0.38.6 ([`1aa7842`](https://github.com/python-zeroconf/python-zeroconf/commit/1aa7842ae0f914c10465ae977551698046406d55)) - -* Update changelog for 0.38.6 (#1073) ([`dfd3222`](https://github.com/python-zeroconf/python-zeroconf/commit/dfd3222405f0123a849d376d8be466be46bdb557)) - -* Always return `started` as False once Zeroconf has been marked as done (#1072) ([`ed02e5d`](https://github.com/python-zeroconf/python-zeroconf/commit/ed02e5d92768d1fc41163f59e303a76843bfd9fd)) - -* Avoid waking up 
ServiceInfo listeners when there is no new data (#1068) ([`59624a6`](https://github.com/python-zeroconf/python-zeroconf/commit/59624a6cfb1839b2654a6021a7317a1bdad179e9)) - -* Remove left-in debug print (#1071) ([`5fb0954`](https://github.com/python-zeroconf/python-zeroconf/commit/5fb0954cf2c6040704c3db1d2b0fece389425e5b)) - -* Use unique name in test_service_browser_expire_callbacks test (#1069) ([`89c9022`](https://github.com/python-zeroconf/python-zeroconf/commit/89c9022f87d3a83cc586b153fb7d5ea3af69ae3b)) - -* Fix CI failures (#1070) ([`f9b2816`](https://github.com/python-zeroconf/python-zeroconf/commit/f9b2816e15b0459f8051079f77b70e983769cd44)) - ## v0.38.5 (2022-05-01) -### Unknown - -* Bump version: 0.38.4 → 0.38.5 ([`3c55388`](https://github.com/python-zeroconf/python-zeroconf/commit/3c5538899b8974e99c9a279ce3ac46971ab5d91c)) - -* Update changelog for 0.38.5 (#1066) ([`ae3635b`](https://github.com/python-zeroconf/python-zeroconf/commit/ae3635b9ee73edeaabe2cbc027b8fb8bd7cd97da)) - -* Fix ServiceBrowsers not getting `ServiceStateChange.Removed` callbacks on PTR record expire (#1064) ([`10ee205`](https://github.com/python-zeroconf/python-zeroconf/commit/10ee2053a80f7c7221b4fa1475d66b01abd21b11)) - -* Fix ci trying to run mypy on pypy (#1065) ([`31662b7`](https://github.com/python-zeroconf/python-zeroconf/commit/31662b7a0bba65bea1fbfc09c70cd2970160c5c6)) - -* Force minimum version of 3.7 and update example (#1060) - -Co-authored-by: J. 
Nick Koston ([`6e842f2`](https://github.com/python-zeroconf/python-zeroconf/commit/6e842f238b3e1f3b738ed058e0fa4068115f041b)) - -* Fix mypy error in zeroconf._service.info (#1062) ([`e9d25f7`](https://github.com/python-zeroconf/python-zeroconf/commit/e9d25f7749778979b7449464153163587583bf8d)) - -* Refactor to fix mypy error (#1061) ([`6c451f6`](https://github.com/python-zeroconf/python-zeroconf/commit/6c451f64e7cbeaa0bb77f66790936afda2d058ef)) - ## v0.38.4 (2022-02-28) -### Unknown - -* Bump version: 0.38.3 → 0.38.4 ([`5c40e89`](https://github.com/python-zeroconf/python-zeroconf/commit/5c40e89420255b5b978bff4682b21f0820fb4682)) - -* Update changelog for 0.38.4 (#1058) ([`3736348`](https://github.com/python-zeroconf/python-zeroconf/commit/3736348da30ee4b7c50713936f2ae919e5446ffa)) - -* Fix IP Address updates when hostname is uppercase (#1057) ([`79d067b`](https://github.com/python-zeroconf/python-zeroconf/commit/79d067b88f9108259a44f33801e26bd3a25ca759)) - ## v0.38.3 (2022-01-31) -### Unknown - -* Bump version: 0.38.2 → 0.38.3 ([`e42549c`](https://github.com/python-zeroconf/python-zeroconf/commit/e42549cb70796d0577c97be96a09bca0056a5755)) - -* Update changelog for 0.38.2/3 (#1053) ([`d99c7ff`](https://github.com/python-zeroconf/python-zeroconf/commit/d99c7ffea37fd27c315115133dab08445aa417d1)) - ## v0.38.2 (2022-01-31) -### Unknown - -* Bump version: 0.38.1 → 0.38.2 ([`50cd12d`](https://github.com/python-zeroconf/python-zeroconf/commit/50cd12d8c2ced166da8f4852120ba8a28b13cba0)) - -* Make decode errors more helpful in finding the source of the bad data (#1052) ([`25e6123`](https://github.com/python-zeroconf/python-zeroconf/commit/25e6123a07a9560e978a04d5e285bfa74ee41e64)) - ## v0.38.1 (2021-12-23) -### Unknown - -* Bump version: 0.38.0 → 0.38.1 ([`6a11f24`](https://github.com/python-zeroconf/python-zeroconf/commit/6a11f24e1fc9d73f0dbb62efd834f17a9bd451c4)) - -* Update changelog for 0.38.1 (#1045) 
([`670d4ac`](https://github.com/python-zeroconf/python-zeroconf/commit/670d4ac3be7e32d02afe85b72264a241b5a25ba8)) - -* Avoid linear type searches in ServiceBrowsers (#1044) ([`ff76634`](https://github.com/python-zeroconf/python-zeroconf/commit/ff766345461a82547abe462b5d690621c755d480)) - -* Improve performance of query scheduler (#1043) ([`27e50ff`](https://github.com/python-zeroconf/python-zeroconf/commit/27e50ff95625d128f71864138b8e5d871503adf0)) - ## v0.38.0 (2021-12-23) -### Unknown - -* Bump version: 0.37.0 → 0.38.0 ([`95ee5dc`](https://github.com/python-zeroconf/python-zeroconf/commit/95ee5dc031c9c512f99536186d1d89a99e4af37f)) - -* Update changelog for 0.38.0 (#1042) ([`de14202`](https://github.com/python-zeroconf/python-zeroconf/commit/de1420213cd7e3bd8f57e727ff1031c7b10cf7a0)) - -* Handle Service types that end with another service type (#1041) - -Co-authored-by: J. Nick Koston ([`a4d619a`](https://github.com/python-zeroconf/python-zeroconf/commit/a4d619a9f094682d9dcfc7f8fa293f17bcae88f2)) - -* Add tests for instance names containing dot(s) (#1039) - -Co-authored-by: J. 
Nick Koston ([`22ed08c`](https://github.com/python-zeroconf/python-zeroconf/commit/22ed08c7e5403a788b1c177a1bb9558419bce2b1)) - -* Drop python 3.6 support (#1009) ([`631a6f7`](https://github.com/python-zeroconf/python-zeroconf/commit/631a6f7c7863897336a9d6ca4bd1736cc7cc97af)) - ## v0.37.0 (2021-11-18) -### Unknown - -* Bump version: 0.36.13 → 0.37.0 ([`2996e64`](https://github.com/python-zeroconf/python-zeroconf/commit/2996e642f6b1abba1dbb8242ccca4cd4b96696f6)) - -* Update changelog for 0.37.0 (#1035) ([`61a7e3f`](https://github.com/python-zeroconf/python-zeroconf/commit/61a7e3fb65d99db7d51f1df42b286b55710a2e99)) - -* Log an error when listeners are added that do not inherit from RecordUpdateListener (#1034) ([`ee071a1`](https://github.com/python-zeroconf/python-zeroconf/commit/ee071a12f31f7010110eef5ccef80c6cdf469d87)) - -* Throw NotRunningException when Zeroconf is not running (#1033) - -- Before this change the consumer would get a timeout or an EventLoopBlocked - exception when calling `ServiceInfo.*request` when the instance had already been shutdown. - This was quite a confusing result. 
([`28938d2`](https://github.com/python-zeroconf/python-zeroconf/commit/28938d20bb62ae0d9aa2f94929f60434fb346704)) - -* Throw EventLoopBlocked instead of concurrent.futures.TimeoutError (#1032) ([`21bd107`](https://github.com/python-zeroconf/python-zeroconf/commit/21bd10762a89ca3f4ca89f598c9d93684a02f51b)) - ## v0.36.13 (2021-11-13) -### Unknown - -* Bump version: 0.36.12 → 0.36.13 ([`4241c76`](https://github.com/python-zeroconf/python-zeroconf/commit/4241c76550130469aecbe88cc1a7cdc13505f8ba)) - -* Update changelog for 0.36.13 (#1030) ([`106cf27`](https://github.com/python-zeroconf/python-zeroconf/commit/106cf27478bb0c1e6e5a7194661ff52947d61c96)) - -* Downgrade incoming corrupt packet logging to debug (#1029) - -- Warning about network traffic we have no control over - is confusing to users as they think there is - something wrong with zeroconf ([`73c52d0`](https://github.com/python-zeroconf/python-zeroconf/commit/73c52d04a140bc744669777a0f353eefc6623ff9)) - -* Skip unavailable interfaces during socket bind (#1028) - -- We already skip these when adding multicast members. 
- Apply the same logic to the socket bind call ([`aa59998`](https://github.com/python-zeroconf/python-zeroconf/commit/aa59998182ce29c55f8c3dde9a058ce36ac2bb2d)) - ## v0.36.12 (2021-11-05) -### Unknown - -* Bump version: 0.36.11 → 0.36.12 ([`8b0dc48`](https://github.com/python-zeroconf/python-zeroconf/commit/8b0dc48ed42d8edc78750122eb5685a50c3cdc11)) - -* Update changelog for 0.36.12 (#1027) ([`51bf364`](https://github.com/python-zeroconf/python-zeroconf/commit/51bf364b364ecaad16503df4a4c4c3bb5ead2775)) - -* Account for intricacies of floating-point arithmetic in service browser tests (#1026) ([`3c70808`](https://github.com/python-zeroconf/python-zeroconf/commit/3c708080b3e42a02930ad17c96a2cf0dcb06f441)) - -* Prevent service lookups from deadlocking if time abruptly moves backwards (#1006) - -- The typical reason time moves backwards is via an ntp update ([`38380a5`](https://github.com/python-zeroconf/python-zeroconf/commit/38380a58a64f563f105cecc610f194c20056b2b6)) - ## v0.36.11 (2021-10-30) -### Unknown - -* Bump version: 0.36.10 → 0.36.11 ([`3d8f50d`](https://github.com/python-zeroconf/python-zeroconf/commit/3d8f50de74f7b3941d9b35b6ae6e42ba02be9361)) - -* Update changelog for 0.36.11 (#1024) ([`69a9b8e`](https://github.com/python-zeroconf/python-zeroconf/commit/69a9b8e060ae8a596050d393c0a5c8b43beadc8e)) - -* Add readme check to the CI (#1023) ([`c966976`](https://github.com/python-zeroconf/python-zeroconf/commit/c966976531ac9222460763d647d0a3b75459e275)) - ## v0.36.10 (2021-10-30) -### Unknown - -* Bump version: 0.36.9 → 0.36.10 ([`e0b340a`](https://github.com/python-zeroconf/python-zeroconf/commit/e0b340afbfd25ae9d05a59a577938b062287c8b6)) - -* Update changelog for 0.36.10 (#1021) ([`69ce817`](https://github.com/python-zeroconf/python-zeroconf/commit/69ce817a68d65f2db0bfe6d4790d3a6a356ac83f)) - -* Fix test failure when has_working_ipv6 generates an exception (#1022) 
([`cd8984d`](https://github.com/python-zeroconf/python-zeroconf/commit/cd8984d3e95bffe6fd32b97eae9844bf5afed4de)) - -* Strip scope_id from IPv6 address if given. (#1020) ([`686febd`](https://github.com/python-zeroconf/python-zeroconf/commit/686febdd181c837fa6a41afce91edeeded731fbe)) - -* Optimize decoding labels from incoming packets (#1019) - -- decode is a bit faster vs str() - -``` ->>> ts = Timer("s.decode('utf-8', 'replace')", "s = b'TV Beneden (2)\x10_androidtvremote\x04_tcp\x05local'") ->>> ts.timeit() -0.09910525000003645 ->>> ts = Timer("str(s, 'utf-8', 'replace')", "s = b'TV Beneden (2)\x10_androidtvremote\x04_tcp\x05local'") ->>> ts.timeit() -0.1304596250000145 -``` ([`4b9a6c3`](https://github.com/python-zeroconf/python-zeroconf/commit/4b9a6c3fd4aec920597e7e63e82e935df68804f4)) - -* Fix typo in changelog (#1017) ([`0fdcd51`](https://github.com/python-zeroconf/python-zeroconf/commit/0fdcd5146264b37daa7cc35bda883519175e362f)) - ## v0.36.9 (2021-10-22) -### Unknown - -* Bump version: 0.36.8 → 0.36.9 ([`d92d3d0`](https://github.com/python-zeroconf/python-zeroconf/commit/d92d3d030558c1b81b2e35f701b585f4b48fa99a)) - -* Update changelog for 0.36.9 (#1016) ([`1427ba7`](https://github.com/python-zeroconf/python-zeroconf/commit/1427ba75a8f7e2962aa0b3105d3c856002134790)) - -* Ensure ServiceInfo orders newest addresess first (#1012) ([`87a4d8f`](https://github.com/python-zeroconf/python-zeroconf/commit/87a4d8f4d5c8365425c2ee969032205f916f80c1)) - ## v0.36.8 (2021-10-10) -### Unknown - -* Bump version: 0.36.7 → 0.36.8 ([`61275ef`](https://github.com/python-zeroconf/python-zeroconf/commit/61275efd05688a61d656b43125b01a5d588f1dba)) - -* Update changelog for 0.36.8 (#1010) ([`1551618`](https://github.com/python-zeroconf/python-zeroconf/commit/15516188f346c70f64a923bb587804b9bf948873)) - -* Fix ServiceBrowser infinite looping when zeroconf is closed before its canceled (#1008) 
([`b0e8c8a`](https://github.com/python-zeroconf/python-zeroconf/commit/b0e8c8a21fd721e60adbac4dbf7a03959fc3f641)) - -* Update CI to use python 3.10, pypy 3.7 (#1007) ([`fec9f3d`](https://github.com/python-zeroconf/python-zeroconf/commit/fec9f3dc9626be08eccdf1263dbf4d1686fd27b2)) - -* Cleanup typing in zeroconf._protocol.outgoing (#1000) ([`543558d`](https://github.com/python-zeroconf/python-zeroconf/commit/543558d0498ed03eb9dc4597c4c40484e16ee4e6)) - -* Breakout functions with no self-use in zeroconf._handlers (#1003) ([`af4d082`](https://github.com/python-zeroconf/python-zeroconf/commit/af4d082240a545ba3014eb7f1056c3b32ce2cb70)) - -* Use more f-strings in zeroconf._dns (#1002) ([`d3ed691`](https://github.com/python-zeroconf/python-zeroconf/commit/d3ed69107330f1a29f45d174caafdec1e894f666)) - -* Remove unused code in zeroconf._core (#1001) - -- Breakout functions without self-use ([`8e45ea9`](https://github.com/python-zeroconf/python-zeroconf/commit/8e45ea943be6490b2217f0eb01501e12a5221c16)) - ## v0.36.7 (2021-09-22) -### Unknown - -* Bump version: 0.36.6 → 0.36.7 ([`f44b40e`](https://github.com/python-zeroconf/python-zeroconf/commit/f44b40e26ea8872151ea9ee4762b95ca25790089)) - -* Update changelog for 0.36.7 (#999) ([`d2853c3`](https://github.com/python-zeroconf/python-zeroconf/commit/d2853c31db9ece28fb258c4146ba61cf0e6a6592)) - -* Improve log message when receiving an invalid or corrupt packet (#998) ([`b637846`](https://github.com/python-zeroconf/python-zeroconf/commit/b637846e7df3292d6dcdd38a8eb77b6fa3287c51)) - -* Reduce logging overhead (#994) ([`7df7e4a`](https://github.com/python-zeroconf/python-zeroconf/commit/7df7e4a68e33c3e3a5bddf0168e248a4542a788f)) - -* Reduce overhead to compare dns records (#997) ([`7fa51de`](https://github.com/python-zeroconf/python-zeroconf/commit/7fa51de5b71d03470643a83004b9f6f8d4017214)) - -* Refactor service registry to avoid use of getattr (#996) 
([`7622365`](https://github.com/python-zeroconf/python-zeroconf/commit/762236547d4838f2b6a94cfa20221dfdd03e9b94)) - -* Flush CI cache (#995) ([`93ddf7c`](https://github.com/python-zeroconf/python-zeroconf/commit/93ddf7cf9b47d7ff1e341b6c2875254b6f00eef1)) - ## v0.36.6 (2021-09-19) -### Unknown - -* Bump version: 0.36.5 → 0.36.6 ([`0327a06`](https://github.com/python-zeroconf/python-zeroconf/commit/0327a068250c85f3ff84d3f0b809b51f83321c47)) - -* Fix tense of 0.36.6 changelog (#992) ([`29f995f`](https://github.com/python-zeroconf/python-zeroconf/commit/29f995fd3c09604f37980e74f2785b1a451da089)) - -* Update changelog for 0.36.6 (#991) ([`92f5f4a`](https://github.com/python-zeroconf/python-zeroconf/commit/92f5f4a80b8a8e50df5ca06e3cc45480dc39b504)) - -* Simplify the can_send_to check (#990) ([`1887c55`](https://github.com/python-zeroconf/python-zeroconf/commit/1887c554b3f9d0b90a1c01798d7f06a7e4de6900)) - ## v0.36.5 (2021-09-18) -### Unknown - -* Bump version: 0.36.4 → 0.36.5 ([`34f4a26`](https://github.com/python-zeroconf/python-zeroconf/commit/34f4a26c9254d6002bdccb1a003d9822a8798c04)) - -* Update changelog for 0.36.5 (#989) ([`aebabe9`](https://github.com/python-zeroconf/python-zeroconf/commit/aebabe95c59e34f703307340e087b3eab5339a06)) - -* Seperate zeroconf._protocol into an incoming and outgoing modules (#988) ([`87b6a32`](https://github.com/python-zeroconf/python-zeroconf/commit/87b6a32fb77d9bdcea9d2d7ffba189abc5371b50)) - -* Reduce dns protocol attributes and add slots (#987) ([`f4665fc`](https://github.com/python-zeroconf/python-zeroconf/commit/f4665fc67cd762c4ab66271a550d75640d3bffca)) - -* Fix typo in changelog (#986) ([`4398538`](https://github.com/python-zeroconf/python-zeroconf/commit/43985380b9e995d9790d71486aed258326ad86e4)) - ## v0.36.4 (2021-09-16) -### Unknown - -* Bump version: 0.36.3 → 0.36.4 ([`a23f6d2`](https://github.com/python-zeroconf/python-zeroconf/commit/a23f6d2cc40ea696410c3c31b73760065c36f0bf)) - -* Update changelog for 0.36.4 (#985) 
([`f4d4164`](https://github.com/python-zeroconf/python-zeroconf/commit/f4d4164989931adbac0e5907b7bf276da1d0d7d7)) - -* Defer decoding known answers until needed (#983) ([`88b9875`](https://github.com/python-zeroconf/python-zeroconf/commit/88b987551cb98757c2df2540ba390f320d46fa7b)) - -* Collapse _GLOBAL_DONE into done (#984) ([`05c4329`](https://github.com/python-zeroconf/python-zeroconf/commit/05c4329d7647c381783ead086c2ed4f3b6b44262)) - -* Remove flake8 requirement restriction as its no longer needed (#981) ([`bc64d63`](https://github.com/python-zeroconf/python-zeroconf/commit/bc64d63ef73e643e71634957fd79e6f6597373d4)) - -* Reduce duplicate code to write records (#979) ([`acf6457`](https://github.com/python-zeroconf/python-zeroconf/commit/acf6457b3c6742c92e9112b0a39a387b33cea4db)) - -* Force CI cache clear (#982) ([`d9ea918`](https://github.com/python-zeroconf/python-zeroconf/commit/d9ea9189def07531d126e01c7397b2596d9a8695)) - -* Reduce name compression overhead and complexity (#978) ([`f1d6fc3`](https://github.com/python-zeroconf/python-zeroconf/commit/f1d6fc3f60e685ff63b1a1cb820cfc3ca5268fcb)) - ## v0.36.3 (2021-09-14) -### Unknown - -* Bump version: 0.36.2 → 0.36.3 ([`769b397`](https://github.com/python-zeroconf/python-zeroconf/commit/769b3973835ebc6f5a34e236a01cb2cd935e81de)) - -* Update changelog for 0.36.3 (#977) ([`84f16bf`](https://github.com/python-zeroconf/python-zeroconf/commit/84f16bff6df41f1907e060e7bd4ce24d173d51c4)) - -* Reduce DNSIncoming parsing overhead (#975) - -- Parsing incoming packets is the most expensive operation - zeroconf performs on networks with high mDNS volume ([`78f9cd5`](https://github.com/python-zeroconf/python-zeroconf/commit/78f9cd5123d0e3c582aba05bd61388419d4dc01e)) - ## v0.36.2 (2021-08-30) -### Unknown - -* Bump version: 0.36.1 → 0.36.2 ([`5f52438`](https://github.com/python-zeroconf/python-zeroconf/commit/5f52438f4c0851bb1a3b78575c0c28e0b6ce561d)) - -* Update changelog for 0.36.2 (#973) 
([`b4efa33`](https://github.com/python-zeroconf/python-zeroconf/commit/b4efa33b4ef6d5292d8d477da4258d99d22c4e84)) - -* Include NSEC records for non-existant types when responding with addresses (#972) - -Implements datatracker.ietf.org/doc/html/rfc6762#section-6.2 ([`7a20fd3`](https://github.com/python-zeroconf/python-zeroconf/commit/7a20fd3bc8dc0a703619ca9413faf674b3d7a111)) - -* Add support for writing NSEC records (#971) ([`768a23c`](https://github.com/python-zeroconf/python-zeroconf/commit/768a23c656e3f091ecbecbb6b380b5becbbf9674)) - ## v0.36.1 (2021-08-29) -### Unknown - -* Bump version: 0.36.0 → 0.36.1 ([`e8d8401`](https://github.com/python-zeroconf/python-zeroconf/commit/e8d84017b750ab5f159abc7225f9922d84a8f9fd)) - -* Update changelog for 0.36.1 (#970) ([`d504333`](https://github.com/python-zeroconf/python-zeroconf/commit/d5043337de39a11b2b241e9247a34c41c0c7c2bc)) - -* Skip goodbye packets for addresses when there is another service registered with the same name (#968) ([`d9d3208`](https://github.com/python-zeroconf/python-zeroconf/commit/d9d3208eed84b71b61c458f2992b08b5db259da1)) - -* Fix equality and hash for dns records with the unique bit (#969) ([`574e241`](https://github.com/python-zeroconf/python-zeroconf/commit/574e24125a536dc4fb9a1784797efd495ceb1fdf)) - ## v0.36.0 (2021-08-16) -### Unknown - -* Bump version: 0.35.1 → 0.36.0 ([`e4985c7`](https://github.com/python-zeroconf/python-zeroconf/commit/e4985c7dd2088d4da9fc2be25f67beb65f548e95)) - -* Update changelog for 0.36.0 (#966) ([`bc50bce`](https://github.com/python-zeroconf/python-zeroconf/commit/bc50bce04b650756fef3f8b1cce6defbc5dccee5)) - -* Create full IPv6 address tuple to enable service discovery on Windows (#965) ([`733eb3a`](https://github.com/python-zeroconf/python-zeroconf/commit/733eb3a31ed40c976f5fa4b7b3baf055589ef36b)) - ## v0.35.1 (2021-08-15) -### Unknown - -* Bump version: 0.35.0 → 0.35.1 
([`4281221`](https://github.com/python-zeroconf/python-zeroconf/commit/4281221b668123b770c6d6b0835dd876d1d2f22d)) - -* Fix formatting in 0.35.1 changelog entry (#964) ([`c7c7d47`](https://github.com/python-zeroconf/python-zeroconf/commit/c7c7d4778e9962af5180616af73977d8503e4762)) - -* Update changelog for 0.35.1 (#963) ([`f7bebfe`](https://github.com/python-zeroconf/python-zeroconf/commit/f7bebfe09aeb9bb973dbe6ba147b682472b64246)) - -* Cache DNS record and question hashes (#960) ([`d4c109c`](https://github.com/python-zeroconf/python-zeroconf/commit/d4c109c3abffcba2331a7f9e7bf45c6477a8d4e8)) - -* Fix flakey test: test_future_answers_are_removed_on_send (#962) ([`3b482e2`](https://github.com/python-zeroconf/python-zeroconf/commit/3b482e229d37b85e59765e023ddbca77aa513731)) - -* Add coverage for sending answers removes future queued answers (#961) - -- If we send an answer that is queued to be sent out in the future - we should remove it from the queue as the question has already - been answered and we do not want to generate additional traffic. ([`2d1b832`](https://github.com/python-zeroconf/python-zeroconf/commit/2d1b8329ad39b94f9f4aa5f53caf3bb2813879ca)) - -* Only reschedule types if the send next time changes (#958) -- When the PTR response was seen again, the timer was being canceled and - rescheduled even if the timer was for the same time. While this did - not cause any breakage, it is quite inefficient. 
([`7b125a1`](https://github.com/python-zeroconf/python-zeroconf/commit/7b125a1a0a109ef29d0a4e736a27645a7e9b4207)) +## v0.35.0 (2021-08-13) -## v0.35.0 (2021-08-13) +## v0.34.3 (2021-08-09) -### Unknown -* Bump version: 0.34.3 → 0.35.0 ([`1e60e13`](https://github.com/python-zeroconf/python-zeroconf/commit/1e60e13ae15a5b533a48cc955b98951eedd04dbb)) +## v0.34.2 (2021-08-09) -* Update changelog for 0.35.0 (#957) ([`dd40437`](https://github.com/python-zeroconf/python-zeroconf/commit/dd40437f4328f4ee36c43239ecf5f484b6ac261e)) -* Reduce chance of accidental synchronization of ServiceInfo requests (#955) ([`c772936`](https://github.com/python-zeroconf/python-zeroconf/commit/c77293692062ea701037e06c1cf5497f019ae2f2)) +## v0.34.1 (2021-08-08) -* Send unicast replies on the same socket the query was received (#952) -When replying to a QU question, we do not know if the sending host is reachable -from all of the sending sockets. We now avoid this problem by replying via -the receiving socket. This was the existing behavior when `InterfaceChoice.Default` -is set. +## v0.34.0 (2021-08-08) -This change extends the unicast relay behavior to used with `InterfaceChoice.Default` -to apply when `InterfaceChoice.All` or interfaces are explicitly passed when -instantiating a `Zeroconf` instance. -Fixes #951 ([`5fb3e20`](https://github.com/python-zeroconf/python-zeroconf/commit/5fb3e202c06e3a0d30e3c7824397d8e8a9f52555)) +## v0.33.4 (2021-08-06) -* Sort responses to increase chance of name compression (#954) -- When building an outgoing response, sort the names together - to increase the likelihood of name compression. 
In testing - this reduced the number of packets for large responses - (from 7 packets to 6) ([`ebc23ee`](https://github.com/python-zeroconf/python-zeroconf/commit/ebc23ee5e9592dd7f0235cd57f9b3ad727ec8bff)) +## v0.33.3 (2021-08-05) -## v0.34.3 (2021-08-09) +## v0.33.2 (2021-07-28) -### Unknown -* Bump version: 0.34.2 → 0.34.3 ([`9d69d18`](https://github.com/python-zeroconf/python-zeroconf/commit/9d69d18713bdfab53762a6b8c3aff7fd72ebd025)) +## v0.33.1 (2021-07-18) -* Update changelog for 0.34.3 (#950) ([`23b00e9`](https://github.com/python-zeroconf/python-zeroconf/commit/23b00e983b2e8335431dcc074935f379fd399d46)) -* Fix sending immediate multicast responses (#949) +## v0.33.0 (2021-07-18) -- Fixes a typo in handle_assembled_query that prevented immediate - responses from being sent. ([`02af7f7`](https://github.com/python-zeroconf/python-zeroconf/commit/02af7f78d2e5eabcc5cce8238546ee5170951b28)) +## v0.32.1 (2021-07-05) -## v0.34.2 (2021-08-09) -### Unknown +## v0.32.0 (2021-06-30) -* Bump version: 0.34.1 → 0.34.2 ([`6c21f68`](https://github.com/python-zeroconf/python-zeroconf/commit/6c21f6802b58d949038e9c8501ea204eeda57a16)) -* Update changelog for 0.34.2 (#947) ([`b87f493`](https://github.com/python-zeroconf/python-zeroconf/commit/b87f4934b39af02f26bbbfd6f372c7154fe95906)) +## v0.29.0 (2021-03-25) -* Ensure ServiceInfo requests can be answered with the default timeout with network protection (#946) -- Adjust the time windows to ensure responses that have triggered the -protection against against excessive packet flooding due to -software bugs or malicious attack described in RFC6762 section 6 -can respond in under 1350ms to ensure ServiceInfo can ask two -questions within the default timeout of 3000ms ([`6d7266d`](https://github.com/python-zeroconf/python-zeroconf/commit/6d7266d0e1e6dcb950456da0354b4c43fd5c0ecb)) +## v0.28.8 (2021-01-04) -* Coalesce aggregated multicast answers when the random delay is shorter than the last scheduled response (#945) -- Reduces 
traffic when we already know we will be sending a group of answers - inside the random delay window described in - https://datatracker.ietf.org/doc/html/rfc6762#section-6.3 +## v0.28.7 (2020-12-13) -closes #944 ([`9a5164a`](https://github.com/python-zeroconf/python-zeroconf/commit/9a5164a7a3231903537231bfb56479e617355f92)) +## v0.28.6 (2020-10-13) -## v0.34.1 (2021-08-08) -### Unknown +## v0.28.5 (2020-09-11) -* Bump version: 0.34.0 → 0.34.1 ([`7878a9e`](https://github.com/python-zeroconf/python-zeroconf/commit/7878a9eed93a8ec2396d8450389a08bf54bd5693)) -* Update changelog for 0.34.1 (#943) ([`9942484`](https://github.com/python-zeroconf/python-zeroconf/commit/9942484172d7a79fe84c47924538c2c02fde7264)) +## v0.28.4 (2020-09-06) -* Ensure multicast aggregation sends responses within 620ms (#942) ([`de96e2b`](https://github.com/python-zeroconf/python-zeroconf/commit/de96e2bf01af68d754bb7c71da949e30de88a77b)) +## v0.28.3 (2020-08-31) -## v0.34.0 (2021-08-08) -### Unknown +## v0.28.2 (2020-08-27) -* Bump version: 0.33.4 → 0.34.0 ([`549ac3d`](https://github.com/python-zeroconf/python-zeroconf/commit/549ac3de27eb3924cc7967088c3d316184722b9d)) -* Update changelog for 0.34.0 (#941) ([`342532e`](https://github.com/python-zeroconf/python-zeroconf/commit/342532e1d13ac24673735dc467a79edebdfb9362)) +## v0.28.1 (2020-08-17) -* Implement Multicast Response Aggregation (#940) -- Responses are now aggregated when possible per rules in RFC6762 section 6.4 -- Responses that trigger the protection against against excessive packet flooding due to - software bugs or malicious attack described in RFC6762 section 6 are delayed instead of discarding as it was causing responders that implement Passive Observation Of Failures (POOF) to evict the records. -- Probe responses are now always sent immediately as there were cases where they would fail to be answered in time to defend a name. 
+## v0.28.0 (2020-07-07) -closes #939 ([`55efb41`](https://github.com/python-zeroconf/python-zeroconf/commit/55efb4169b588cef093f3065f3a894878ae8bd95)) +## v0.27.1 (2020-06-05) -## v0.33.4 (2021-08-06) -### Unknown +## v0.27.0 (2020-05-27) -* Bump version: 0.33.3 → 0.33.4 ([`7bbacd5`](https://github.com/python-zeroconf/python-zeroconf/commit/7bbacd57a134c12ee1fb61d8318b312dfdae18f8)) -* Update changelog for 0.33.4 (#937) ([`858605d`](https://github.com/python-zeroconf/python-zeroconf/commit/858605db52f909d41198df76130597ff93f64cdd)) +## v0.26.3 (2020-05-26) -* Ensure zeroconf can be loaded when the system disables IPv6 (#933) -Co-authored-by: J. Nick Koston ([`496ac44`](https://github.com/python-zeroconf/python-zeroconf/commit/496ac44e99b56485cc9197490e71bb2dd7bec6f9)) +## v0.26.1 (2020-05-06) -## v0.33.3 (2021-08-05) +## v0.26.0 (2020-04-26) -### Unknown -* Bump version: 0.33.2 → 0.33.3 ([`206671a`](https://github.com/python-zeroconf/python-zeroconf/commit/206671a1237ee8237d302b04c5a84158fed1d50b)) +## v0.25.1 (2020-04-14) -* Update changelog for 0.33.3 (#936) ([`6a140cc`](https://github.com/python-zeroconf/python-zeroconf/commit/6a140cc6b9c7e50e572456662d2f76f6fbc2ed25)) -* Add support for forward dns compression pointers (#934) +## v0.25.0 (2020-04-03) -- nslookup supports these and some implementations (likely avahi) - will generate them -- Careful attention was given to make sure we detect loops - and do not create anti-patterns described in - https://github.com/Forescout/namewreck/blob/main/rfc/draft-dashevskyi-dnsrr-antipatterns-00.txt +## v0.24.5 (2020-03-08) -Fixes https://github.com/home-assistant/core/issues/53937 -Fixes https://github.com/home-assistant/core/issues/46985 -Fixes https://github.com/home-assistant/core/issues/53668 -Fixes #308 ([`5682a4c`](https://github.com/python-zeroconf/python-zeroconf/commit/5682a4c3c89043bf8a10e79232933ada5ab71972)) -* Provide sockname when logging a protocol error (#935) 
([`319992b`](https://github.com/python-zeroconf/python-zeroconf/commit/319992bb093d9b965976bad724512d9bcd05aca7)) +## v0.24.4 (2019-12-30) -## v0.33.2 (2021-07-28) +## v0.24.3 (2019-12-23) -### Unknown -* Bump version: 0.33.1 → 0.33.2 ([`4d30c25`](https://github.com/python-zeroconf/python-zeroconf/commit/4d30c25fe57425bcae36a539006e44941ef46e2c)) +## v0.24.2 (2019-12-17) -* Update changelog for 0.33.2 (#931) ([`c80b5f7`](https://github.com/python-zeroconf/python-zeroconf/commit/c80b5f7253e521928d6f7e54681675be59371c6c)) -* Handle duplicate goodbye answers in the same packet (#928) +## v0.24.1 (2019-12-16) -- Solves an exception being thrown when we tried to remove the known answer - from the cache when the second goodbye answer in the same packet was processed -- We previously swallowed all exceptions on cache removal so this was not - visible until 0.32.x which removed the broad exception catch +## v0.24.0 (2019-11-19) -Fixes #926 ([`97e0b66`](https://github.com/python-zeroconf/python-zeroconf/commit/97e0b669be60f716e45e963f1bcfcd35b7213626)) -* Skip ipv6 interfaces that return ENODEV (#930) ([`73e3d18`](https://github.com/python-zeroconf/python-zeroconf/commit/73e3d1865f4167e7c9f7c23ec4cc7ebfac40f512)) +## v0.23.0 (2019-06-04) -* Remove some pylint workarounds (#925) ([`1247acd`](https://github.com/python-zeroconf/python-zeroconf/commit/1247acd2e6f6154a4e5f2e27a820c55329391d8e)) +## v0.22.0 (2019-04-27) -## v0.33.1 (2021-07-18) -### Unknown +## v0.21.3 (2018-09-21) -* Bump version: 0.33.0 → 0.33.1 ([`6774de3`](https://github.com/python-zeroconf/python-zeroconf/commit/6774de3e7f8b461ccb83675bbb05d47949df487b)) -* Update changelog for 0.33.1 (#924) +## v0.21.2 (2018-09-20) -- Fixes overly restrictive directory permissions reported in #923 ([`ed80333`](https://github.com/python-zeroconf/python-zeroconf/commit/ed80333896c0710857cc46b5af4d7ba3a81e07c8)) +## v0.21.1 (2018-09-17) -## v0.33.0 (2021-07-18) -### Unknown +## v0.21.0 (2018-09-16) -* Bump version: 0.32.1 → 
0.33.0 ([`cfb28aa`](https://github.com/python-zeroconf/python-zeroconf/commit/cfb28aaf134e566d8a89b397967d1ad1ec66de35)) -* Update changelog for 0.33.0 release (#922) ([`e4a9655`](https://github.com/python-zeroconf/python-zeroconf/commit/e4a96550398c408c3e1e6944662cc3093db912a7)) +## v0.20.0 (2018-02-21) -* Fix examples/async_registration.py attaching to the correct loop (#921) ([`b0b23f9`](https://github.com/python-zeroconf/python-zeroconf/commit/b0b23f96d3b33a627a0d071557a36af97a65dae4)) -* Add support for bump2version (#920) ([`2e00002`](https://github.com/python-zeroconf/python-zeroconf/commit/2e0000252f0aecad8b62a649128326a6528b6824)) +## v0.19.1 (2017-06-13) -* Update changelog for 0.33.0 release (#919) ([`96be961`](https://github.com/python-zeroconf/python-zeroconf/commit/96be9618ede3c941e23cb23398b9aed11bed1ffa)) -* Let connection_lost close the underlying socket (#918) +## v0.19.0 (2017-03-21) -- The socket was closed during shutdown before asyncio's connection_lost - handler had a chance to close it which resulted in a traceback on - win32. 
-- Fixes #917 ([`919b096`](https://github.com/python-zeroconf/python-zeroconf/commit/919b096d6260a4f9f4306b9b4dddb5b026b49462)) +## v0.18.0 (2017-02-03) -* Reduce complexity of DNSRecord (#915) -- Use constants for calculations in is_expired/is_stale/is_recent ([`b6eaf72`](https://github.com/python-zeroconf/python-zeroconf/commit/b6eaf7249f386f573b0876204ccfdfa02ee9ac5b)) +## v0.17.7 (2017-02-01) -* Remove Zeroconf.wait as its now unused in the codebase (#914) ([`aa71084`](https://github.com/python-zeroconf/python-zeroconf/commit/aa7108481235cc018600d096b093c785447d8769)) -* Switch periodic cleanup task to call_later (#913) +## v0.17.6 (2016-07-08) -- Simplifies AsyncEngine to avoid the long running - task ([`38eb271`](https://github.com/python-zeroconf/python-zeroconf/commit/38eb271c952e89260ecac6fac3e723f4206c4648)) +### Testing -* Update changelog for 0.33.0 (#912) ([`b2a7a00`](https://github.com/python-zeroconf/python-zeroconf/commit/b2a7a00f82d401066166776cecf0857ebbdb56ad)) +- Added test for DNS-SD subtype discovery + ([`914241b`](https://github.com/python-zeroconf/python-zeroconf/commit/914241b92c3097669e1e8c1a380f6c2f23a14cf8)) -* Remove locking from ServiceRegistry (#911) -- All calls to the ServiceRegistry are now done in async context - which makes them thread safe. Locking is no longer needed. 
([`2d3da7a`](https://github.com/python-zeroconf/python-zeroconf/commit/2d3da7a77699f88bd90ebc09d36b333690385f85)) +## v0.17.5 (2016-03-14) -* Remove duplicate unregister_all_services code (#910) ([`e63ca51`](https://github.com/python-zeroconf/python-zeroconf/commit/e63ca518c91cda7b9f460436aee4fdac1a7b9567)) -* Rename DNSNsec.next to DNSNsec.next_name (#908) ([`69942d5`](https://github.com/python-zeroconf/python-zeroconf/commit/69942d5bfb4d92c6a312aea7c17f63fce0401e23)) +## v0.17.4 (2015-09-22) -* Upgrade syntax to python 3.6 (#907) ([`0578731`](https://github.com/python-zeroconf/python-zeroconf/commit/057873128ff05a0b2d6eae07510e23d705d10bae)) -* Implement NSEC record parsing (#903) +## v0.17.3 (2015-08-19) -- This is needed for negative responses - https://datatracker.ietf.org/doc/html/rfc6762#section-6.1 ([`bc9e9cf`](https://github.com/python-zeroconf/python-zeroconf/commit/bc9e9cf8a5b997ca924730ed091a829f4f961ca3)) -* Centralize running coroutines from threads (#906) +## v0.17.2 (2015-07-12) -- Cleanup to ensure all coros we run from a thread - use _LOADED_SYSTEM_TIMEOUT ([`9399c57`](https://github.com/python-zeroconf/python-zeroconf/commit/9399c57bb2b280c7b433e7fbea7cca2c2f4417ee)) -* Reduce duplicate code between zeroconf.asyncio and zeroconf._core (#904) ([`e417fc0`](https://github.com/python-zeroconf/python-zeroconf/commit/e417fc0f5ed7eaa47a0dcaffdbc6fe335bfcc058)) +## v0.17.1 (2015-04-10) -* Disable N818 in flake8 (#905) -- We cannot rename these exceptions now without a breaking change - as they have existed for many years ([`f8af0fb`](https://github.com/python-zeroconf/python-zeroconf/commit/f8af0fb251938dcb410127b2af2b8b407989aa08)) - - -## v0.32.1 (2021-07-05) - -### Unknown - -* Release version 0.32.1 ([`675fd6f`](https://github.com/python-zeroconf/python-zeroconf/commit/675fd6fc959e76e4e3690e5c7a02db269ca9ef60)) - -* Fix the changelog's one sentence's tense 
([`fc089be`](https://github.com/python-zeroconf/python-zeroconf/commit/fc089be1f412d991f44daeecd0944198d3a638a5)) - -* Update changelog (#899) ([`a93301d`](https://github.com/python-zeroconf/python-zeroconf/commit/a93301d0fd493bf18147187bf8efed1a4ea02214)) - -* Increase timeout in ServiceInfo.request to handle loaded systems (#895) - -It can take a few seconds for a loaded system to run the `async_request` coroutine when the event loop is busy or the system is CPU bound (example being Home Assistant startup). We now add -an additional `_LOADED_SYSTEM_TIMEOUT` (10s) to the `run_coroutine_threadsafe` calls to ensure the coroutine has the total amount of time to run up to its internal timeout (default of 3000ms). - -Ten seconds is a bit large of a timeout; however, it's only used in cases where we wrap other timeouts. We now expect the only instance the `run_coroutine_threadsafe` result timeout will happen in a production circumstance is when someone is running a `ServiceInfo.request()` in a thread and another thread calls `Zeroconf.close()` at just the right moment that the future is never completed unless the system is so loaded that it is nearly unresponsive. - -The timeout for `run_coroutine_threadsafe` is the maximum time a thread can cleanly shut down when zeroconf is closed out in another thread, which should always be longer than the underlying thread operation. ([`56c7d69`](https://github.com/python-zeroconf/python-zeroconf/commit/56c7d692d67b7f56c386a7f1f4e45ebfc4e8366a)) - -* Add test for running sync code within executor (#894) ([`90bc8ca`](https://github.com/python-zeroconf/python-zeroconf/commit/90bc8ca8dce1af26ea81c5d6ecb17cf6ea664a71)) - - -## v0.32.0 (2021-06-30) - -### Unknown - -* Fix readme formatting - -It wasn't proper reStructuredText before: - - % twine check dist/* - Checking dist/zeroconf-0.32.0-py3-none-any.whl: FAILED - `long_description` has syntax errors in markup and would not be rendered on PyPI.
- line 381: Error: Unknown target name: "async". - warning: `long_description_content_type` missing. defaulting to `text/x-rst`. - Checking dist/zeroconf-0.32.0.tar.gz: FAILED - `long_description` has syntax errors in markup and would not be rendered on PyPI. - line 381: Error: Unknown target name: "async". - warning: `long_description_content_type` missing. defaulting to `text/x-rst`. ([`82ff150`](https://github.com/python-zeroconf/python-zeroconf/commit/82ff150e0a72a7e20823a0c805f48f117bf1e274)) - -* Release version 0.32.0 ([`ea7bc85`](https://github.com/python-zeroconf/python-zeroconf/commit/ea7bc8592e418332e5b9973007698d3cd79754d9)) - -* Reformat changelog to match prior versions (#892) ([`34f6e49`](https://github.com/python-zeroconf/python-zeroconf/commit/34f6e498dec18b84dab1c27c75348916bceef8e6)) - -* Fix spelling and grammar errors in 0.32.0 changelog (#891) ([`ba235dd`](https://github.com/python-zeroconf/python-zeroconf/commit/ba235dd8bc65de4f461f76fd2bf4647844437e1a)) - -* Rewrite 0.32.0 changelog in past tense (#890) ([`0d91156`](https://github.com/python-zeroconf/python-zeroconf/commit/0d911568d367f1520acb19bdf830fe188b6ffb70)) - -* Reformat backwards incompatible changes to match previous versions (#889) ([`9abb40c`](https://github.com/python-zeroconf/python-zeroconf/commit/9abb40cf331bc0acc5fdbb03fce5c958cec8b41e)) - -* Remove extra newlines between changelog entries (#888) ([`d31fd10`](https://github.com/python-zeroconf/python-zeroconf/commit/d31fd103cc942574f7fbc75e5346cc3d3eaf7ee1)) - -* Collapse changelog for 0.32.0 (#887) ([`14cf936`](https://github.com/python-zeroconf/python-zeroconf/commit/14cf9362c9ae947bcee5911b9c593ca76f50d529)) - -* Disable pylint in the CI (#886) ([`b9dc12d`](https://github.com/python-zeroconf/python-zeroconf/commit/b9dc12dee8b4a7f6d8e1f599948bf16e5e7fab47)) - -* Revert name change of zeroconf.asyncio to zeroconf.aio (#885) - -- Now that `__init__.py` no longer needs to import `asyncio`, - the name conflict is not a 
concern. - -Fixes #883 ([`b9eae5a`](https://github.com/python-zeroconf/python-zeroconf/commit/b9eae5a6f8f86bfe60446f133cad5fc33d072959)) - -* Update changelog (#879) ([`be1d3bb`](https://github.com/python-zeroconf/python-zeroconf/commit/be1d3bbe0ee12254d11e3d8b75c2faba950fabce)) - -* Add coverage to ensure loading zeroconf._logger does not override logging level (#878) ([`86e2ab9`](https://github.com/python-zeroconf/python-zeroconf/commit/86e2ab9db3c7bd47b6e81837d594280ced3b30f9)) - -* Add coverage for disconnected adapters in add_multicast_member (#877) ([`ab83819`](https://github.com/python-zeroconf/python-zeroconf/commit/ab83819ad6b6ff727a894271dde3e4be6c28cb2c)) - -* Break apart net_socket for easier testing (#875) ([`f0770fe`](https://github.com/python-zeroconf/python-zeroconf/commit/f0770fea80b00f2340815fa983968f68a15c702e)) - -* Fix flapping test test_integration_with_listener_class (#876) ([`decd8a2`](https://github.com/python-zeroconf/python-zeroconf/commit/decd8a26aa8a89ceefcd9452fe562f2eeaa3fecb)) - -* Add coverage to ensure unrelated A records do not generate ServiceBrowser callbacks (#874) - -closes #871 ([`471bacd`](https://github.com/python-zeroconf/python-zeroconf/commit/471bacd3200aa1216054c0e52b2e5842e9760aa0)) - -* Update changelog (#870) ([`972da99`](https://github.com/python-zeroconf/python-zeroconf/commit/972da99e4dd9d0fe1c1e0786da45d66fd43a717a)) - -* Fix deadlock when event loop is shutdown during service registration (#869) ([`4ed9036`](https://github.com/python-zeroconf/python-zeroconf/commit/4ed903698b10f434cfbbe601998f27c10d2fb9db)) - -* Break apart new_socket to be testable (#867) ([`22ff6b5`](https://github.com/python-zeroconf/python-zeroconf/commit/22ff6b56d7b6531d2af5c50dca66fd2be2b276f4)) - -* Add test coverage to ensure ServiceBrowser ignores unrelated updates (#866) ([`dcf18c8`](https://github.com/python-zeroconf/python-zeroconf/commit/dcf18c8a32652c6aa70af180b6a5261f4277faa9)) - -* Add test coverage for duplicate properties in a
TXT record (#865) ([`6ef65fc`](https://github.com/python-zeroconf/python-zeroconf/commit/6ef65fc7cafc3d4089a2b943da224c6cb027b4b0)) - -* Update changelog (#864) ([`c64064a`](https://github.com/python-zeroconf/python-zeroconf/commit/c64064ad3b38a40775637c0fd8877d9d00d2d537)) - -* Ensure protocol and sending errors are logged once (#862) ([`c516919`](https://github.com/python-zeroconf/python-zeroconf/commit/c516919064687551299f23e23bf0797888020041)) - -* Remove unreachable code in AsyncListener.datagram_received (#863) ([`f536869`](https://github.com/python-zeroconf/python-zeroconf/commit/f5368692d7907e440ca81f0acee9744f79dbae80)) - -* Add unit coverage for shutdown_loop (#860) ([`af83c76`](https://github.com/python-zeroconf/python-zeroconf/commit/af83c766c2ae72bd23184c6f6300e4d620c7b3e8)) - -* Make a dispatch dict for ServiceStateChange listeners (#859) ([`57cccc4`](https://github.com/python-zeroconf/python-zeroconf/commit/57cccc4dcbdc9df52672297968ccb55054122049)) - -* Cleanup coverage data (#858) ([`3eb7be9`](https://github.com/python-zeroconf/python-zeroconf/commit/3eb7be95fd6cd4960f96f29aa72fc45347c57b6e)) - -* Fix changelog formatting (#857) ([`59247f1`](https://github.com/python-zeroconf/python-zeroconf/commit/59247f1c44b485bf51d4a8d3e3966b9faf40cf82)) - -* Update changelog (#856) ([`cb2e237`](https://github.com/python-zeroconf/python-zeroconf/commit/cb2e237b6f1af0a83bc7352464562cdb7bbcac14)) - -* Only run linters on Linux in CI (#855) - -- The github MacOS and Windows runners are slower and - will have the same results as the Linux runners so there - is no need to wait for them. 
- -closes #854 ([`03411f3`](https://github.com/python-zeroconf/python-zeroconf/commit/03411f35d82752d5d2633a67db132a011098d9e6)) - -* Speed up test_verify_name_change_with_lots_of_names under PyPy (#853) - -fixes #840 ([`0cd876f`](https://github.com/python-zeroconf/python-zeroconf/commit/0cd876f5a42699aeb0176380ba4cca4d8a536df3)) - -* Make ServiceInfo first question QU (#852) - -- We want an immediate response when making a request with ServiceInfo - by asking a QU question, most responders will not delay the response - and respond right away to our question. This also improves compatibility - with split networks as we may not have been able to see the response - otherwise. If the responder has not multicast the record recently - it may still choose to do so in addition to responding via unicast - -- Reduces traffic when there are multiple zeroconf instances running - on the network running ServiceBrowsers - -- If we don't get an answer on the first try, we ask a QM question - in the event we can't receive a unicast response for some reason - -- This change puts ServiceInfo inline with ServiceBrowser which - also asks the first question as QU since ServiceInfo is commonly - called from ServiceBrowser callbacks - -closes #851 ([`76e0b05`](https://github.com/python-zeroconf/python-zeroconf/commit/76e0b05ca9c601bd638817bf68ca8d981f1d65f8)) - -* Update changelog (#850) ([`8c9d1d8`](https://github.com/python-zeroconf/python-zeroconf/commit/8c9d1d8964d9226d5d3ac38bec908e930954b369)) - -* Switch ServiceBrowser query scheduling to use call_later instead of a loop (#849) - -- Simplifies scheduling as there is no more need to sleep in a loop as - we now schedule future callbacks with call_later - -- Simplifies cancelation as there is no more coroutine to cancel, only a timer handle - We no longer have to handle the canceled error and cleaning up the awaitable - -- Solves the infrequent test failures in test_backoff and test_integration 
([`a8c1623`](https://github.com/python-zeroconf/python-zeroconf/commit/a8c16231881de43adedbedbc3f1ea707c0b457f2)) - -* Fix spurious failures in ZeroconfServiceTypes tests (#848) - -- These tests ran the same test twice in 0.5s and would - trigger the duplicate packet suppression. Rather then - making them run longer, we can disable the suppression - for the test. ([`9f71e5b`](https://github.com/python-zeroconf/python-zeroconf/commit/9f71e5b7364d4a23492cafe4f49a5c2acda4178d)) - -* Fix thread safety in handlers test (#847) ([`182c68f`](https://github.com/python-zeroconf/python-zeroconf/commit/182c68ff11ba381444a708e17560e920ae1849ef)) - -* Update changelog (#845) ([`72502c3`](https://github.com/python-zeroconf/python-zeroconf/commit/72502c303a1a889cf84906b8764fd941a840e6d3)) - -* Increase timeout in test_integration (#844) - -- The github macOS runners tend to be a bit loaded and these - sometimes fail because of it ([`dd86f2f`](https://github.com/python-zeroconf/python-zeroconf/commit/dd86f2f9fee4bbaebce956b330c1837a6e9c6c99)) - -* Use AAAA records instead of A records in test_integration_with_listener_ipv6 (#843) ([`688c518`](https://github.com/python-zeroconf/python-zeroconf/commit/688c5184dce67e5af857c138639ced4bdcec1e57)) - -* Fix ineffective patching on PyPy (#842) - -- Use patch in all places so its easier to find where we need - to clean up ([`ecd9c94`](https://github.com/python-zeroconf/python-zeroconf/commit/ecd9c941810e4b413b20dc55929b3ae1a7e57b27)) - -* Limit duplicate packet suppression to 1s intervals (#841) - -- Only suppress duplicate packets that happen within the same - second. Legitimate queriers will retry the question if they - are suppressed. 
The limit was reduced to one second to be - in line with rfc6762: - - To protect the network against excessive packet flooding due to - software bugs or malicious attack, a Multicast DNS responder MUST NOT - (except in the one special case of answering probe queries) multicast - a record on a given interface until at least one second has elapsed - since the last time that record was multicast on that particular ([`7fb11bf`](https://github.com/python-zeroconf/python-zeroconf/commit/7fb11bfc03c06cbe9ed5a4303b3e632d69665bb1)) - -* Skip dependencies install in CI on cache hit (#839) - -There is no need to reinstall dependencies in the CI when we have a cache hit. ([`937be52`](https://github.com/python-zeroconf/python-zeroconf/commit/937be522a42830b27326b5253d49003b57998bc9)) - -* Adjust restore key for CI cache (#838) ([`3fdd834`](https://github.com/python-zeroconf/python-zeroconf/commit/3fdd8349553c160586fb6831c9466410f19a3308)) - -* Make multipacket known answer suppression per interface (#836) - -- The suppression was happening per instance of Zeroconf instead - of per interface. 
Since the same network can be seen on multiple - interfaces (usually and wifi and ethernet), this would confuse the - multi-packet known answer supression since it was not expecting - to get the same data more than once - -Fixes #835 ([`7297f3e`](https://github.com/python-zeroconf/python-zeroconf/commit/7297f3ef71c9984296c3e28539ce7a4b42f04a05)) - -* Ensure coverage.xml is written for codecov (#837) ([`0b1abbc`](https://github.com/python-zeroconf/python-zeroconf/commit/0b1abbc8f2b09235cfd44e5586024c7b82dc5289)) - -* Wait for startup in test_integration (#834) ([`540c652`](https://github.com/python-zeroconf/python-zeroconf/commit/540c65218eb9d1aedc88a3d3724af97f39ccb88e)) - -* Cache dependency installs in CI (#833) ([`0bf4f75`](https://github.com/python-zeroconf/python-zeroconf/commit/0bf4f7537a042a00d9d3f815afcdf7ebe29d9f53)) - -* Annotate test failures on github (#831) ([`4039b0b`](https://github.com/python-zeroconf/python-zeroconf/commit/4039b0b755a3d0fe15e4cb1a7cb1592c35e048e1)) - -* Show 20 slowest tests on each run (#832) ([`8230e3f`](https://github.com/python-zeroconf/python-zeroconf/commit/8230e3f40da5d2d152942725d67d5f8c0b8c647b)) - -* Disable duplicate question suppression for test_integration (#830) - -- This test waits until we get 50 known answers. It would - sometimes fail because it could not ask enough - unsuppressed questions in the allowed time. 
([`10f4a7f`](https://github.com/python-zeroconf/python-zeroconf/commit/10f4a7f8d607d09673be56e5709912403503d86b)) - -* Convert test_integration to asyncio to avoid testing threading races (#828) - -Fixes #768 ([`4c4b388`](https://github.com/python-zeroconf/python-zeroconf/commit/4c4b388ba125ad23a03722b30c71da86853fe05a)) - -* Update changelog (#827) ([`82f80c3`](https://github.com/python-zeroconf/python-zeroconf/commit/82f80c301a6324d2f1711ca751e81069e90030ec)) - -* Drop oversize packets before processing them (#826) - -- Oversized packets can quickly overwhelm the system and deny - service to legitimate queriers. In practice this is usually - due to broken mDNS implementations rather than malicious - actors. ([`6298ef9`](https://github.com/python-zeroconf/python-zeroconf/commit/6298ef9078cf2408bc1e57660ee141e882d13469)) - -* Guard against excessive ServiceBrowser queries from PTR records significantly lower than recommended (#824) - -* We now enforce a minimum TTL for PTR records to avoid -ServiceBrowsers generating excessive queries refresh queries. -Apple uses a 15s minimum TTL, however we do not have the same -level of rate limit and safe guards so we use 1/4 of the recommended value. ([`7f6d003`](https://github.com/python-zeroconf/python-zeroconf/commit/7f6d003210244b6f7df133bd474d7ddf64098422)) - -* Update changelog (#822) ([`4a82769`](https://github.com/python-zeroconf/python-zeroconf/commit/4a8276941a07188180ee31dc4ca578306c2df92b)) - -* Only wake up the query loop when there is a change in the next query time (#818) - -The ServiceBrowser query loop (async_browser_task) was being awoken on -every packet because it was using `zeroconf.async_wait` which wakes -up on every new packet. We only need to awaken the loop when the next time -we are going to send a query has changed. 
- -fixes #814 fixes #768 ([`4062fe2`](https://github.com/python-zeroconf/python-zeroconf/commit/4062fe21d8baaad36960f8cae0f59ac7083a6b55)) - -* Fix reliability of tests that patch sending (#820) ([`a7b4f8e`](https://github.com/python-zeroconf/python-zeroconf/commit/a7b4f8e070de69db1ed872e2ff7a953ec624394c)) - -* Fix default v6_flow_scope argument with tests that mock send (#819) ([`f9d3529`](https://github.com/python-zeroconf/python-zeroconf/commit/f9d35299a39fee0b1632a3b2ac00170f761d53b1)) - -* Turn on logging in the types test (#816) - -- Will be needed to track down #813 ([`ffd2532`](https://github.com/python-zeroconf/python-zeroconf/commit/ffd2532f72a59ede86732b310512774b8fa344e7)) - -* New ServiceBrowsers now request QU in the first outgoing when unspecified (#812) ([`e32bb5d`](https://github.com/python-zeroconf/python-zeroconf/commit/e32bb5d98be0dc7ed130224206a4de699bcd68e3)) - -* Update changelog (#811) ([`13c558c`](https://github.com/python-zeroconf/python-zeroconf/commit/13c558cf3f40e52a13347a39b050e49a9241c269)) - -* Simplify wait_event_or_timeout (#810) - -- This function always did the same thing on timeout and - wait complete so we can use the same callback. This - solves the CI failing due to the test coverage flapping - back and forth as the timeout would rarely happen. ([`d4c8f0d`](https://github.com/python-zeroconf/python-zeroconf/commit/d4c8f0d3ffdcdc609810aca383492a57f9e1a723)) - -* Make DNSHinfo and DNSAddress use the same match order as DNSPointer and DNSText (#808) - -We want to check the data that is most likely to be unique first -so we can reject the __eq__ as soon as possible.
([`f9bbbce`](https://github.com/python-zeroconf/python-zeroconf/commit/f9bbbce388f2c6c24109c15ef843c10eeccf008f)) - -* Format tests/services/test_info.py with newer black (#809) ([`0129ac0`](https://github.com/python-zeroconf/python-zeroconf/commit/0129ac061db4a950f7bddf1084309e44aaabdbdf)) - -* Qualify IPv6 link-local addresses with scope_id (#343) - -Co-authored-by: Lokesh Prajapati -Co-authored-by: de Angelis, Antonio - -When a service is advertised on an IPv6 address where -the scope is link local, i.e. fe80::/64 (see RFC 4007) -the resolved IPv6 address must be extended with the -scope_id that identifies through the "%" symbol the -local interface to be used when routing to that address. -A new API `parsed_scoped_addresses()` is provided to -return qualified addresses to avoid breaking compatibility -on the existing parsed_addresses(). ([`05bb21b`](https://github.com/python-zeroconf/python-zeroconf/commit/05bb21b9b43f171e30b48fad6a756df49162b557)) - -* Tag 0.32.0b3 (#805) ([`5dccf34`](https://github.com/python-zeroconf/python-zeroconf/commit/5dccf3496a9bd4c268da4c39aab545ddcd50ac57)) - -* Update changelog (#804) ([`59e4bd2`](https://github.com/python-zeroconf/python-zeroconf/commit/59e4bd25347aac254700dc3a1518676042982b3a)) - -* Skip network adapters that are disconnected (#327) - -Co-authored-by: J. Nick Koston ([`df66da2`](https://github.com/python-zeroconf/python-zeroconf/commit/df66da2a943b9ff978602680b746f1edeba048dc)) - -* Add slots to DNS classes (#803) - -- On a busy network that receives many mDNS packets per second, we - will not know the answer to most of the questions being asked. - In this case the creating the DNS* objects are usually garbage - collected within 1s as they are not needed. 
We now set __slots__ - to speed up the creation and destruction of these objects ([`18fe341`](https://github.com/python-zeroconf/python-zeroconf/commit/18fe341300e28ed93d7b5d7ca8e07edb119bd597)) - -* Update changelog (#802) ([`58ae3cf`](https://github.com/python-zeroconf/python-zeroconf/commit/58ae3cf553cd925ac90f3db551f4085ea5bc8b79)) - -* Update changelog (#801) ([`662ed61`](https://github.com/python-zeroconf/python-zeroconf/commit/662ed6166282b9b5b6e83a596c0576a57f8962d2)) - -* Ensure we handle threadsafe shutdown under PyPy with multiple event loops (#800) ([`bbc9124`](https://github.com/python-zeroconf/python-zeroconf/commit/bbc91241a86f3339aa27cae7b4ea2ab9d7c1f37d)) - -* Update changelog (#798) ([`9961dce`](https://github.com/python-zeroconf/python-zeroconf/commit/9961dce598d3c6eeda68a2f874a7a50ec33f819c)) - -* Ensure fresh ServiceBrowsers see old_record as None when replaying the cache (#793) ([`38e66ec`](https://github.com/python-zeroconf/python-zeroconf/commit/38e66ec5ba5fcb96cef17b8949385075807a2fb7)) - -* Update changelog (#797) ([`c36099a`](https://github.com/python-zeroconf/python-zeroconf/commit/c36099a41a71298d58e7afa42ecdc7a54d3b010a)) - -* Pass both the new and old records to async_update_records (#792) - -* Pass the old_record (cached) as the value and the new_record (wire) -to async_update_records instead of forcing each consumer to -check the cache since we will always have the old_record -when generating the async_update_records call. This avoids -the overhead of multiple cache lookups for each listener. 
([`d637d67`](https://github.com/python-zeroconf/python-zeroconf/commit/d637d67378698e0a505be90afbce4e2264b49444)) - -* Remove unused constant from zeroconf._handlers (#796) ([`cb91484`](https://github.com/python-zeroconf/python-zeroconf/commit/cb91484670ba76c8c453dc49502e89195561b31e)) - -* Make add_listener and remove_listener threadsafe (#794) ([`2bfbcbe`](https://github.com/python-zeroconf/python-zeroconf/commit/2bfbcbe9e05b9df98bba66a73deb0041c0e7c13b)) - -* Fix test_tc_bit_defers_last_response_missing failures due to thread safety (#795) ([`6aac0eb`](https://github.com/python-zeroconf/python-zeroconf/commit/6aac0eb0c1e394ec7ee21ddd6e98e446417d0e07)) - -* Ensure outgoing ServiceBrowser questions are seen by the question history (#790) ([`ecad4e8`](https://github.com/python-zeroconf/python-zeroconf/commit/ecad4e84c44ffd21dbf15e969c08f7b3376b131c)) - -* Update changelog (#788) ([`5d23628`](https://github.com/python-zeroconf/python-zeroconf/commit/5d2362825110e9f7a9c9259218a664e2e927e821)) - -* Add async_apple_scanner example (#719) ([`62dc9c9`](https://github.com/python-zeroconf/python-zeroconf/commit/62dc9c91c277bc4755f81597adca030a43d0ce5f)) - -* Add support for requesting QU questions to ServiceBrowser and ServiceInfo (#787) ([`135983c`](https://github.com/python-zeroconf/python-zeroconf/commit/135983cb96a27e3ad3750234286d1d9bfa6ff44f)) - -* Update changelog (#786) ([`3b3ecf0`](https://github.com/python-zeroconf/python-zeroconf/commit/3b3ecf09d2f30ee39c6c29b4d85e000577b2c4b9)) - -* Ensure the queue is created before adding listeners to ServiceBrowser (#785) - -* Ensure the queue is created before adding listeners to ServiceBrowser - -- The callback from the listener could generate an event that would - fire in async context that should have gone to the queue which - could result in the consumer running a sync call in the event loop - and blocking it. 
- -* add comments - -* add comments - -* add comments - -* add comments - -* black ([`97f5b50`](https://github.com/python-zeroconf/python-zeroconf/commit/97f5b502815075f2ff29bee3ace7cde6ad725dfb)) - -* Add a guard to prevent running ServiceInfo.request in async context (#784) - -* Add a guard to prevent running ServiceInfo.request in async context - -* test ([`dd85ae7`](https://github.com/python-zeroconf/python-zeroconf/commit/dd85ae7defd3f195ed0511a2fdb6512326ca0562)) - -* Inline utf8 decoding when processing incoming packets (#782) ([`3be1bc8`](https://github.com/python-zeroconf/python-zeroconf/commit/3be1bc84bff5ee2840040ddff41185b257a1055c)) - -* Drop utf cache from _dns (#781) - -- The cache did not make enough difference to justify the additional - complexity after additional testing was done ([`1b87343`](https://github.com/python-zeroconf/python-zeroconf/commit/1b873436e2d9ff36876a71c48fa697d277fd3ffa)) - -* Switch to using a simple cache instead of lru_cache (#779) ([`7aeafbf`](https://github.com/python-zeroconf/python-zeroconf/commit/7aeafbf3b990ab671ff691b6c20cd410f69808bf)) - -* Reformat test_handlers (#780) ([`767ae8f`](https://github.com/python-zeroconf/python-zeroconf/commit/767ae8f6cd92493f8f43d66edc70c8fd856ed11e)) - -* Fix Responding to Address Queries (RFC6762 section 6.2) (#777) ([`ac9f72a`](https://github.com/python-zeroconf/python-zeroconf/commit/ac9f72a986ae314af0043cae6fb6219baabea7e6)) - -* Implement duplicate question supression (#770) - -https://datatracker.ietf.org/doc/html/rfc6762#section-7.3 ([`c0f4f48`](https://github.com/python-zeroconf/python-zeroconf/commit/c0f4f48e2bb996ce18cb569aa5369356cbc919ff)) - -* Fix deadlock on ServiceBrowser shutdown with PyPy (#774) ([`b5d54e4`](https://github.com/python-zeroconf/python-zeroconf/commit/b5d54e485d9dbcde1b7b472760a0b307198b8ec8)) - -* Add a guard against the task list changing when shutting down (#776) 
([`e8836b1`](https://github.com/python-zeroconf/python-zeroconf/commit/e8836b134c47080edaf47532d7cb844b307dfb08)) - -* Verify async callers can still use Zeroconf without migrating to AsyncZeroconf (#775) ([`f23df4f`](https://github.com/python-zeroconf/python-zeroconf/commit/f23df4f5f05e3911cbf96234b198ea88691aadad)) - -* Implement accidental synchronization protection (RFC2762 section 5.2) (#773) ([`b600547`](https://github.com/python-zeroconf/python-zeroconf/commit/b600547a47878775e1c6fb8df46682a670beccba)) - -* Improve performance of parsing DNSIncoming by caching read_utf (#769) ([`5d44a36`](https://github.com/python-zeroconf/python-zeroconf/commit/5d44a36a59c21ef7869ba9e6dde9f658d3502793)) - -* Add test coverage to ensure RecordManager.add_listener callsback known question answers (#767) ([`e70431e`](https://github.com/python-zeroconf/python-zeroconf/commit/e70431e1fdc92c155309a1d40c89fed48737970c)) - -* Switch to using an asyncio.Event for async_wait (#759) - -- We no longer need to check for thread safety under a asyncio.Condition - as the ServiceBrowser and ServiceInfo internals schedule coroutines - in the eventloop. 
([`6c82fa9`](https://github.com/python-zeroconf/python-zeroconf/commit/6c82fa9efd0f434f0f7c83e3bd98bd7851ede4cf)) - -* Break test_lots_of_names into two tests (#764) ([`85532e1`](https://github.com/python-zeroconf/python-zeroconf/commit/85532e13e42447fcd6d4d4b0060f04d33c3ab780)) - -* Fix test_lots_of_names overflowing the incoming buffer (#763) ([`38b59a6`](https://github.com/python-zeroconf/python-zeroconf/commit/38b59a64592f41b2bb547b35c72a010a925a2941)) - -* Fix race condition in ServiceBrowser test_integration (#762) - -- The event was being cleared in the wrong thread which - meant if the test was fast enough it would not be seen - the second time and give a spurious failure ([`fc0e599`](https://github.com/python-zeroconf/python-zeroconf/commit/fc0e599eec77477dd8f21ecd68b238e6a27f1bcf)) - -* Add 60s timeout for each test (#761) ([`936500a`](https://github.com/python-zeroconf/python-zeroconf/commit/936500a47cc33d9daa86f9012b1791986361ff63)) - -* Add missing coverage for SignalRegistrationInterface (#758) ([`9f68fc8`](https://github.com/python-zeroconf/python-zeroconf/commit/9f68fc8b1b834d0194e8ba1069d052aa853a8d38)) - -* Update changelog (#757) ([`1c93baa`](https://github.com/python-zeroconf/python-zeroconf/commit/1c93baa486b1b0f44487891766e0a0c1de3eb252)) - -* Simplify ServiceBrowser callbacks (#756) ([`f24ebba`](https://github.com/python-zeroconf/python-zeroconf/commit/f24ebba9ecc4d1626d570956a7cc735206d7ff6e)) - -* Revert: Fix thread safety in _ServiceBrowser.update_records_complete (#708) (#755) - -- This guarding is no longer needed as the ServiceBrowser loop - now runs in the event loop and the thread safety guard is no - longer needed ([`f53c88b`](https://github.com/python-zeroconf/python-zeroconf/commit/f53c88b52ed080c80e2e98d3da91a830f0c7ebca)) - -* Drop AsyncServiceListener (#754) ([`04cd268`](https://github.com/python-zeroconf/python-zeroconf/commit/04cd2688022ebd07c1f875fefc73f8d15c4ed56c)) - -* Run ServiceBrowser queries in the event loop (#752)
([`4d0a8f3`](https://github.com/python-zeroconf/python-zeroconf/commit/4d0a8f3c643a0fc5c3a40420bab96ef18dddaecb)) - -* Remove unused argument from AsyncZeroconf (#751) ([`e7adce2`](https://github.com/python-zeroconf/python-zeroconf/commit/e7adce2bf6ea0b4af1709369a36421acd9757b4a)) - -* Fix warning about Zeroconf._async_notify_all not being awaited in sync shutdown (#750) ([`3b9baf0`](https://github.com/python-zeroconf/python-zeroconf/commit/3b9baf07278290b2b4eb8ac5850bccfbd8b107d8)) - -* Update async_service_info_request example to ensure it runs in the right event loop (#749) ([`0f702c6`](https://github.com/python-zeroconf/python-zeroconf/commit/0f702c6a41bb33ed63872249b82d1111bdac4fa6)) - -* Run ServiceInfo requests in the event loop (#748) ([`0dbcabf`](https://github.com/python-zeroconf/python-zeroconf/commit/0dbcabfade41057a055ebefffd410d1afc3eb0ea)) - -* Remove support for notify listeners (#733) ([`7b3b4b5`](https://github.com/python-zeroconf/python-zeroconf/commit/7b3b4b5b8303a684165fcd53c0d9c36a1b8dda3d)) - -* Update changelog (#747) ([`0909c80`](https://github.com/python-zeroconf/python-zeroconf/commit/0909c80c67287ba92ed334ab6896136aec0f3f24)) - -* Relocate service info tests to tests/services/test_info.py (#746) ([`541292e`](https://github.com/python-zeroconf/python-zeroconf/commit/541292e55fee8bbafe687afcb8d152f6fe0efb5f)) - -* Relocate service browser tests to tests/services/test_browser.py (#745) ([`869c95a`](https://github.com/python-zeroconf/python-zeroconf/commit/869c95a51e228131eb7debe1acc47c105b9bf7b5)) - -* Relocate ServiceBrowser to zeroconf._services.browser (#744) ([`368163d`](https://github.com/python-zeroconf/python-zeroconf/commit/368163d3c30325d60021203430711e10fd6d97e9)) - -* Relocate ServiceInfo to zeroconf._services.info (#741) ([`f0d727b`](https://github.com/python-zeroconf/python-zeroconf/commit/f0d727bd9addd6dab373b75008f04a6f8547928b)) - -* Run question answer callbacks from add_listener in the event loop (#740) 
([`c8e15dd`](https://github.com/python-zeroconf/python-zeroconf/commit/c8e15dd2bb5f6d2eb3a8ef5f26ad044517b70c47)) - -* Fix flakey cache bit flush test (#739) ([`e227d6e`](https://github.com/python-zeroconf/python-zeroconf/commit/e227d6e4c337ef9d5aa626c41587a8046313e416)) - -* Remove second level caching from ServiceBrowsers (#737) ([`5feda7e`](https://github.com/python-zeroconf/python-zeroconf/commit/5feda7e318f7d164d2b04b2d243a804372517da6)) - -* Breakout ServiceBrowser handler from listener creation (#736) ([`35ac7a3`](https://github.com/python-zeroconf/python-zeroconf/commit/35ac7a39d1fab00898ed6075e7e930424716b627)) - -* Add fast cache lookup functions (#732) ([`9d31245`](https://github.com/python-zeroconf/python-zeroconf/commit/9d31245f9ed4f6b1f7d9d7c51daf0ca394fd208f)) - -* Switch to using DNSRRSet in RecordManager (#735) ([`c035925`](https://github.com/python-zeroconf/python-zeroconf/commit/c035925f47732a889c76a2ff0989b92c6687c950)) - -* Add test coverage to ensure the cache flush bit is properly handled (#734) ([`50af944`](https://github.com/python-zeroconf/python-zeroconf/commit/50af94493ff6bf5d21445eaa80d3a96f348b0d11)) - -* Fix server cache to be case-insensitive (#731) ([`3ee9b65`](https://github.com/python-zeroconf/python-zeroconf/commit/3ee9b650bedbe61d59838897f653ad43a6d51910)) - -* Update changelog (#730) ([`733f79d`](https://github.com/python-zeroconf/python-zeroconf/commit/733f79d28c7dd4500a1598b279ee638ead8bdd55)) - -* Prefix cache functions that are non threadsafe with async_ (#724) ([`3503e76`](https://github.com/python-zeroconf/python-zeroconf/commit/3503e7614fc31bbfe2c919f13689468cc73179fd)) - -* Fix cache handling of records with different TTLs (#729) - -- There should only be one unique record in the cache at - a time as having multiple unique records will different - TTLs in the cache can result in unexpected behavior since - some functions returned all matching records and some - fetched from the right side of the list to return the - 
newest record. Instead we now store the records in a dict - to ensure that the newest record always replaces the same - unique record and we never have a source of truth problem - determining the TTL of a record from the cache. ([`88aa610`](https://github.com/python-zeroconf/python-zeroconf/commit/88aa610274bf79aef6c74998f2bfca8c8de0dccb)) - -* Add tests for the DNSCache class (#728) - -- There is currently a bug in the implementation where an entry - can exist in two places in the cache with different TTLs. Since - a known answer cannot be both expired and not expired at the same - time, this is a bug that needs to be fixed. ([`ceb79bd`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb79bd7f7bdad434cbe5b4846492cd434ea883b)) - -* Update changelog (#727) ([`9cc834d`](https://github.com/python-zeroconf/python-zeroconf/commit/9cc834d501fa5e582adeb4468b02775288e1fa11)) - -* Rename handlers and internals to make it clear what is threadsafe (#726) - -- It was too easy to get confused about what was threadsafe and - what was not threadsafe which led to unexpected failures.
- Rename functions to make it clear what will be run in the event - loop and what is expected to be threadsafe ([`f91af79`](https://github.com/python-zeroconf/python-zeroconf/commit/f91af79c8779ac235598f5584f439c78b3bdcca2)) - -* Fix ServiceInfo with multiple A records (#725) ([`3338594`](https://github.com/python-zeroconf/python-zeroconf/commit/33385948da9123bc9348374edce7502abd898e82)) - -* Relocate cache tests to tests/test_cache.py (#722) ([`e2d4d98`](https://github.com/python-zeroconf/python-zeroconf/commit/e2d4d98db70b376c53883367b3a24c1d2510c2b5)) - -* Synchronize time for fate sharing (#718) ([`18ddb8d`](https://github.com/python-zeroconf/python-zeroconf/commit/18ddb8dbeef3edad3bb97131803dfecde4355467)) - -* Update changelog (#717) ([`1ab6859`](https://github.com/python-zeroconf/python-zeroconf/commit/1ab685960bc0e412d36baf6794fde06350998474)) - -* Cleanup typing in zero._core and document ignores (#714) ([`8183640`](https://github.com/python-zeroconf/python-zeroconf/commit/818364008e911757fca24e41a4eb36e0eef49bfa)) - -* Update README (#716) ([`0f2f4e2`](https://github.com/python-zeroconf/python-zeroconf/commit/0f2f4e207cb5007112ba09e87a332b1a46cd1577)) - -* Cleanup typing in zeroconf._logger (#715) ([`3fcdcfd`](https://github.com/python-zeroconf/python-zeroconf/commit/3fcdcfd9a3efc56a34f0334ffb8706613e07d19d)) - -* Cleanup typing in zeroconf._utils.net (#713) ([`a50b3ee`](https://github.com/python-zeroconf/python-zeroconf/commit/a50b3eeda5f275c31b36cdc1c8312f61599e72bf)) - -* Cleanup typing in zeroconf._services (#711) ([`a42512c`](https://github.com/python-zeroconf/python-zeroconf/commit/a42512ca6a6a4c15f37ab623a96deb2aa06dd053)) - -* Cleanup typing in zeroconf._services.registry (#712) ([`6b923de`](https://github.com/python-zeroconf/python-zeroconf/commit/6b923deb3682088d0fe9182377b5603d0ade1e1a)) - -* Add setter for DNSQuestion to easily make a QU question (#710) - -Closes #703 
([`aeb1b23`](https://github.com/python-zeroconf/python-zeroconf/commit/aeb1b23defa2d5956a6f19acca4ce410d6a04cc9)) - -* Synchronize created time for incoming and outgoing queries (#709) ([`c366c8c`](https://github.com/python-zeroconf/python-zeroconf/commit/c366c8cc45f565c4066fc72b481c6a960bac1cb9)) - -* Set stale unique records to expire 1s in the future instead of instant removal (#706) - -- Fixes #475 - -- https://tools.ietf.org/html/rfc6762#section-10.2 - Queriers receiving a Multicast DNS response with a TTL of zero SHOULD - NOT immediately delete the record from the cache, but instead record - a TTL of 1 and then delete the record one second later. In the case - of multiple Multicast DNS responders on the network described in - Section 6.6 above, if one of the responders shuts down and - incorrectly sends goodbye packets for its records, it gives the other - cooperating responders one second to send out their own response to - "rescue" the records before they expire and are deleted. 
([`f3eeecd`](https://github.com/python-zeroconf/python-zeroconf/commit/f3eeecd84413b510b9b8e05e2d1f6ad99d0dc37d)) - -* Fix thread safety in _ServiceBrowser.update_records_complete (#708) ([`dc0c613`](https://github.com/python-zeroconf/python-zeroconf/commit/dc0c6137742edf97626c972e5c9191dfbffaecdc)) - -* Split DNSOutgoing/DNSIncoming/DNSMessage into zeroconf._protocol (#705) ([`f39bde0`](https://github.com/python-zeroconf/python-zeroconf/commit/f39bde0f6cba7a3c1b8fe8bc1a4ab4388801e486)) - -* Update changelog (#699) ([`c368e1c`](https://github.com/python-zeroconf/python-zeroconf/commit/c368e1c67c82598e920ca52b1f7a47ed6e1cf738)) - -* Efficiently bucket queries with known answers (#698) ([`7e30848`](https://github.com/python-zeroconf/python-zeroconf/commit/7e308480238fdf2cfe08474d679121e77f746fa6)) - -* Abstract DNSOutgoing ttl write into _write_ttl (#695) ([`26fa2fb`](https://github.com/python-zeroconf/python-zeroconf/commit/26fa2fb479fff87ca5af17c2c09a557c4b6176b5)) - -* Use unique names in service types tests (#697) ([`767546b`](https://github.com/python-zeroconf/python-zeroconf/commit/767546b656d7db6df0cbf2b257953498f1bc3996)) - -* Rollback data in one call instead of poping one byte at a time in DNSOutgoing (#696) ([`5cbaa3f`](https://github.com/python-zeroconf/python-zeroconf/commit/5cbaa3fc02f635e6c735e1ee5f1ca19b84c0a069)) - -* Fix off by 1 in test_tc_bit_defers_last_response_missing (#694) ([`32b7dc4`](https://github.com/python-zeroconf/python-zeroconf/commit/32b7dc40e2c3621fcacb2f389d51408ab35ac832)) - -* Suppress additionals when answer is suppressed (#690) ([`0cdba98`](https://github.com/python-zeroconf/python-zeroconf/commit/0cdba98e65dd3dce2db8aa607e97e3b67b97721a)) - -* Move setting DNS created and ttl into its own function (#692) ([`993a82e`](https://github.com/python-zeroconf/python-zeroconf/commit/993a82e414db8aadaee0e0475e178e75df417a71)) - -* Remove AA flags from handlers test (#693) - -- The flag was added by mistake when copying from other tests 
([`b60f307`](https://github.com/python-zeroconf/python-zeroconf/commit/b60f307d59e342983d1baa6040c3d997f84538ab)) - -* Implement multi-packet known answer supression (#687) - -- Implements https://datatracker.ietf.org/doc/html/rfc6762#section-7.2 - -- Fixes https://github.com/jstasiak/python-zeroconf/issues/499 ([`8a25a44`](https://github.com/python-zeroconf/python-zeroconf/commit/8a25a44ec5e4f21c6bdb282fefb8f6c2d296a70b)) - -* Remove sleeps from services types test (#688) - -- Instead of registering the services and doing the broadcast - we now put them in the registry directly. ([`4865d2b`](https://github.com/python-zeroconf/python-zeroconf/commit/4865d2ba782d0313c0f7d878f5887453086febaa)) - -* Add truncated property to DNSMessage to lookup the TC bit (#686) ([`e816053`](https://github.com/python-zeroconf/python-zeroconf/commit/e816053af4d900f57100c07c48f384165ba28b9a)) - -* Update changelog (#684) ([`6fd1bf2`](https://github.com/python-zeroconf/python-zeroconf/commit/6fd1bf2364da4fc2949a905d2e4acb7da003e84d)) - -* Add coverage to verify ServiceInfo tolerates bytes or string in the txt record (#683) ([`95ddb36`](https://github.com/python-zeroconf/python-zeroconf/commit/95ddb36de64ddf3be9e93f07a1daa8389410f73d)) - -* Fix logic reversal in apple_p2p test (#681) ([`00b972c`](https://github.com/python-zeroconf/python-zeroconf/commit/00b972c062fd0ed3f2fcc4ceaec84c43b9a613be)) - -* Check if SO_REUSEPORT exists instead of using an exception catch (#682) ([`d2b5e51`](https://github.com/python-zeroconf/python-zeroconf/commit/d2b5e51d0dcde801e171a4c1e43ef1f86abde825)) - -* Use DNSRRSet for known answer suppression (#680) - -- DNSRRSet uses hash table lookups under the hood which - is much faster than the linear searches used by - DNSRecord.suppressed_by ([`e5ea9bb`](https://github.com/python-zeroconf/python-zeroconf/commit/e5ea9bb6c0a3bce7d05241f275a205ddd9e6b615)) - -* Add DNSRRSet class for quick hashtable lookups of records (#678) - -- This class will be used to do fast 
checks to see - if records should be suppressed by a set of answers. ([`691c29e`](https://github.com/python-zeroconf/python-zeroconf/commit/691c29eeb049e17a12d6f0a6e3bce2c3f8c2aa02)) - -* Allow unregistering a service multiple times (#679) ([`d3d439a`](https://github.com/python-zeroconf/python-zeroconf/commit/d3d439ad5d475cff094a4ea83f19d17939527021)) - -* Remove unreachable BadTypeInNameException check in _ServiceBrowser (#677) ([`57c94bb`](https://github.com/python-zeroconf/python-zeroconf/commit/57c94bb25e056e1827f15c234d7e0bcb5702a0e3)) - -* Make calculation of times in DNSRecord lazy (#676) - -- Most of the time we only check one of the time attrs - or none at all. Wait to calculate them until they are - requested. ([`ba2a4f9`](https://github.com/python-zeroconf/python-zeroconf/commit/ba2a4f960d0f9478198968a1466a8b48c963b772)) - -* Add oversized packet to the invalid packet test (#671) ([`8535110`](https://github.com/python-zeroconf/python-zeroconf/commit/8535110dd661ce406904930994a9f86faf897597)) - -* Add test for sending unicast responses (#670) ([`d274cd3`](https://github.com/python-zeroconf/python-zeroconf/commit/d274cd3a3409997b764c49d3eae7e8ee2fba33b6)) - -* Add missing coverage for ServiceInfo address changes (#669) ([`d59fb8b`](https://github.com/python-zeroconf/python-zeroconf/commit/d59fb8be29d8602ad66d89f595b26671a528fd77)) - -* Add missing coverage for ServiceListener (#668) ([`75347b4`](https://github.com/python-zeroconf/python-zeroconf/commit/75347b4e30429e130716b666da52953700f0f8e9)) - -* Update async_browser.py example to use AsyncZeroconfServiceTypes (#665) ([`481cc42`](https://github.com/python-zeroconf/python-zeroconf/commit/481cc42d000f5b0258f1be3b6df7cb7b24428b7f)) - -* Permit the ServiceBrowser to browse overlong types (#666) - -- At least one type "tivo-videostream" exists in the wild - so we are permissive about what we will look for, and - strict about what we will announce. 
- -Fixes #661 ([`e76c7a5`](https://github.com/python-zeroconf/python-zeroconf/commit/e76c7a5b76485efce0929ee8417aa2e0f262c04c)) - -* Add an AsyncZeroconfServiceTypes to mirror ZeroconfServiceTypes to zeroconf.aio (#658) ([`aaf8a36`](https://github.com/python-zeroconf/python-zeroconf/commit/aaf8a368063f080be4a9c01fe671243e63bdf576)) - -* Fix flakey ZeroconfServiceTypes types test (#662) ([`72db0c1`](https://github.com/python-zeroconf/python-zeroconf/commit/72db0c10246e948c15d9a53f60a54b835ccc67bc)) - -* Add test for launching with apple_p2p=True (#660) - -- Switch to using `sys.platform` to detect Mac instead of - `platform.system()` since `platform.system()` is not intended - to be machine parsable and is only for humans. - -Closes #650 ([`0e52be0`](https://github.com/python-zeroconf/python-zeroconf/commit/0e52be059065e23ebe9e11c465adc20655b6080e)) - -* Add test for Zeroconf.get_service_info failure case (#657) ([`5752ace`](https://github.com/python-zeroconf/python-zeroconf/commit/5752ace7727bffa34cdac0455125a941014ab123)) - -* Add coverage for registering a service with a custom ttl (#656) ([`87fe529`](https://github.com/python-zeroconf/python-zeroconf/commit/87fe529a33b920532b2af688bb66182ae832a3ad)) - -* Improve aio utils tests to validate high lock contention (#655) ([`efd6bfb`](https://github.com/python-zeroconf/python-zeroconf/commit/efd6bfbe81f448da2ee68b91d49cbe1982271da3)) - -* Add test coverage for normalize_interface_choice exception paths (#654) ([`3c61d03`](https://github.com/python-zeroconf/python-zeroconf/commit/3c61d03f5954c3e45229d6c1399a63c0f7331d55)) - -* Remove all calls to the executor in AsyncZeroconf (#653) ([`7d8994b`](https://github.com/python-zeroconf/python-zeroconf/commit/7d8994bc3cb4d5978bb1ff189bb5a4b7c81b5c4c)) - -* Set __all__ in zeroconf.aio to ensure private functions do not show in the docs (#652) ([`b940f87`](https://github.com/python-zeroconf/python-zeroconf/commit/b940f878fe1f8e6b8dfe2554b781cd6034dee722)) - -* Ensure 
interface_index_to_ip6_address skips ipv4 adapters (#651) ([`df9f8d9`](https://github.com/python-zeroconf/python-zeroconf/commit/df9f8d9a0110cc9135b7c2f0b4cd47e985da9a7e)) - -* Add async_unregister_all_services to AsyncZeroconf (#649) ([`72e709b`](https://github.com/python-zeroconf/python-zeroconf/commit/72e709b40caed016ba981be3752c439bbbf40ec7)) - -* Use cache clear helper in aio tests (#648) ([`79e39c0`](https://github.com/python-zeroconf/python-zeroconf/commit/79e39c0e923a1f6d87353761809f34f0fe1f0800)) - -* Ensure services are removed from the registry when calling unregister_all_services (#644) - -- There was a race condition where a query could be answered for a service - in the registry while goodbye packets which could result a fresh record - being broadcast after the goodbye if a query came in at just the right - time. To avoid this, we now remove the services from the registry right - after we generate the goodbye packet ([`cf0b5b9`](https://github.com/python-zeroconf/python-zeroconf/commit/cf0b5b9e2cfa4779425401b3d205f5d913621864)) - -* Use ServiceInfo.key/ServiceInfo.server_key instead of lowering in ServiceRegistry (#647) ([`a83d390`](https://github.com/python-zeroconf/python-zeroconf/commit/a83d390bef042da51d93014c222c65af81723a20)) - -* Add missing coverage to ServiceRegistry (#646) ([`9354ab3`](https://github.com/python-zeroconf/python-zeroconf/commit/9354ab39f350e4e6451dc4965225591761ada40d)) - -* Ensure the ServiceInfo.key gets updated when the name is changed externally (#645) ([`330e36c`](https://github.com/python-zeroconf/python-zeroconf/commit/330e36ceb4202c579fe979958c63c37033ababbb)) - -* Ensure cache is cleared before starting known answer enumeration query test (#639) ([`5ebd954`](https://github.com/python-zeroconf/python-zeroconf/commit/5ebd95452b16e76c37649486b232856a80390ac3)) - -* Ensure AsyncZeroconf.async_close can be called multiple times like Zeroconf.close (#638) 
([`ce6912a`](https://github.com/python-zeroconf/python-zeroconf/commit/ce6912a75392cde41d8950b224ba3d14460993ff)) - -* Update changelog (#637) ([`09c18a4`](https://github.com/python-zeroconf/python-zeroconf/commit/09c18a4173a013e67da5a1cdc7089452ba6f67ee)) - -* Ensure eventloop shutdown is threadsafe (#636) - -- Prevent ConnectionResetError from being thrown on - Windows with ProactorEventLoop on cpython 3.8+ ([`bbbbddf`](https://github.com/python-zeroconf/python-zeroconf/commit/bbbbddf40d78dbd62a84f2439763d0a59211c5b9)) - -* Update changelog (#635) ([`c854d03`](https://github.com/python-zeroconf/python-zeroconf/commit/c854d03efd31e1d002518a43221b347fa6ca5de5)) - -* Clear cache in ZeroconfServiceTypes tests to ensure responses can be mcast before the timeout (#634) - -- We prevent the same record from being multicast within 1s - because of RFC6762 sec 14. Since these tests time out after - 0.5s, the answers they are looking for may be suppressed. - Since a legitimate querier will retry again later, we need - to clear the cache to simulate that the record has not - been multicast recently ([`a0977a1`](https://github.com/python-zeroconf/python-zeroconf/commit/a0977a1ddfd7a7a1abcf74c1d90c18021aebc910)) - -* Mark DNSOutgoing write functions as protected (#633) ([`5f66caa`](https://github.com/python-zeroconf/python-zeroconf/commit/5f66caaccf44c1504988cb82c1cba78d28dde7e7)) - -* Return early in the shutdown/close process (#632) ([`4ce33e4`](https://github.com/python-zeroconf/python-zeroconf/commit/4ce33e48e2094f17d8358cf221c7e2f9a8cb3568)) - -* Update changelog (#631) ([`64f6dd7`](https://github.com/python-zeroconf/python-zeroconf/commit/64f6dd7e244c86d58b962f48a50d07625f2a2a33)) - -* Remove unreachable cache check for DNSAddresses (#629) - -- The ServiceBrowser would check to see if a DNSAddress was - already in the cache and return early to avoid sending - updates when the address already was held in the cache. 
- This check was not needed since there is already a check - a few lines before as `self.zc.cache.get(record)` which - effectively does the same thing. This led to the check - never being covered in the tests and 2 cache lookups when - only one was needed. ([`2b31612`](https://github.com/python-zeroconf/python-zeroconf/commit/2b31612e3f128b1193da9e0d2640f4e93fab2e3a)) - -* Add test for wait_condition_or_timeout_times_out util (#630) ([`2065b1d`](https://github.com/python-zeroconf/python-zeroconf/commit/2065b1d7ec7cb5d41c34826c2d8887bdd8a018b6)) - -* Return early on invalid data received (#628) - -- Improve coverage for handling invalid incoming data ([`28a614e`](https://github.com/python-zeroconf/python-zeroconf/commit/28a614e0586a0ca1c5c1651b59c9a4d9c1af9a1b)) - -* Update changelog (#627) ([`215d6ba`](https://github.com/python-zeroconf/python-zeroconf/commit/215d6badb3db796b13a000b26953cb57c557e5e5)) - -* Add test to ensure ServiceBrowser sees port change as an update (#625) ([`113874a`](https://github.com/python-zeroconf/python-zeroconf/commit/113874a7b59ac9cc887b1b626ac1486781c7d56f)) - -* Fix random test failures due to monkey patching not being undone between tests (#626) - -- Switch patching to use unittest.mock.patch to ensure the patch - is reverted when the test is completed - -Fixes #505 ([`5750f7c`](https://github.com/python-zeroconf/python-zeroconf/commit/5750f7ceef0441fe1cedc0d96e7ef5ccc232d875)) - -* Ensure zeroconf can be loaded when the system disables IPv6 (#624) ([`42d53c7`](https://github.com/python-zeroconf/python-zeroconf/commit/42d53c7c04a7bbf4e60e691e2e58fe7acfec8ad9)) - -* Update changelog (#623) ([`4d05961`](https://github.com/python-zeroconf/python-zeroconf/commit/4d05961088efa8b503cad5658afade874eaeec76)) - -* Eliminate aio sender thread (#622) ([`f15e84f`](https://github.com/python-zeroconf/python-zeroconf/commit/f15e84f3ee7a644792fe98edde84dd216b3497cb)) - -* Replace select loop with asyncio loop (#504) 
([`8f00cfc`](https://github.com/python-zeroconf/python-zeroconf/commit/8f00cfca0e67dde6afda399da6984ed7d8f929df)) - -* Add support for handling QU questions (#621) - -- Implements RFC 6762 sec 5.4: - Questions Requesting Unicast Responses - https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 ([`9a32db8`](https://github.com/python-zeroconf/python-zeroconf/commit/9a32db8582588e4bf812fd5670a7e61c50631a2e)) - -* Add is_recent property to DNSRecord (#620) - -- RFC 6762 defines recent as not multicast within one quarter of its TTL - https://datatracker.ietf.org/doc/html/rfc6762#section-5.4 ([`1f36754`](https://github.com/python-zeroconf/python-zeroconf/commit/1f36754f3964738e496a1da9c24380e204aaff01)) - -* Protect the network against excessive packet flooding (#619) ([`0e644ad`](https://github.com/python-zeroconf/python-zeroconf/commit/0e644ad650627024c7a3f926a86f7d9ecc66e591)) - -* Ensure matching PTR queries are returned with the ANY query (#618) - -Fixes #464 ([`b6365aa`](https://github.com/python-zeroconf/python-zeroconf/commit/b6365aa1f889a3045aa185f67354de622bd7ebd3)) - -* Suppress additionals when they are already in the answers section (#617) ([`427b728`](https://github.com/python-zeroconf/python-zeroconf/commit/427b7285269984cbb6f28c87a8bf8f864a5e15d7)) - -* Fix queries for AAAA records (#616) ([`0100c08`](https://github.com/python-zeroconf/python-zeroconf/commit/0100c08c5a3fb90d0795cf57f0bd3e11c7a94a0b)) - -* Breakout the query response handler into its own class (#615) ([`c828c75`](https://github.com/python-zeroconf/python-zeroconf/commit/c828c7555ed1fb82ff95ed578262d1553f19d903)) - -* Avoid including additionals when the answer is suppressed by known-answer supression (#614) ([`219aa3e`](https://github.com/python-zeroconf/python-zeroconf/commit/219aa3e54c944b2935c9a40cc15de19284aded3c)) - -* Add the ability for ServiceInfo.dns_addresses to filter by address type (#612) 
([`aea2c8a`](https://github.com/python-zeroconf/python-zeroconf/commit/aea2c8ab24d4be19b34f407c854241e0d73d0525)) - -* Make DNSRecords hashable (#611) - -- Allows storing them in a set for de-duplication - -- Needed to be able to check for duplicates to solve https://github.com/jstasiak/python-zeroconf/issues/604 ([`b7d8678`](https://github.com/python-zeroconf/python-zeroconf/commit/b7d867878153fa600053869265260992e5462b2d)) - -* Ensure the QU bit is set for probe queries (#609) - -- The bit should be set per - https://datatracker.ietf.org/doc/html/rfc6762#section-8.1 ([`22bd147`](https://github.com/python-zeroconf/python-zeroconf/commit/22bd1475fb58c7c421c0009cd0c5c791cedb225d)) - -* Log destination when sending packets (#606) ([`850e211`](https://github.com/python-zeroconf/python-zeroconf/commit/850e2115aa79c10765dfc45a290a68193397de6c)) - -* Fix docs version to match readme (cpython 3.6+) (#602) ([`809b6df`](https://github.com/python-zeroconf/python-zeroconf/commit/809b6df376205e6ab5ce8fb5fe3a92e77662fe2d)) - -* Add ZeroconfServiceTypes to zeroconf.__all__ (#601) - -- This class is in the readme, but is not exported by - default ([`f6cd8f6`](https://github.com/python-zeroconf/python-zeroconf/commit/f6cd8f6d23459f9ed48ad06ff6702e606d620eaf)) - -* Ensure unicast responses can be sent to any source port (#598) - -- Unicast responses were only being sent if the source port - was 53, this prevented responses when testing with dig: - - dig -p 5353 @224.0.0.251 media-12.local - - The above query will now see a response ([`3556c22`](https://github.com/python-zeroconf/python-zeroconf/commit/3556c22aacc72e62c318955c084533b70311bcc9)) - -* Add id_ param to allow setting the id in the DNSOutgoing constructor (#599) ([`cb64e0d`](https://github.com/python-zeroconf/python-zeroconf/commit/cb64e0dd5d1c621f61d0d0f92ea282d287a9c242)) - -* Fix lookup of uppercase names in registry (#597) - -- If the ServiceInfo was registered with an uppercase name and the query was - for a 
lowercase name, it would not be found and vice-versa. ([`fe72524`](https://github.com/python-zeroconf/python-zeroconf/commit/fe72524dbaf934ca63ebce053e34f3e838743460)) - -* Add unicast property to DNSQuestion to determine if the QU bit is set (#593) ([`d2d8262`](https://github.com/python-zeroconf/python-zeroconf/commit/d2d826220bd4f287835ebb4304450cc2311d1db6)) - -* Reduce branching in DNSOutgoing.add_answer_at_time (#592) ([`35e25fd`](https://github.com/python-zeroconf/python-zeroconf/commit/35e25fd46f8d3689b723dd845eba9862a5dc8a22)) - -* Move notify listener tests to test_core (#591) ([`72032d6`](https://github.com/python-zeroconf/python-zeroconf/commit/72032d6dde2ee7388b8cb4545554519d3ffa8508)) - -* Set mypy follow_imports to skip as ignore is not a valid option (#590) ([`fd70ac1`](https://github.com/python-zeroconf/python-zeroconf/commit/fd70ac1b6bdded992f8fbbb723ca92f5395abf23)) - -* Relocate handlers tests to tests/test_handlers (#588) ([`8aa14d3`](https://github.com/python-zeroconf/python-zeroconf/commit/8aa14d33849c057c91a00e1093606081ade488e7)) - -* Relocate ServiceRegistry tests to tests/services/test_registry (#587) ([`ae6530a`](https://github.com/python-zeroconf/python-zeroconf/commit/ae6530a59e2d8ddb9a7367243c29c5e00665a82f)) - -* Disable flakey ServiceTypesQuery ipv6 win32 test (#586) ([`5cb5702`](https://github.com/python-zeroconf/python-zeroconf/commit/5cb5702fca2845e99b457e4427428497c3cd9b31)) - -* Relocate network utils tests to tests/utils/test_net (#585) ([`12f5676`](https://github.com/python-zeroconf/python-zeroconf/commit/12f567695b5364c9c5c5af0a7017d877de84274d)) - -* Relocate ServiceTypesQuery tests to tests/services/test_types (#584) ([`1fe282b`](https://github.com/python-zeroconf/python-zeroconf/commit/1fe282ba246505d172356cc8672307c7d125820d)) - -* Mark zeroconf.services as protected by renaming to zeroconf._services (#583) - -- The public API should only access zeroconf and zeroconf.aio - as internals may be relocated between releases 
([`4a88066`](https://github.com/python-zeroconf/python-zeroconf/commit/4a88066d66b2f2a00ebc388c5cda478c52cb9e6c)) - -* Mark zeroconf.utils as protected by renaming to zeroconf._utils (#582) - -- The public API should only access zeroconf and zeroconf.aio - as internals may be relocated between releases ([`cc5bc36`](https://github.com/python-zeroconf/python-zeroconf/commit/cc5bc36f6f7597a0adb0d637147c2f93ca243ff4)) - -* Mark zeroconf.cache as protected by renaming to zeroconf._cache (#581) - -- The public API should only access zeroconf and zeroconf.aio - as internals may be relocated between releases ([`a16e85b`](https://github.com/python-zeroconf/python-zeroconf/commit/a16e85b20c2069aa9cee0510c618cb61d46dc19c)) - -* Mark zeroconf.exceptions as protected by renaming to zeroconf._exceptions (#580) - -- The public API should only access zeroconf and zeroconf.aio - as internals may be relocated between releases ([`241700a`](https://github.com/python-zeroconf/python-zeroconf/commit/241700a07a76a8c45afbe1bdd8325cd9f0eb0168)) - -* Fix flakey backoff test race on startup (#579) ([`dd9ada7`](https://github.com/python-zeroconf/python-zeroconf/commit/dd9ada781fdb1d5efc7c6ad194426e92550245b1)) - -* Mark zeroconf.logger as protected by renaming to zeroconf._logger (#578) ([`500066f`](https://github.com/python-zeroconf/python-zeroconf/commit/500066f940aa89737f343976ee0387eae97eac37)) - -* Mark zeroconf.handlers as protected by renaming to zeroconf._handlers (#577) - -- The public API should only access zeroconf and zeroconf.aio - as internals may be relocated between releases ([`1a2ee68`](https://github.com/python-zeroconf/python-zeroconf/commit/1a2ee6892e996c1e84ba97082e5cda609d1d55d7)) - -* Log zeroconf.asyncio deprecation warning with the logger module (#576) ([`c29a235`](https://github.com/python-zeroconf/python-zeroconf/commit/c29a235eb59ed3b4883305cf11f8bf9fa06284d3)) - -* Mark zeroconf.core as protected by renaming to zeroconf._core (#575) 
([`601e8f7`](https://github.com/python-zeroconf/python-zeroconf/commit/601e8f70499638a6f24291bc0a28054fd78243c0)) - -* Mark zeroconf.dns as protected by renaming to zeroconf._dns (#574) - -- The public API should only access zeroconf and zeroconf.aio - as internals may be relocated between releases ([`0e61b15`](https://github.com/python-zeroconf/python-zeroconf/commit/0e61b1502c7fd3412f979bc4d651ee016e712de9)) - -* Update changelog (#573) ([`f10a562`](https://github.com/python-zeroconf/python-zeroconf/commit/f10a562471ad89527e6eef6ba935a27177bb1417)) - -* Relocate services tests to test_services (#570) ([`ae552e9`](https://github.com/python-zeroconf/python-zeroconf/commit/ae552e94732568fd798e1f2d0e811849edff7790)) - -* Remove DNSOutgoing.packet backwards compatibility (#569) - -- DNSOutgoing.packet only returned a partial message when the - DNSOutgoing contents exceeded _MAX_MSG_ABSOLUTE or _MAX_MSG_TYPICAL - This was a legacy function that was replaced with .packets() - which always returns a complete payload in #248 As packet() - should not be used since it will end up missing data, it has - been removed ([`1e7c074`](https://github.com/python-zeroconf/python-zeroconf/commit/1e7c07481bb0cd08fe492dab02be888c6a1dadf2)) - -* Breakout DNSCache into zeroconf.cache (#568) ([`0e0bc2a`](https://github.com/python-zeroconf/python-zeroconf/commit/0e0bc2a901ed1d64e357c63e9fb8655f3a6e9298)) - -* Removed protected imports from zeroconf namespace (#567) - -- These protected items are not intended to be part of the - public API ([`a8420cd`](https://github.com/python-zeroconf/python-zeroconf/commit/a8420cde192647486eba4da4e54df9d0fe65adba)) - -* Update setup.py for utils and services (#562) ([`7807fa0`](https://github.com/python-zeroconf/python-zeroconf/commit/7807fa0dfdab20d950c446f17b7233a8c65cbab1)) - -* Move additional dns tests to test_dns (#561) ([`ae1ce09`](https://github.com/python-zeroconf/python-zeroconf/commit/ae1ce092de7eb4797da0f56e9eb8e538c95a8cc1)) - -* Move 
exceptions tests to test_exceptions (#560) ([`b5d848d`](https://github.com/python-zeroconf/python-zeroconf/commit/b5d848de1ed95c55f8c262bcf0811248818da901)) - -* Move additional tests to test_core (#559) ([`eb37f08`](https://github.com/python-zeroconf/python-zeroconf/commit/eb37f089579fdc5a405dbc2f0ce5620cf9d1b011)) - -* Relocate additional dns tests to test_dns (#558) ([`18b9d0a`](https://github.com/python-zeroconf/python-zeroconf/commit/18b9d0a8bd07c0a0d2923763a5f131905c31e0df)) - -* Relocate dns tests to test_dns (#557) ([`f0d99e2`](https://github.com/python-zeroconf/python-zeroconf/commit/f0d99e2e68791376a8517254338c708a3244f178)) - -* Relocate some of the services tests to test_services (#556) ([`715cd9a`](https://github.com/python-zeroconf/python-zeroconf/commit/715cd9a1d208139862e6d9d718114e1e472efd28)) - -* Fix invalid typing in ServiceInfo._set_text (#554) ([`3d69656`](https://github.com/python-zeroconf/python-zeroconf/commit/3d69656c4e5fbd8f90d54826877a04120d5ec951)) - -* Add missing coverage for ipv6 network utils (#555) ([`3dfda64`](https://github.com/python-zeroconf/python-zeroconf/commit/3dfda644efef83640e80876e4fe7da10e87b5990)) - -* Move ZeroconfServiceTypes to zeroconf.services.types (#553) ([`e50b62b`](https://github.com/python-zeroconf/python-zeroconf/commit/e50b62bb633916d5b84df7bcf7a804c9e3ef7fc2)) - -* Add recipe for TYPE_CHECKING to .coveragerc (#552) ([`e7fb4e5`](https://github.com/python-zeroconf/python-zeroconf/commit/e7fb4e5fb2a6b2163b143a63e2a9e8c5d1eca482)) - -* Move QueryHandler and RecordManager handlers into zeroconf.handlers (#551) ([`5b489e5`](https://github.com/python-zeroconf/python-zeroconf/commit/5b489e5b15ff89a0ffc000ccfeab2a8af346a65e)) - -* Move ServiceListener to zeroconf.services (#550) ([`ffdc988`](https://github.com/python-zeroconf/python-zeroconf/commit/ffdc9887ede1f867c155743b344efc53e0ceee42)) - -* Move the ServiceRegistry into its own module (#549) 
([`4086fb4`](https://github.com/python-zeroconf/python-zeroconf/commit/4086fb4304b0653153865306e46c865c90137922)) - -* Move ServiceStateChange to zeroconf.services (#548) ([`c8a0a71`](https://github.com/python-zeroconf/python-zeroconf/commit/c8a0a71c31252bbc4a242701bc786eb419e1a8e8)) - -* Relocate core functions into zeroconf.core (#547) ([`bf0e867`](https://github.com/python-zeroconf/python-zeroconf/commit/bf0e867ead1e48e05a27fe8db69900d9dc387ea2)) - -* Breakout service classes into zeroconf.services (#544) ([`bdea21c`](https://github.com/python-zeroconf/python-zeroconf/commit/bdea21c0a61b6d9d0af3810f18dbc2fc2364c484)) - -* Move service_type_name to zeroconf.utils.name (#543) ([`b4814f5`](https://github.com/python-zeroconf/python-zeroconf/commit/b4814f5f216cd4072bafdd7dd1e68ee522f329c2)) - -* Relocate DNS classes to zeroconf.dns (#541) ([`1e3e7df`](https://github.com/python-zeroconf/python-zeroconf/commit/1e3e7df8b7fdacd90cf5d864411e5db5a915be94)) - -* Update zeroconf.aio import locations (#539) ([`8733cad`](https://github.com/python-zeroconf/python-zeroconf/commit/8733cad2eae71ebdf94ecadc6fd5439882477235)) - -* Move int2byte to zeroconf.utils.struct (#540) ([`6af42b5`](https://github.com/python-zeroconf/python-zeroconf/commit/6af42b54640ebba541302bfcf7688b3926453b15)) - -* Breakout network utils into zeroconf.utils.net (#537) ([`5af3eb5`](https://github.com/python-zeroconf/python-zeroconf/commit/5af3eb58bfdc1736e6db175c4c6f7c6f2c05b694)) - -* Move time utility functions into zeroconf.utils.time (#536) ([`7ff810a`](https://github.com/python-zeroconf/python-zeroconf/commit/7ff810a02e608fae39634be09d6c3ce0a93485b8)) - -* Avoid making DNSOutgoing aware of the Zeroconf object (#535) - -- This is not a breaking change since this code has not - yet shipped ([`2976cc2`](https://github.com/python-zeroconf/python-zeroconf/commit/2976cc2001cbba2c0afc57b9a3d301f382ddac8a)) - -* Add missing coverage for QuietLogger (#534) 
([`328c1b9`](https://github.com/python-zeroconf/python-zeroconf/commit/328c1b9acdcd5cafa2df3e5b4b833b908d299500)) - -* Move logger into zeroconf.logger (#533) ([`e2e4eed`](https://github.com/python-zeroconf/python-zeroconf/commit/e2e4eede9117827f47c66a4852dd2d236b46ecda)) - -* Move exceptions into zeroconf.exceptions (#532) ([`5100506`](https://github.com/python-zeroconf/python-zeroconf/commit/5100506f896b649e6a6a8e2efb592362cd2644d3)) - -* Move constants into const.py (#531) ([`89d4755`](https://github.com/python-zeroconf/python-zeroconf/commit/89d4755106a6c3bced395b0a26eb3082c1268fa1)) - -* Move asyncio utils into zeroconf.utils.aio (#530) ([`2d8a27a`](https://github.com/python-zeroconf/python-zeroconf/commit/2d8a27a54aee298af74121986b4ea76f1f50b421)) - -* Relocate tests to tests directory (#527) ([`3f1a5a7`](https://github.com/python-zeroconf/python-zeroconf/commit/3f1a5a7b7a929d5f699812a809347b0c2f799fbf)) - -* Fix flakey test_update_record test (round 2) (#528) ([`14542bd`](https://github.com/python-zeroconf/python-zeroconf/commit/14542bd2bd327fd9b3d93cfb48a3bf09d6c89e15)) - -* Move ipversion auto detection code into its own function (#524) ([`16d40b5`](https://github.com/python-zeroconf/python-zeroconf/commit/16d40b50ccab6a8d53fe4aeb7b0006f7fd67ef53)) - -* Fix flakey test_update_record (#525) - -- Ensure enough time has past that the first record update - was processed before sending the second one ([`f49342c`](https://github.com/python-zeroconf/python-zeroconf/commit/f49342cdaff2d012ad23635b49ae746ad71333df)) - -* Update python compatibility as PyPy3 7.2 is required (#523) - -- When the version requirement changed to cpython 3.6, PyPy - was not bumped as well ([`b37d115`](https://github.com/python-zeroconf/python-zeroconf/commit/b37d115a233b61e2989d1439f65cdd911b86f407)) - -* Make the cache cleanup interval a constant (#522) ([`7ce29a2`](https://github.com/python-zeroconf/python-zeroconf/commit/7ce29a2f736af13886aa66dc1c49e15768e6fdcc)) - -* Add test helper 
to inject DNSIncoming (#518) ([`ef7aa25`](https://github.com/python-zeroconf/python-zeroconf/commit/ef7aa250e140d70b8c62abf4d13dcaa36f128c63)) - -* Remove broad exception catch from RecordManager.remove_listener (#517) ([`e125239`](https://github.com/python-zeroconf/python-zeroconf/commit/e12523933819087d2a087b8388e79b24af058a58)) - -* Small cleanups to RecordManager.add_listener (#516) ([`f80a051`](https://github.com/python-zeroconf/python-zeroconf/commit/f80a0515cf73b1e304d0615f8cee91ae38ac1ae8)) - -* Move RecordUpdateListener management into RecordManager (#514) ([`6cc3adb`](https://github.com/python-zeroconf/python-zeroconf/commit/6cc3adb020115ef9626caf61bb5f7550a2da8b4c)) - -* Update changelog (#513) ([`3d6c682`](https://github.com/python-zeroconf/python-zeroconf/commit/3d6c68278713a2ca66e27938feedcc451a078369)) - -* Break out record updating into RecordManager (#512) ([`9a766a2`](https://github.com/python-zeroconf/python-zeroconf/commit/9a766a2a96abd0f105056839b5c30f2ede31ea2e)) - -* Remove unneeded wait in the Engine thread (#511) - -- It is no longer necessary to wait since the socketpair - was added in #243 which will cause the select to unblock - when a new socket is added or removed. 
([`70b455b`](https://github.com/python-zeroconf/python-zeroconf/commit/70b455ba53ce43e9280c02612e8a89665abd57f6)) - -* Stop monkey patching send in the TTL test (#510) ([`954ca3f`](https://github.com/python-zeroconf/python-zeroconf/commit/954ca3fb498bdc7cd5a6a168c40ad5b6b2476e71)) - -* Stop monkey patching send in the PTR optimization test (#509) ([`db866f7`](https://github.com/python-zeroconf/python-zeroconf/commit/db866f7d032ed031e6aa5e14fba24b3dafeafa8d)) - -* Extract code for handling queries into QueryHandler (#507) ([`1cfcc56`](https://github.com/python-zeroconf/python-zeroconf/commit/1cfcc5636a845924eb683ad4acf4d9a36ef85fb7)) - -* Update changelog for zeroconf.asyncio -> zeroconf.aio (#506) ([`26b7005`](https://github.com/python-zeroconf/python-zeroconf/commit/26b70050ffe7dee4fb34428f285be377d1d8f210)) - -* Rename zeroconf.asyncio to zeroconf.aio (#503) - -- The asyncio name could shadow system asyncio in some cases. If - zeroconf is in sys.path, this would result in loading zeroconf.asyncio - when system asyncio was intended. - -- A `zeroconf.asyncio` shim module has been added that imports `zeroconf.aio` - that was available in 0.31 to provide backwards compatibility in 0.32. - This module will be removed in 0.33 to fix the underlying problem - detailed in #502 ([`bfca3b4`](https://github.com/python-zeroconf/python-zeroconf/commit/bfca3b46fd9a395f387bd90b68c523a3ca84bde4)) - -* Update changelog, move breaking changes to the top of the list (#501) ([`9b480bc`](https://github.com/python-zeroconf/python-zeroconf/commit/9b480bc1abb2c2702f60796f2edae76ce03ca5d4)) - -* Set the TC bit for query packets where the known answers span multiple packets (#494) ([`f04a2eb`](https://github.com/python-zeroconf/python-zeroconf/commit/f04a2eb43745eba7c43c9c56179ed1fceb992bd8)) - -* Ensure packets are properly separated when exceeding maximum size (#498) - -- Ensure that questions that exceed the max packet size are - moved to the next packet. 
This fixes DNSQuestions being - sent in multiple packets in violation of: - https://datatracker.ietf.org/doc/html/rfc6762#section-7.2 - -- Ensure only one resource record is sent when a record - exceeds _MAX_MSG_TYPICAL - https://datatracker.ietf.org/doc/html/rfc6762#section-17 ([`e2908c6`](https://github.com/python-zeroconf/python-zeroconf/commit/e2908c6c89802ba7a0ea51ac351da40bce3f1cb6)) - -* Make a base class for DNSIncoming and DNSOutgoing (#497) ([`38e4b42`](https://github.com/python-zeroconf/python-zeroconf/commit/38e4b42b847e700db52bc51973210efc485d8c23)) - -* Update internal version check to match docs (3.6+) (#491) ([`20f8b3d`](https://github.com/python-zeroconf/python-zeroconf/commit/20f8b3d6fb8d117b0c3c794c4075a00e117e3f31)) - -* Remove unused __ne__ code from Python 2 era (#492) ([`f0c02a0`](https://github.com/python-zeroconf/python-zeroconf/commit/f0c02a02c1a2d7c914c62479bad4957b06471661)) - -* Lint before testing in the CI (#488) ([`69880ae`](https://github.com/python-zeroconf/python-zeroconf/commit/69880ae6ca4d4f0a7d476b0271b89adea92b9389)) - -* Add AsyncServiceBrowser example (#487) ([`ef9334f`](https://github.com/python-zeroconf/python-zeroconf/commit/ef9334f1279d029752186bc6f4a1ebff6229bf5b)) - -* Move threading daemon property into ServiceBrowser class (#486) ([`275765a`](https://github.com/python-zeroconf/python-zeroconf/commit/275765a4fd3b477b79163c04f6411709e14506b9)) - -* Enable test_integration_with_listener_class test on PyPy (#485) ([`49db96d`](https://github.com/python-zeroconf/python-zeroconf/commit/49db96dae466a602662f4fde1537f62a8c8d3110)) - -* RecordUpdateListener now uses update_records instead of update_record (#419) ([`0a69aa0`](https://github.com/python-zeroconf/python-zeroconf/commit/0a69aa0d37e13cb2c65ceb5cc3ab0fd7e9d34b22)) - -* AsyncServiceBrowser must recheck for handlers to call when holding condition (#483) - -- There was a short race condition window where the AsyncServiceBrowser - could add to _handlers_to_call in the 
Engine thread, have the - condition notify_all called, but since the AsyncServiceBrowser was - not yet holding the condition it would not know to stop waiting - and process the handlers to call. ([`9606936`](https://github.com/python-zeroconf/python-zeroconf/commit/960693628006e23fd13fcaefef915ca0c84401b9)) - -* Relocate ServiceBrowser wait time calculation to separate function (#484) - -- Eliminate the need to duplicate code between the ServiceBrowser - and AsyncServiceBrowser to calculate the wait time. ([`9c06ce1`](https://github.com/python-zeroconf/python-zeroconf/commit/9c06ce15db31ebffe3a556896393d48cb786b5d9)) - -* Switch from using an asyncio.Event to asyncio.Condition for waiting (#482) ([`393910b`](https://github.com/python-zeroconf/python-zeroconf/commit/393910b67ac667a660ee9351cc8f94310937f654)) - -* ServiceBrowser must recheck for handlers to call when holding condition (#477) ([`8da00ca`](https://github.com/python-zeroconf/python-zeroconf/commit/8da00caf31e007153e10a8038a0a484edea03c2f)) - -* Provide a helper function to convert milliseconds to seconds (#481) ([`849e9bc`](https://github.com/python-zeroconf/python-zeroconf/commit/849e9bc792c6cc77b879b4761195192bea1720ce)) - -* Fix AsyncServiceInfo.async_request not waiting long enough (#480) - -- The call to async_wait should have been in milliseconds, but - the time was being passed in seconds which resulted in waiting - 1000x shorter ([`b0c0cdc`](https://github.com/python-zeroconf/python-zeroconf/commit/b0c0cdc6779dc095cf03ebd92652af69800b7bca)) - -* Add support for updating multiple records at once to ServiceInfo (#474) - -- Adds `update_records` method to `ServiceInfo` ([`ed53f62`](https://github.com/python-zeroconf/python-zeroconf/commit/ed53f6283265eb8fb506d4af8fb31bd4eaa7292b)) - -* Narrow exception catch in DNSAddress.__repr__ to only expected exceptions (#473) ([`b853413`](https://github.com/python-zeroconf/python-zeroconf/commit/b8534130ec31a6be191fcc60615ab2fd02fd8d7a)) - -* Add test 
coverage to ensure ServiceInfo rejects expired records (#468) ([`d0f5a60`](https://github.com/python-zeroconf/python-zeroconf/commit/d0f5a60275ccf810407055c63ca9080fa6654443)) - -* Reduce branching in service_type_name (#472) ([`00af5ad`](https://github.com/python-zeroconf/python-zeroconf/commit/00af5adc4be76afd23135d37653119f45c57a531)) - -* Fix flakey test_update_record (#470) ([`1eaeef2`](https://github.com/python-zeroconf/python-zeroconf/commit/1eaeef2d6f07efba67e91699529f8361226233ce)) - -* Reduce branching in Zeroconf.handle_response (#467) - -- Adds `add_records` and `remove_records` to `DNSCache` to - permit multiple records to be added or removed in one call - -- This change is not enough to remove the too-many-branches - pylint disable, however when combined with #419 it should - no longer be needed ([`8a9ae29`](https://github.com/python-zeroconf/python-zeroconf/commit/8a9ae29b6f6643f3625938ac44df66dcc556de46)) - -* Ensure PTR questions asked in uppercase are answered (#465) ([`7a50402`](https://github.com/python-zeroconf/python-zeroconf/commit/7a5040247cbaad6bed3fc1204820dfc31ed9b0ae)) - -* Clear cache between ServiceTypesQuery tests (#466) - -- Ensures the test relies on the ZeroconfServiceTypes.find making - the correct calls instead of the cache from the previous call ([`c3365e1`](https://github.com/python-zeroconf/python-zeroconf/commit/c3365e1fd060cebc63cc42443260bd785077c246)) - -* Break apart Zeroconf.handle_query to reduce branching (#462) ([`c1ed987`](https://github.com/python-zeroconf/python-zeroconf/commit/c1ed987ede34b0049e6466e673b1629d7cd0cd6a)) - -* Support for context managers in Zeroconf and AsyncZeroconf (#284) - -Co-authored-by: J. 
Nick Koston ([`4c4b529`](https://github.com/python-zeroconf/python-zeroconf/commit/4c4b529c841f015108a7489bd8f3b92a5e57e827)) - -* Use constant for service type enumeration (#461) ([`558cec3`](https://github.com/python-zeroconf/python-zeroconf/commit/558cec3687ac7e7f494ab7aa4ce574c1e784b81f)) - -* Reduce branching in Zeroconf.handle_response (#459) ([`ceb0def`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb0def1b43f2e55bb17e33d13d4efdaa055221c)) - -* Reduce branching in Zeroconf.handle_query (#460) ([`5e24da0`](https://github.com/python-zeroconf/python-zeroconf/commit/5e24da08bc463bf79b27eb3768ec01755804f403)) - -* Enable pylint (#438) ([`6fafdee`](https://github.com/python-zeroconf/python-zeroconf/commit/6fafdee241571d68937e29ee0a2b1bd5ef0038d9)) - -* Trap OSError directly in Zeroconf.send instead of checking isinstance (#453) - -- Fixes: Instance of 'Exception' has no 'errno' member (no-member) ([`9510808`](https://github.com/python-zeroconf/python-zeroconf/commit/9510808cfd334b0b2f6381da8214225c4cfbf6a0)) - -* Disable protected-access on the ServiceBrowser usage of _handlers_lock (#452) - -- This will be fixed in https://github.com/jstasiak/python-zeroconf/pull/419 ([`69c4cf6`](https://github.com/python-zeroconf/python-zeroconf/commit/69c4cf69bbc34474e70eac3ad0fe905be7ab4eb4)) - -* Mark functions with too many branches in need of refactoring (#455) ([`5fce89d`](https://github.com/python-zeroconf/python-zeroconf/commit/5fce89db2707b163231aec216e4c4fc310527e4c)) - -* Disable pylint no-self-use check on abstract methods (#451) ([`7544cdf`](https://github.com/python-zeroconf/python-zeroconf/commit/7544cdf956c4eeb4b688729432ba87278f606b7c)) - -* Use unique name in test_async_service_browser test (#450) ([`f26a92b`](https://github.com/python-zeroconf/python-zeroconf/commit/f26a92bc2abe61f5a2b5acd76991f81d07452201)) - -* Disable no-member check for WSAEINVAL false positive (#454) 
([`ef0cf8e`](https://github.com/python-zeroconf/python-zeroconf/commit/ef0cf8e393a8ffdccb3cd2094a8764f707f518c1)) - -* Mark methods used by asyncio without self use (#447) ([`7e03f83`](https://github.com/python-zeroconf/python-zeroconf/commit/7e03f836dd7a4ee938bfff21cd150e863f608b5e)) - -* Extract _get_queue from _AsyncSender (#444) ([`18851ed`](https://github.com/python-zeroconf/python-zeroconf/commit/18851ed4c0f605996798472e1a68dded16d41ff6)) - -* Add missing update_service method to ZeroconfServiceTypes (#449) ([`ffc6cbb`](https://github.com/python-zeroconf/python-zeroconf/commit/ffc6cbb94d7401a70ebd6f747ed6c5e56e528bb0)) - -* Fix redefining argument with the local name 'record' in ServiceInfo.update_record (#448) ([`929ba12`](https://github.com/python-zeroconf/python-zeroconf/commit/929ba12d046496782491d96160e6cb8d0d04cfe5)) - -* Remove unneeded-not in new_socket (#445) ([`424c002`](https://github.com/python-zeroconf/python-zeroconf/commit/424c00257083f1d091a52ff0c966b306eea70efb)) - -* Disable broad except checks in places we still catch broad exceptions (#443) ([`6002c9c`](https://github.com/python-zeroconf/python-zeroconf/commit/6002c9c88a9a49814f86070c07925f798a61461a)) - -* Merge _TYPE_CNAME and _TYPE_PTR comparison in DNSIncoming.read_others (#442) ([`41be4f4`](https://github.com/python-zeroconf/python-zeroconf/commit/41be4f4db0501adb9fbaa6b353fbcb36a45e6e21)) - -* Convert unnecessary use of a comprehension to a list (#441) ([`a70370a`](https://github.com/python-zeroconf/python-zeroconf/commit/a70370a0f653df911cc6f641522cec0fcc8471a3)) - -* Remove unused now argument from ServiceInfo._process_record (#440) ([`594da70`](https://github.com/python-zeroconf/python-zeroconf/commit/594da709273c2e0a53fee2f9ad7fcec607ad0868)) - -* Disable pylint too-many-branches for functions that need refactoring (#439) ([`4bcb698`](https://github.com/python-zeroconf/python-zeroconf/commit/4bcb698bda0ec7266d5e454b5e81a07eb64be32a)) - -* Cleanup unused variables (#437) 
([`8412eb7`](https://github.com/python-zeroconf/python-zeroconf/commit/8412eb791dd5ad1c287c1d7cc24c5db75a5291b7)) - -* Cleanup unnecessary else after returns (#436) ([`1d3f986`](https://github.com/python-zeroconf/python-zeroconf/commit/1d3f986e00e18682c209cecbdea2481f4ca987b5)) - -* Update changelog for latest changes (#435) ([`6737e13`](https://github.com/python-zeroconf/python-zeroconf/commit/6737e13d8e6227b96d5cc0e776c62889b7dc4fd3)) - -* Add zeroconf.asyncio to the docs (#434) ([`5460cae`](https://github.com/python-zeroconf/python-zeroconf/commit/5460caef83b5cdb9c5d637741ed95dea6b328f08)) - -* Fix warning when generating sphinx docs (#432) - -- `docstring of zeroconf.ServiceInfo:5: WARNING: Unknown target name: "type".` ([`e5a0c9a`](https://github.com/python-zeroconf/python-zeroconf/commit/e5a0c9a45df93a668f3611ddf5c41a1800cb4556)) - -* Implement an AsyncServiceBrowser to complement the sync ServiceBrowser (#429) ([`415a7b7`](https://github.com/python-zeroconf/python-zeroconf/commit/415a7b762030e9d236bef71f39156686a0b277f9)) - -* Separate non-thread specific code from ServiceBrowser into _ServiceBrowserBase (#428) ([`e7b2bb5`](https://github.com/python-zeroconf/python-zeroconf/commit/e7b2bb5e351f04f4f1e14ef5a20ed2111f8097c4)) - -* Remove is_type_unique as it is unused (#426) ([`e68e337`](https://github.com/python-zeroconf/python-zeroconf/commit/e68e337cd482e06a422b2d2e2e6ae12ce1673ce5)) - -* Avoid checking the registry when answering requests for _services._dns-sd._udp.local. (#425) - -- _services._dns-sd._udp.local. 
is a special case and should never - be in the registry ([`47e266e`](https://github.com/python-zeroconf/python-zeroconf/commit/47e266eb66be36b355f1738cd4d2f7369712b7b3)) - -* Remove unused argument from ServiceInfo.dns_addresses (#423) - -- This should always return all addresses since it's _CLASS_UNIQUE ([`fc97e5c`](https://github.com/python-zeroconf/python-zeroconf/commit/fc97e5c3ad35da789373a1898c00efe0f13a3b5f)) - -* Add methods to generate DNSRecords from ServiceInfo (#422) ([`41de419`](https://github.com/python-zeroconf/python-zeroconf/commit/41de419453c0679c5a04ec248339783afbeb0e4f)) - -* Separate logic for consuming records in ServiceInfo (#421) ([`8bca030`](https://github.com/python-zeroconf/python-zeroconf/commit/8bca0305deae0db8ced7e213be3aaee975985c56)) - -* Separate query generation for ServiceBrowser (#420) ([`58cfcf0`](https://github.com/python-zeroconf/python-zeroconf/commit/58cfcf0c902b5e27937f118bf4f7a855db635301)) - -* Add async_request example with browse (#415) ([`7f08826`](https://github.com/python-zeroconf/python-zeroconf/commit/7f08826c03b7997758ff0236834bf6f1a091c558)) - -* Add async_register_service/async_unregister_service example (#414) ([`71cfbcb`](https://github.com/python-zeroconf/python-zeroconf/commit/71cfbcb85bdd5948f1b96a871b10e9e35ab76c3b)) - -* Update changelog for 0.32.0 (#411) ([`bb83edf`](https://github.com/python-zeroconf/python-zeroconf/commit/bb83edfbca339fb6ec20b821d79b171220f5e675)) - -* Add async_get_service_info to AsyncZeroconf and async_request to AsyncServiceInfo (#408) ([`0fa049c`](https://github.com/python-zeroconf/python-zeroconf/commit/0fa049c2e0f5e9f18830583a8df2736630c891e2)) - -* Add async_wait function to AsyncZeroconf (#410) ([`53306e1`](https://github.com/python-zeroconf/python-zeroconf/commit/53306e1b99d9133590d47081994ee77cef468828)) - -* Add support for registering notify listeners (#409) - -- Notify listeners will be used by AsyncZeroconf to set - asyncio.Event objects when new data is received - -- 
Registering a notify listener: - notify_listener = YourNotifyListener() - Use zeroconf.add_notify_listener(notify_listener) - -- Unregistering a notify listener: - Use zeroconf.remove_notify_listener(notify_listener) - -- Notify listeners must inherit from the NotifyListener - class ([`745087b`](https://github.com/python-zeroconf/python-zeroconf/commit/745087b234dd5ff65b4b041a7221d58030a69cdd)) - -* Remove unreachable code in ServiceInfo.get_name (#407) ([`ff31f38`](https://github.com/python-zeroconf/python-zeroconf/commit/ff31f386273fbe9fd0b466bbe5f724c815745215)) - -* Allow passing in a sync Zeroconf instance to AsyncZeroconf (#406) - -- Uses the same pattern as ZeroconfServiceTypes.find ([`2da6198`](https://github.com/python-zeroconf/python-zeroconf/commit/2da6198b2e60a598580637e80b3bd579c1f845a5)) - -* Use a dedicated thread for sending outgoing packets with asyncio (#404) - -- Sends now go into a queue and are processed by the thread FIFO - -- Avoids overwhelming the executor when registering multiple - services in parallel ([`1e7b46c`](https://github.com/python-zeroconf/python-zeroconf/commit/1e7b46c36f6e0735b44d3edd9740891a2dc0c761)) - -* Separate query generation for Zeroconf (#403) - -- Will be used to send the query in asyncio ([`e753078`](https://github.com/python-zeroconf/python-zeroconf/commit/e753078f0345fa28ffceb8de69542c8549d2994c)) - -* Separate query generation in ServiceInfo (#401) ([`bddf69c`](https://github.com/python-zeroconf/python-zeroconf/commit/bddf69c0839eda966376987a8c4a1fbe3d865529)) - -* Remove unreachable code in ServiceInfo (part 2) (#402) - -- self.server is never None ([`4ae27be`](https://github.com/python-zeroconf/python-zeroconf/commit/4ae27beba29c6e9ac1782f40eadda584b4722af7)) - -* Remove unreachable code in ServiceInfo (#400) - -- self.server is never None ([`dd63835`](https://github.com/python-zeroconf/python-zeroconf/commit/dd6383589b161e828def0ed029519a645e434512)) - -* Update changelog with latest changes (#394) 
([`a6010a9`](https://github.com/python-zeroconf/python-zeroconf/commit/a6010a94b626a9a1585cc47417c08516020729d7)) - -* Add test coverage for multiple AAAA records (#391) ([`acf174d`](https://github.com/python-zeroconf/python-zeroconf/commit/acf174db93ee60f1a80d501eb691d9cb434a90b7)) - -* Enable IPv6 in the CI (#393) ([`ec2fafd`](https://github.com/python-zeroconf/python-zeroconf/commit/ec2fafd904cd2d341a3815fcf6d34508dcddda5a)) - -* Fix IPv6 setup under MacOS when binding to "" (#392) - -- Setting IP_MULTICAST_TTL and IP_MULTICAST_LOOP does not work under - MacOS when the bind address is "" ([`d67d5f4`](https://github.com/python-zeroconf/python-zeroconf/commit/d67d5f41effff4c01735de0ae64ed25a5dbe7567)) - -* Update changelog for 0.32.0 (Unreleased) (#390) ([`33a3a6a`](https://github.com/python-zeroconf/python-zeroconf/commit/33a3a6ae42ef8c4ea0f606ad2a02df3f6bc13752)) - -* Ensure ZeroconfServiceTypes.find always cancels the ServiceBrowser (#389) ([`8f4d2e8`](https://github.com/python-zeroconf/python-zeroconf/commit/8f4d2e858a5efadeb33120322c1169f3ce7d6e0c)) - -* Fix flapping test: test_update_record (#388) ([`ba8d8e3`](https://github.com/python-zeroconf/python-zeroconf/commit/ba8d8e3e658c71e0d603db3f4c5bdfe8e508710a)) - -* Simplify DNSPointer processing in ServiceBrowser (#386) ([`709bd9a`](https://github.com/python-zeroconf/python-zeroconf/commit/709bd9abae63cf566220693501cd37cf74391ccf)) - -* Ensure listeners do not miss initial packets if Engine starts too quickly (#387) ([`62a02d7`](https://github.com/python-zeroconf/python-zeroconf/commit/62a02d774fd874340fa3043bd3bf260a77ffe3d8)) - -* Update changelog with latest commits (#384) ([`69d9357`](https://github.com/python-zeroconf/python-zeroconf/commit/69d9357b3dae7a99d302bf4ad71d4ed45cbe3e42)) - -* Ensure the cache is checked for name conflict after final service query with asyncio (#382) - -- The check was not happening after the last query 
([`5057f97`](https://github.com/python-zeroconf/python-zeroconf/commit/5057f97b9b724c041d2bee65972fe3637bf04f0b)) - -* Fix multiple unclosed instances in tests (#383) ([`69a79b9`](https://github.com/python-zeroconf/python-zeroconf/commit/69a79b9fd48a24d311520e228c78b2aae52d1dd5)) - -* Update changelog with latest merges (#381) ([`2b502bc`](https://github.com/python-zeroconf/python-zeroconf/commit/2b502bc2e21efa2f840c42ed79f850b276a8c103)) - -* Complete ServiceInfo request as soon as all questions are answered (#380) - -- Closes a small race condition where there were no questions - to ask because the cache was populated in between checks ([`3afa5c1`](https://github.com/python-zeroconf/python-zeroconf/commit/3afa5c13f2be956505428c5b01f6ce507845131a)) - -* Coalesce browser questions scheduled at the same time (#379) - -- With multiple types, the ServiceBrowser questions can be - chatty because it would generate a question packet for - each type. If multiple types are due to be requested, - try to combine the questions into a single outgoing - packet(s) ([`60c1895`](https://github.com/python-zeroconf/python-zeroconf/commit/60c1895e67a6147ab8c6ba7d21d4fe5adec3e590)) - -* Bump version to 0.31.0 to match released version (#378) ([`23442d2`](https://github.com/python-zeroconf/python-zeroconf/commit/23442d2e5a0336a64646cb70f2ce389746744ce0)) - -* Update changelog with latest merges (#377) ([`5535ea8`](https://github.com/python-zeroconf/python-zeroconf/commit/5535ea8c365557681721fdafdcabfc342c75daf5)) - -* Ensure duplicate packets do not trigger duplicate updates (#376) - -- If TXT or SRV records update was already processed and then - received again, it was possible for a second update to be - called back in the ServiceBrowser ([`b158b1c`](https://github.com/python-zeroconf/python-zeroconf/commit/b158b1cff31620d5cf27969e475d788332f4b38c)) - -* Only trigger a ServiceStateChange.Updated event when an ip address is added (#375) 
([`5133742`](https://github.com/python-zeroconf/python-zeroconf/commit/51337425c9be08d59d496c6783d07d5e4e2382d4)) - -* Fix RFC6762 Section 10.2 paragraph 2 compliance (#374) ([`03f2eb6`](https://github.com/python-zeroconf/python-zeroconf/commit/03f2eb688859a78807305771d04b216e20e72064)) - -* Reduce length of ServiceBrowser thread name with many types (#373) - -- Before - -"zeroconf-ServiceBrowser__ssh._tcp.local.-_enphase-envoy._tcp.local.-_hap._udp.local." -"-_nut._tcp.local.-_Volumio._tcp.local.-_kizbox._tcp.local.-_home-assistant._tcp.local." -"-_viziocast._tcp.local.-_dvl-deviceapi._tcp.local.-_ipp._tcp.local.-_touch-able._tcp.local." -"-_hap._tcp.local.-_system-bridge._udp.local.-_dkapi._tcp.local.-_airplay._tcp.local." -"-_elg._tcp.local.-_miio._udp.local.-_wled._tcp.local.-_esphomelib._tcp.local." -"-_ipps._tcp.local.-_fbx-api._tcp.local.-_xbmc-jsonrpc-h._tcp.local.-_powerview._tcp.local." -"-_spotify-connect._tcp.local.-_leap._tcp.local.-_api._udp.local.-_plugwise._tcp.local." -"-_googlecast._tcp.local.-_printer._tcp.local.-_axis-video._tcp.local.-_http._tcp.local." 
-"-_mediaremotetv._tcp.local.-_homekit._tcp.local.-_bond._tcp.local.-_daap._tcp.local._243" - -- After - -"zeroconf-ServiceBrowser-_miio._udp-_mediaremotetv._tcp-_dvl-deviceapi._tcp-_ipp._tcp" -"-_dkapi._tcp-_hap._udp-_xbmc-jsonrpc-h._tcp-_hap._tcp-_googlecast._tcp-_airplay._tcp" -"-_viziocast._tcp-_api._udp-_kizbox._tcp-_spotify-connect._tcp-_home-assistant._tcp" -"-_bond._tcp-_powerview._tcp-_daap._tcp-_http._tcp-_leap._tcp-_elg._tcp-_homekit._tcp" -"-_ipps._tcp-_plugwise._tcp-_ssh._tcp-_esphomelib._tcp-_Volumio._tcp-_fbx-api._tcp" -"-_wled._tcp-_touch-able._tcp-_enphase-envoy._tcp-_axis-video._tcp-_printer._tcp" -"-_system-bridge._udp-_nut._tcp-244" ([`5d4aa28`](https://github.com/python-zeroconf/python-zeroconf/commit/5d4aa2800d1196274cfdd0bf3e631f49ab5b78bd)) - -* Update changelog for 0.32.0 (unreleased) (#372) ([`82fb26f`](https://github.com/python-zeroconf/python-zeroconf/commit/82fb26f14518a8e59f886b8d7b0708a68725bf48)) - -* Remove Callable quoting (#371) - -- The current minimum supported cpython is 3.6+ which does not need - the quoting ([`7f45bef`](https://github.com/python-zeroconf/python-zeroconf/commit/7f45bef8db444b0436c5f80b4f4b31b2f1d7ec2f)) - -* Abstract check to see if a record matches a type the ServiceBrowser wants (#369) ([`4819ef8`](https://github.com/python-zeroconf/python-zeroconf/commit/4819ef8c97ddbbadcd6e7cf1b5fee36f573bde45)) - -* Reduce complexity of ServiceBrowser enqueue_callback (#368) - -- The handler key was by name, however ServiceBrowser can have multiple - types which meant the check to see if a state change was an add - remove, or update was overly complex. 
Reduce the complexity by - making the key (name, type_) ([`4657a77`](https://github.com/python-zeroconf/python-zeroconf/commit/4657a773690a34c897c80894a10ac33b6edadf8b)) - -* Fix empty answers being added in ServiceInfo.request (#367) ([`5a4c1e4`](https://github.com/python-zeroconf/python-zeroconf/commit/5a4c1e46510956276de117d86bee9d2ccb602802)) - -* Ensure ServiceInfo populates all AAAA records (#366) - -- Use get_all_by_details to ensure all records are loaded - into addresses. - -- Only load A/AAAA records from cache once in load_from_cache - if there is a SRV record present - -- Move duplicate code that checked if the ServiceInfo was complete - into its own function ([`bae3a9b`](https://github.com/python-zeroconf/python-zeroconf/commit/bae3a9b97672581e77255c4937b815173c8547b4)) - -* Remove black python 3.5 exception block (#365) ([`6d29e6c`](https://github.com/python-zeroconf/python-zeroconf/commit/6d29e6c93bdcf6cf31fcfa133258257704945dfc)) - -* Small cleanup of ServiceInfo.update_record (#364) - -- Return as record is not viable (None or expired) - -- Switch checks to isinstance since its needed by mypy anyways - -- Prepares for supporting multiple AAAA records (via https://github.com/jstasiak/python-zeroconf/pull/361) ([`1b8b291`](https://github.com/python-zeroconf/python-zeroconf/commit/1b8b2917e7e70e3996e9a96204dd5df3dfb39072)) - -* Add new cache function get_all_by_details (#363) - -- When working with IPv6, multiple AAAA records can exist - for a given host. get_by_details would only return the - latest record in the cache. 
- -- Fix a case where the cache list can change during - iteration ([`d8c3240`](https://github.com/python-zeroconf/python-zeroconf/commit/d8c32401ada4f430cd75617324b6d8ecd1dbe1f2)) - -* Small cleanups to asyncio tests (#362) ([`7e960b7`](https://github.com/python-zeroconf/python-zeroconf/commit/7e960b78cac8008beca9c5451c6d465e2674a050)) - -* Improve test coverage for name conflicts (#357) ([`c0674e9`](https://github.com/python-zeroconf/python-zeroconf/commit/c0674e97aee4f61212389337340fc8ff4472eb25)) - -* Return task objects created by AsyncZeroconf (#360) ([`8c1c394`](https://github.com/python-zeroconf/python-zeroconf/commit/8c1c394e9b4aa01e08a2c3e240396b533792be55)) - -* Separate cache loading from I/O in ServiceInfo (#356) - -Provides a load_from_cache method on ServiceInfo that does no I/O - -- When a ServiceBrowser is running for a type there is no need - to make queries on the network since the entries will already - be in the cache. When discovering many devices making queries - that will almost certainly fail for offline devices delays the - startup of online devices. - -- The DNSEntry and ServiceInfo classes were matching on the name - instead of the key (lowercase name). These classes now treat dns - names the same regardless of case. - - https://datatracker.ietf.org/doc/html/rfc6762#section-16 - > The simple rules for case-insensitivity in Unicast DNS [RFC1034] - > [RFC1035] also apply in Multicast DNS; that is to say, in name - > comparisons, the lowercase letters "a" to "z" (0x61 to 0x7A) match - > their uppercase equivalents "A" to "Z" (0x41 to 0x5A). Hence, if a - > querier issues a query for an address record with the name - > "myprinter.local.", then a responder having an address record with - > the name "MyPrinter.local." should issue a response. 
([`87ba2a3`](https://github.com/python-zeroconf/python-zeroconf/commit/87ba2a3960576cfcf4207ea74a711b2c0cc584a7)) - -* Provide an asyncio class for service registration (#347) - -* Provide an AIO wrapper for service registration - -- When using zeroconf with async code, service registration can cause the - executor to overload when registering multiple services since each one - will have to wait a bit between sending the broadcast. An aio subclass - is now available as aio.AsyncZeroconf that implements the following - - - async_register_service - - async_unregister_service - - async_update_service - - async_close - - I/O is currently run in the executor to provide backwards compat with - existing use cases. - - These functions avoid overloading the executor by waiting in the event - loop instead of the executor threads. ([`a41d7b8`](https://github.com/python-zeroconf/python-zeroconf/commit/a41d7b8aa5572f3faf29eb087cc18a1343bbcdfa)) - -* Eliminate the reaper thread (#349) - -- Cache is now purged between reads when the interval is reached - -- Reduce locking since we are already making a copy of the readers - and not reading under the lock - -- Simplify shutdown process ([`7816278`](https://github.com/python-zeroconf/python-zeroconf/commit/781627864efbb3c8285e1b75144d688083414cf3)) - -* Return early when already closed (#350) - -- Reduce indentation with a return early guard in close ([`523aefb`](https://github.com/python-zeroconf/python-zeroconf/commit/523aefb0b0c477489e4e1e4ab763ce56c57295b7)) - -* Skip socket creation if add_multicast_member fails (windows) (#341) - -Co-authored-by: Timothee 'TTimo' Besset ([`beccad1`](https://github.com/python-zeroconf/python-zeroconf/commit/beccad1f0b41730f541b2e90ea2eaa2496de5044)) - -* Simplify cache iteration (#340) - -- Remove the need to trap runtime error -- Only copy the names of the keys when iterating the cache -- Fixes RuntimeError: list changed size during iterating entries_from_name -- Cache services -- The Reaper 
thread is no longer aware of the cache internals ([`fe94810`](https://github.com/python-zeroconf/python-zeroconf/commit/fe948105cc0923336ffa6d93cbe7d45470612a36)) - - -## v0.29.0 (2021-03-25) - -### Unknown - -* Release version 0.29.0 ([`203ec2e`](https://github.com/python-zeroconf/python-zeroconf/commit/203ec2e26e6f0f676e7d88b4a1b0c80ad74659f1)) - -* Fill a missing changelog entry ([`53cb804`](https://github.com/python-zeroconf/python-zeroconf/commit/53cb8044bfb4256f570d438817fd37acc8b78511)) - -* Make mypy configuration more lenient - -We want to be able to call untyped modules. ([`f871b90`](https://github.com/python-zeroconf/python-zeroconf/commit/f871b90d25c0f788590ceb14237b08a6b5e6eeeb)) - -* Silence a flaky test on PyPy ([`bc6ef8c`](https://github.com/python-zeroconf/python-zeroconf/commit/bc6ef8c65b22d982798104d5bdf11b78746a8ddd)) - -* Silence a mypy false-positive ([`6482da0`](https://github.com/python-zeroconf/python-zeroconf/commit/6482da05344e6ae8c4da440da4a704a20c344bb6)) - -* Switch from Travis CI/Coveralls to GH Actions/Codecov - -Travis CI free tier is going away and Codecov is my go-to code coverage -service now. - -Closes GH-332. ([`bd80d20`](https://github.com/python-zeroconf/python-zeroconf/commit/bd80d20682c0af5e15a4b7102dcfe814cdba3a01)) - -* Drop Python 3.5 compatibility, it reached its end of life ([`ab67a7a`](https://github.com/python-zeroconf/python-zeroconf/commit/ab67a7aecd63042178061f0d1a76f9a7f6e1559a)) - -* Use a single socket for InterfaceChoice.Default - -When using multiple sockets with multi-cast, the outgoing -socket's responses could be read back on the incoming socket, -which leads to duplicate processing and could fill up the -incoming buffer before it could be processed. - -This behavior manifested with an error similar to -`OSError: [Errno 105] No buffer space available` - -By using a single socket with InterfaceChoice.Default -we avoid this case. 
([`6beefbb`](https://github.com/python-zeroconf/python-zeroconf/commit/6beefbbe76a0e261394b308c8cc68545be653019)) - -* Simplify read_name - -(venv) root@ha-dev:~/python-zeroconf# python3 -m timeit -s 'result=""' -u usec 'result = "".join((result, "thisisaname" + "."))' -20000 loops, best of 5: 16.4 usec per loop -(venv) root@ha-dev:~/python-zeroconf# python3 -m timeit -s 'result=""' -u usec 'result += "thisisaname" + "."' -2000000 loops, best of 5: 0.105 usec per loop ([`5e268fa`](https://github.com/python-zeroconf/python-zeroconf/commit/5e268faeaa99f0a513c7bbeda8f447f4eb36a747)) - -* Fix link to readme md --> rst (#324) ([`c5a675d`](https://github.com/python-zeroconf/python-zeroconf/commit/c5a675d22788aa905a4e47feb1d4c30f30416356)) - - -## v0.28.8 (2021-01-04) - -### Unknown - -* Release version 0.28.8 ([`1d726b5`](https://github.com/python-zeroconf/python-zeroconf/commit/1d726b551a49e945b134df6e29b352697030c5a9)) - -* Ensure the name cache is rolled back when the packet reaches maximum size - -If the packet was too large, it would be rolled back at the end of write_record. -We need to remove the names that were added to the name cache (self.names) -as well to avoid a case where we would create a pointer to a name that was -rolled back. - -The size of the packet was incorrect at the end after the inserts because -insert_short would increase self.size even though it was already accounted -before. To resolve this insert_short_at_start was added which does not -increase self.size. This did not cause an actual bug, however it sure -made debugging this problem far more difficult. - -Additionally the size is now inserted and then replaced when the actual -size is known because it made debugging quite difficult since the size -did not previously agree with the data.
([`86b4e11`](https://github.com/python-zeroconf/python-zeroconf/commit/86b4e11434d44e2f9a42354109a10f601c44d66a)) - - -## v0.28.7 (2020-12-13) - -### Unknown - -* Release version 0.28.7 ([`8f7effd`](https://github.com/python-zeroconf/python-zeroconf/commit/8f7effd2f89c542162d0e5ac257c561501690d16)) - -* Refactor to move service registration into a registry - -This permits removing the broad exception catch that -was expanded to avoid a crash when adding or -removing a service ([`2708fef`](https://github.com/python-zeroconf/python-zeroconf/commit/2708fef6052f7e6e6eb36a157438b316e6d38b21)) - -* Prevent crash when a service is added or removed during handle_response - -Services are now modified under a lock. The service processing -is now done in a try block to ensure RuntimeError is caught -which prevents the zeroconf engine from unexpectedly -terminating. ([`4136858`](https://github.com/python-zeroconf/python-zeroconf/commit/41368588e5fcc6ec9596f306e39e2eaac2a9ec18)) - -* Restore IPv6 addresses output - -Before this change, script `examples/browser.py` printed IPv4 only, even with `--v6` argument. -With this change, `examples/browser.py` prints both IPv4 + IPv6 by default, and IPv6 only with `--v6-only` argument. - -I took the idea from the fork -https://github.com/ad3angel1s/python-zeroconf/blob/master/examples/browser.py ([`4da1612`](https://github.com/python-zeroconf/python-zeroconf/commit/4da1612b728acbcf2ab0c4bee09891c46f387bfb)) - - -## v0.28.6 (2020-10-13) - -### Unknown - -* Release version 0.28.6 ([`4744427`](https://github.com/python-zeroconf/python-zeroconf/commit/474442750d5d529436a118fda98a0b5f4680dc4d)) - -* Merge strict and allow_underscores (#309) - -Those really serve the same purpose -- are we receiving data (and want -to be flexible) or registering services (and want to be strict).
([`6a0c5dd`](https://github.com/python-zeroconf/python-zeroconf/commit/6a0c5dd4e84c30264747847e8f1045ece2a14288)) - -* Loosen validation to ensure get_service_info can handle production devices (#307) - -Validation of names was too strict and rejected devices that are otherwise -functional. A partial list of devices that unexpectedly triggered -a BadTypeInNameException: - - Bose Soundtouch - Yeelights - Rachio Sprinklers - iDevices ([`6ab0cd0`](https://github.com/python-zeroconf/python-zeroconf/commit/6ab0cd0a0446f158a1d8a64a3bc548cf9e103179)) - - -## v0.28.5 (2020-09-11) - -### Unknown - -* Release version 0.28.5 ([`eda1b3d`](https://github.com/python-zeroconf/python-zeroconf/commit/eda1b3dd17329c40a59b628b4bbca15c42af43b7)) - -* Fix AttributeError: module 'unittest' has no attribute 'mock' (#302) - -We only had module-level unittest import before now, but code accessing -mock through unittest.mock was working because we have a test-level -import from unittest.mock which causes unittest to gain the mock -attribute and if the test was run before other tests (those using -unittest.mock.patch) all was good. If the test was not run before them, -though, they'd fail. - -Closes GH-295. ([`2db7fff`](https://github.com/python-zeroconf/python-zeroconf/commit/2db7fff033937a929cdfee1fc7c93c594872799e)) - -* Ignore duplicate messages (#299) - -When watching packet captures, I noticed that zeroconf was processing -incoming data 3x on a my Home Assistant OS install because there are -three interfaces. - -We can skip processing duplicate packets in order to reduce the overhead -of decoding data we have already processed. 
- -Before - -Idle cpu ~8.3% - -recvfrom 4 times - - 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 - 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 - 267 recvfrom(8, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 - 267 recvfrom(8, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 - -sendto 8 times - - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 
66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 
66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - 267 sendto(8, "\0\0\204\0\0\0\0\1\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 
66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300K\0\1\200\1\0\0\0x\0\4\300\250\325\232", 335, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 335 - -After - -Idle cpu ~4.1% - -recvfrom 4 times (no change): - - 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 - 267 recvfrom(9, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("192.168.210.102")}, [16]) = 71 - 267 recvfrom(7, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 - 267 recvfrom(9, "\0\0\204\0\0\0\0\1\0\0\0\0\v_esphomelib\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\31\26masterbed_tvcabinet_32\300\f", 8966, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("172.30.32.1")}, [16]) = 71 - -sendto 2 times (reduced by 4x): - - 267 sendto(9, "\0\0\204\0\0\0\0\2\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\t_services\7_dns-sd\4_udp\300!\0\f\0\1\0\0\21\224\0\2\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300p\0\1\200\1\0\0\0x\0\4\300\250\325\232", 372, 0, {sa_family=AF_INET, 
sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 372 - 267 sendto(9, "\0\0\204\0\0\0\0\2\0\0\0\3\17_home-assistant\4_tcp\5local\0\0\f\0\1\0\0\21\224\0\7\4Home\300\f\t_services\7_dns-sd\4_udp\300!\0\f\0\1\0\0\21\224\0\2\300\f\3002\0!\200\1\0\0\0x\0)\0\0\0\0\37\273 66309dfc726446799c8a2c0f1cb0480f\300!\3002\0\20\200\1\0\0\21\224\0\305\22location_name=Home%uuid=66309dfc726446799c8a2c0f1cb0480f\24version=0.116.0.dev0\rexternal_url=(internal_url=http://192.168.213.154:8123$base_url=http://192.168.213.154:8123\32requires_api_password=True\300p\0\1\200\1\0\0\0x\0\4\300\250\325\232", 372, 0, {sa_family=AF_INET, sin_port=htons(5353), sin_addr=inet_addr("224.0.0.251")}, 16) = 372 - -With debug logging on for ~5 minutes - - bash-5.0# grep 'Received from' home-assistant.log |wc - 11458 499196 19706165 - bash-5.0# grep 'Ignoring' home-assistant.log |wc - 9357 210562 9299687 ([`f321932`](https://github.com/python-zeroconf/python-zeroconf/commit/f3219326e65f4410d45ace05f88082354a2f7525)) - -* Test with the development version of Python 3.9 (#300) - -There've been reports of test failures on Python 3.9, let's verify this. -Allowing failures for now until it goes stable. ([`1f81e0b`](https://github.com/python-zeroconf/python-zeroconf/commit/1f81e0bcad1cae735ba532758d167368925c8ede)) - - -## v0.28.4 (2020-09-06) - -### Unknown - -* Release version 0.28.4 ([`fb876d6`](https://github.com/python-zeroconf/python-zeroconf/commit/fb876d6013979cdaa8c0ddebe81e7520e9ee8cc9)) - -* Add ServiceListener to __all__ for Zeroconf module (#298) - -It's part of the public API. ([`0265a9d`](https://github.com/python-zeroconf/python-zeroconf/commit/0265a9d57630a4a19bcd3638a6bb3f4b18eba01b)) - -* Avoid copying the entires cache and reduce frequency of Reaper - -The cache reaper was running at least every 10 seconds, making -a copy of the cache, and iterated all the entries to -check if they were expired so they could be removed. 
- -In practice the reaper was actually running much more frequently -because it used self.zc.wait which would unblock any time -a record was updated, a listener was added, or when a -listener was removed. - -This change ensures the reaper frequency is only every 10s, and -will first attempt to iterate the cache before falling back to -making a copy. - -Previously it made sense to expire the cache more frequently -because we had places were we frequently had to enumerate -all the cache entries. With #247 and #232 we no longer -have to account for this concern. - -On a mostly idle RPi running HomeAssistant and a busy -network the total time spent reaping the cache was -more than the total time spent processing the mDNS traffic. - -Top 10 functions, idle RPi (before) - - %Own %Total OwnTime TotalTime Function (filename:line) - 0.00% 0.00% 2.69s 2.69s handle_read (zeroconf/__init__.py:1367) <== Incoming mDNS - 0.00% 0.00% 1.51s 2.98s run (zeroconf/__init__.py:1431) <== Reaper - 0.00% 0.00% 1.42s 1.42s is_expired (zeroconf/__init__.py:502) <== Reaper - 0.00% 0.00% 1.12s 1.12s entries (zeroconf/__init__.py:1274) <== Reaper - 0.00% 0.00% 0.620s 0.620s do_execute (sqlalchemy/engine/default.py:593) - 0.00% 0.00% 0.620s 0.620s read_utf (zeroconf/__init__.py:837) - 0.00% 0.00% 0.610s 0.610s do_commit (sqlalchemy/engine/default.py:546) - 0.00% 0.00% 0.540s 1.16s read_name (zeroconf/__init__.py:853) - 0.00% 0.00% 0.380s 0.380s do_close (sqlalchemy/engine/default.py:549) - 0.00% 0.00% 0.340s 0.340s write (asyncio/selector_events.py:908) - -After this change, the Reaper code paths do not show up in the top -10 function sample. 
- - %Own %Total OwnTime TotalTime Function (filename:line) - 4.00% 4.00% 2.72s 2.72s handle_read (zeroconf/__init__.py:1378) <== Incoming mDNS - 4.00% 4.00% 1.81s 1.81s read_utf (zeroconf/__init__.py:837) - 1.00% 5.00% 1.68s 3.51s read_name (zeroconf/__init__.py:853) - 0.00% 0.00% 1.32s 1.32s do_execute (sqlalchemy/engine/default.py:593) - 0.00% 0.00% 0.960s 0.960s readinto (socket.py:669) - 0.00% 0.00% 0.950s 0.950s create_connection (urllib3/util/connection.py:74) - 0.00% 0.00% 0.910s 0.910s do_commit (sqlalchemy/engine/default.py:546) - 1.00% 1.00% 0.880s 0.880s write (asyncio/selector_events.py:908) - 0.00% 0.00% 0.700s 0.810s __eq__ (zeroconf/__init__.py:606) - 2.00% 2.00% 0.670s 0.670s unpack (zeroconf/__init__.py:737) ([`1e4aaea`](https://github.com/python-zeroconf/python-zeroconf/commit/1e4aaeaa10c306b9447dacefa03b89ce1e9d7493)) - -* Add an author in the last changelog entry ([`9e27d12`](https://github.com/python-zeroconf/python-zeroconf/commit/9e27d126d75c73466584c417ab35c1d6cf47ca8b)) - - -## v0.28.3 (2020-08-31) - -### Unknown - -* Release version 0.28.3 ([`0e49aec`](https://github.com/python-zeroconf/python-zeroconf/commit/0e49aeca6497ede18a3f0c71ea69f2343934ba19)) - -* Reduce the time window that the handlers lock is held - -Only hold the lock if we have an update. ([`5a359bb`](https://github.com/python-zeroconf/python-zeroconf/commit/5a359bb0931fbda8444e30d07a50e59cf4ccca8e)) - -* Reformat using the latest black (20.8b1) ([`57d89d8`](https://github.com/python-zeroconf/python-zeroconf/commit/57d89d85e52dea1f8cb7f6d4b02c0281d5ba0540)) - - -## v0.28.2 (2020-08-27) - -### Unknown - -* Release version 0.28.2 ([`f64768a`](https://github.com/python-zeroconf/python-zeroconf/commit/f64768a7253829f9d8f7796a6a5c8129b92f2aad)) - -* Increase test coverage for dns cache ([`3be96b0`](https://github.com/python-zeroconf/python-zeroconf/commit/3be96b014d61c94d71ae3aa23ba223eead4f4cb7)) - -* Don't ask already answered questions (#292) - -Fixes GH-288. 
- -Co-authored-by: Erik ([`fca090d`](https://github.com/python-zeroconf/python-zeroconf/commit/fca090db06a0d481ad7f608c4fde3e936ad2f80e)) - -* Remove initial delay before querying for service info ([`0f73664`](https://github.com/python-zeroconf/python-zeroconf/commit/0f7366423fab8369700be086f3007c20897fde1f)) - - -## v0.28.1 (2020-08-17) - -### Unknown - -* Release version 0.28.1 ([`3c5d385`](https://github.com/python-zeroconf/python-zeroconf/commit/3c5d3856e286824611712de13aa0fcbe94e4313f)) - -* Ensure all listeners are cleaned up on ServiceBrowser cancelation (#290) - -When creating listeners for a ServiceBrowser with multiple types -they would not all be removed on cancelation. This led -to a build up of stale listeners when ServiceBrowsers were -frequently added and removed. ([`c9f3c91`](https://github.com/python-zeroconf/python-zeroconf/commit/c9f3c91da568fdbd26d571eed8a636a49e527b15)) - -* Gitignore some build artifacts ([`19e33a6`](https://github.com/python-zeroconf/python-zeroconf/commit/19e33a6829846008b50f408c77ac3e8e73176529)) - - -## v0.28.0 (2020-07-07) - -### Unknown - -* Release version 0.28.0 ([`0fdbf5e`](https://github.com/python-zeroconf/python-zeroconf/commit/0fdbf5e197a9f76e9e9c91a5e0908a0c66370dbd)) - -* Advertise Python 3.8 compatibility ([`02bcad9`](https://github.com/python-zeroconf/python-zeroconf/commit/02bcad902c516a5a2d2aa3302bca9871900da6e3)) - -* Fix an OS X edge case (#270, #188) - -This contains two major changes: - -* Listen on data from respond_sockets in addition to listen_socket -* Do not bind respond sockets to 0.0.0.0 or ::/0 - -The description of the original change by Emil: - -<<< -Without either of these changes, I get no replies at all when browsing for -services using the browser example. I'm on a corporate network, and when -connecting to a different network it works without these changes, so maybe -it's something about the network configuration in this particular network -that breaks the previous behavior. 
- -Unfortunately, I have no idea how this affects other platforms, or what -the changes really mean. However, it works for me and it seems reasonable -to get replies back on the same socket where they are sent. ->>> - -The tests pass and it's been confirmed to a reasonable degree that this -doesn't break the previously working use cases. - -Additionally this removes a memory leak where data sent to some of the -respond sockets would not be ever read from them (#171). - -Co-authored-by: Emil Styrke ([`fc92b1e`](https://github.com/python-zeroconf/python-zeroconf/commit/fc92b1e2635868792aa7ebe937a9cfef2e2f0418)) - -* Stop using socket.if_nameindex (#282) - -This improves Windows compatibility ([`a7f9823`](https://github.com/python-zeroconf/python-zeroconf/commit/a7f9823cbed254b506a09cc514d86d9f5dc61ad3)) - -* Make Mypy happy (#281) - -Otherwise it'd complain: - - % make mypy - mypy examples/*.py zeroconf/*.py - zeroconf/__init__.py:2039: error: Returning Any from function declared to return "int" - Found 1 error in 1 file (checked 6 source files) - make: *** [mypy] Error 1 ([`4381784`](https://github.com/python-zeroconf/python-zeroconf/commit/4381784150e07625b4acd2034b253bf2ed320c5f)) - -* Use Adapter.index from ifaddr. 
(#280) - -Co-authored-by: PhilippSelenium ([`64056ab`](https://github.com/python-zeroconf/python-zeroconf/commit/64056ab4aa55eb11c185c9879462ba1f82c7e886)) - -* Exclude a problematic pep8-naming version ([`023e72d`](https://github.com/python-zeroconf/python-zeroconf/commit/023e72d821faed9513ee0ef3a22a00231d87389e)) - -* Log listen and respond sockets just in case ([`3b6906a`](https://github.com/python-zeroconf/python-zeroconf/commit/3b6906ab94f8d9ebeb1c97b6026ab7f9be226eab)) - -* Fix one log format string (we use a socket object here) ([`328abfc`](https://github.com/python-zeroconf/python-zeroconf/commit/328abfc54138e68e36a9f5381650bd6997701e73)) - -* Add support for passing text addresses to ServiceInfo - -Not sure if parsed_addresses is the best way to name the parameter, but -we already have a parsed_addresses property so for the sake of -consistency let's stick to that. ([`0a9aa8d`](https://github.com/python-zeroconf/python-zeroconf/commit/0a9aa8d31bffec5d7b7291b84fbc95222b10d189)) - -* Support Windows when using socket errno checks (#274) - -Windows reports errno.WSAEINVAL(10022) instead of errno.EINVAL(22). -This issue is triggered when a device has two IP's assigned under -windows. - -This fixes #189 ([`c31ae7f`](https://github.com/python-zeroconf/python-zeroconf/commit/c31ae7fd519df04f41939d3c60c2b88960737fd6)) - - -## v0.27.1 (2020-06-05) - -### Unknown - -* Release version 0.27.1 ([`0538abf`](https://github.com/python-zeroconf/python-zeroconf/commit/0538abf135f5502d94dd883475bcb2781ce5ddd2)) - -* Fix false warning (#273) - -When there is nothing to write, we don't need to warn about not making progress. 
([`10065b9`](https://github.com/python-zeroconf/python-zeroconf/commit/10065b976247ae9247cddaff8f3e9d7b331e66d7)) - -* Improve logging (mainly include sockets in some messages) (#271) ([`beff998`](https://github.com/python-zeroconf/python-zeroconf/commit/beff99897f0a5ece17e224a7ea9b12ebd420044f)) - -* Simplify DNSHinfo constructor, cpu and os are always text (#266) ([`d6593af`](https://github.com/python-zeroconf/python-zeroconf/commit/d6593af2a3811b262d70bbc75c2c91613de41b21)) - -* Improve ImportError message (wrong supported Python version) ([`8045191`](https://github.com/python-zeroconf/python-zeroconf/commit/8045191ae6300da47d38e5cd82957965139359d2)) - -* Remove old Python 2-specific code ([`6f876a7`](https://github.com/python-zeroconf/python-zeroconf/commit/6f876a7f14f0b172860005b0d6d959d82f7c1bbf)) - - -## v0.27.0 (2020-05-27) - -### Unknown - -* Release version 0.27.0 ([`0502f19`](https://github.com/python-zeroconf/python-zeroconf/commit/0502f1904b0a8b9134ea2a09333232b30b3b6897)) - -* Remove no longer needed typing dependency - -We don't support Python older than 3.5. ([`d881aba`](https://github.com/python-zeroconf/python-zeroconf/commit/d881abaf591f260ad019f4ff86e7f70a6f018a64)) - -* Add --find option to example/browser.py (#263, rebased #175) - -Co-authored-by: Perry Kundert ([`781ac83`](https://github.com/python-zeroconf/python-zeroconf/commit/781ac834da38708d95bfe6e5f5ec7dd0f31efc54)) - -* Restore missing warnings import ([`178cec7`](https://github.com/python-zeroconf/python-zeroconf/commit/178cec75bd9a065b150b3542dfdb40682f6745b6)) - -* Warn on every call to missing update_service() listener method - -This is in order to provide visibility to the library users that this -method exists - without it the client code may be missing data. 
([`488ee1e`](https://github.com/python-zeroconf/python-zeroconf/commit/488ee1e85762dc5856d8e132da54762e5e712c5a)) - -* Separately send large mDNS responses to comply with RFC 6762 (#248) - -This fixes issue #245 - -Split up large multi-response packets into separate packets instead of relying on IP Fragmentation. IP Fragmentation of mDNS packets causes ChromeCast Audios to -crash their mDNS responder processes and RFC 6762 -(https://tools.ietf.org/html/rfc6762) section 17 states some -requirements for Multicast DNS Message Size, and the fourth paragraph reads: - -"A Multicast DNS packet larger than the interface MTU, which is sent -using fragments, MUST NOT contain more than one resource record." - -This change makes this implementation conform with this MUST NOT clause. ([`87a0fe2`](https://github.com/python-zeroconf/python-zeroconf/commit/87a0fe27a7be9d96af08f8a007f37a16105c64a0)) - -* Remove deprecated ServiceInfo address parameter/property (#260) ([`ab72aa8`](https://github.com/python-zeroconf/python-zeroconf/commit/ab72aa8e5a6a83e50d24d7fb187e8fa8a549a847)) - - -## v0.26.3 (2020-05-26) - -### Unknown - -* Release version 0.26.3 ([`fbcefca`](https://github.com/python-zeroconf/python-zeroconf/commit/fbcefca592632304579c1b3f9c7bd3dd342e1618)) - -* Don't call callbacks when holding _handlers_lock (#258) - -Closes #255 - -Background: -#239 adds the lock _handlers_lock: - -python-zeroconf/zeroconf/__init__.py - - self._handlers_lock = threading.Lock() # ensure we process a full message in one go - -Which is used in the engine thread: - - def handle_response(self, msg: DNSIncoming) -> None: - """Deal with incoming response packets. 
All answers - are held in the cache, and listeners are notified.""" - - with self._handlers_lock: - - -And also by the service browser when issuing the state change callbacks: - - if len(self._handlers_to_call) > 0 and not self.zc.done: - with self.zc._handlers_lock: - handler = self._handlers_to_call.popitem(False) - self._service_state_changed.fire( - zeroconf=self.zc, service_type=self.type, name=handler[0], state_change=handler[1] - ) - -Both pychromecast and Home Assistant calls Zeroconf.get_service_info from the service callbacks which means the lock may be held for several seconds which will starve the engine thread. ([`fe86566`](https://github.com/python-zeroconf/python-zeroconf/commit/fe865667e4610d57067a8f710f4d818eaa5e14dc)) - -* Give threads unique names (#257) ([`54d116f`](https://github.com/python-zeroconf/python-zeroconf/commit/54d116fd69a66062f91be04d84ceaebcfb13cc43)) - -* Use equality comparison instead of identity comparison for ints - -Integers aren't guaranteed to have the same identity even though they -may be equal. ([`445d7f5`](https://github.com/python-zeroconf/python-zeroconf/commit/445d7f5dbe38947bd0bd1e3a5b8d649c1819c21f)) - -* Merge 0.26.2 release commit - -I accidentally only pushed 0.26.2 tag (commit ffb42e5836bd) without -pushing the commit to master and now I merged aa9de4de7202 so this is -the best I can do without force-pushing to master. Tag 0.26.2 will -continue to point to that dangling commit. ([`1c4d3fc`](https://github.com/python-zeroconf/python-zeroconf/commit/1c4d3fcbf34b09364e52a773783dc9c924a7b17a)) - -* Improve readability of logged incoming data (#254) ([`aa9de4d`](https://github.com/python-zeroconf/python-zeroconf/commit/aa9de4de7202b3ab0a60f14532d227f63d7d981b)) - -* Add support for multiple types to ServiceBrowsers - -As each ServiceBrowser runs in its own thread there -is a scale problem when listening for many types. - -ServiceBrowser can now accept a list of types -in addition to a single type. 
([`a6ad100`](https://github.com/python-zeroconf/python-zeroconf/commit/a6ad100a60e8434cef6b411208eef98f68d594d3)) - -* Fix race condition where a listener gets -a message before the lock is created. ([`24a0619`](https://github.com/python-zeroconf/python-zeroconf/commit/24a06191ea35469948d12124a07429207b3c1b3b)) - -* Fix flake8 E741 in setup.py (#252) ([`4b1d953`](https://github.com/python-zeroconf/python-zeroconf/commit/4b1d953979287e08f914857867da1000634ca3af)) - - -## v0.26.1 (2020-05-06) - -### Unknown - -* Release version 0.26.1 ([`4c359e2`](https://github.com/python-zeroconf/python-zeroconf/commit/4c359e2e7cdf104efca90ffd9912ea7c7792e3bf)) - -* Remove unwanted pylint directives - -Those are results of a bad conflict resolution I did when merging [1]. - -[1] 552a030eb592 ("Call UpdateService on SRV & A/AAAA updates as well as TXT (#239)") ([`0dd6fe4`](https://github.com/python-zeroconf/python-zeroconf/commit/0dd6fe44ca3895375ba447fed5f138042ab12ebf)) - -* Avoid iterating the entire cache when an A/AAAA address has not changed (#247) - -Iterating the cache is an expensive operation -when there is 100s of devices generating zeroconf -traffic as there can be 1000s of entries in the -cache. 
([`0540342`](https://github.com/python-zeroconf/python-zeroconf/commit/0540342bacd859f38f6d2a3743a7959cd3ae4d02)) - -* Update .gitignore for Visual Studio config files (#244) ([`16431b6`](https://github.com/python-zeroconf/python-zeroconf/commit/16431b6cb51f561a4c5d2897e662b254ca4243ec)) - - -## v0.26.0 (2020-04-26) - -### Unknown - -* Release version 0.26.0 ([`36941ae`](https://github.com/python-zeroconf/python-zeroconf/commit/36941aeb72711f7954d40f0abeab4802174636df)) - -* Call UpdateService on SRV & A/AAAA updates as well as TXT (#239) - -Fix https://github.com/jstasiak/python-zeroconf/issues/235 - -Contains: - -* Add lock around handlers list -* Reverse DNSCache order to ensure newest records take precedence - - When there are multiple records in the cache, the behaviour was - inconsistent. Whilst the DNSCache.get() method returned the newest, - any function which iterated over the entire cache suffered from - a last write wins issue. This change makes this behaviour consistent - and allows the removal of an (incorrect) wait from one of the unit tests.
([`552a030`](https://github.com/python-zeroconf/python-zeroconf/commit/552a030eb592a0c07feaa7a01ece1464da4b1d0b)) - - -## v0.25.1 (2020-04-14) - -### Unknown - -* Release version 0.25.1 ([`f8fe400`](https://github.com/python-zeroconf/python-zeroconf/commit/f8fe400e4be833728f015a3d6396bfc3f7c185c0)) - -* Update Engine to immediately notify its worker thread (#243) ([`976e3dc`](https://github.com/python-zeroconf/python-zeroconf/commit/976e3dcf9d6d897b063ab6f0b7831bcfa6ac1814)) - -* Remove unstable IPv6 tests from Travis (#241) ([`cf0382b`](https://github.com/python-zeroconf/python-zeroconf/commit/cf0382ba771bcc22284fd719c80a26eaa05ba5cd)) - -* Switch to pytest for test running (#240) - -Nose is dead for all intents and purposes (last release in 2015) and -pytest provide a very valuable feature of printing relevant extra -information in case of assertion failure (from[1]): - - ================================= FAILURES ================================= - _______________________________ test_answer ________________________________ - - def test_answer(): - > assert func(3) == 5 - E assert 4 == 5 - E + where 4 = func(3) - - test_sample.py:6: AssertionError - ========================= short test summary info ========================== - FAILED test_sample.py::test_answer - assert 4 == 5 - ============================ 1 failed in 0.12s ============================= - -This should be helpful in debugging tests intermittently failing on -PyPy. - -Several TestCase.assertEqual() calls have been replaced by plain -assertions now that that method no longer provides anything we can't get -without it. Few assertions have been modified to not explicitly provide -extra information in case of failure – pytest will provide this -automatically. - -Dev dependencies are forced to be the latest versions to make sure -we don't fail because of outdated ones on Travis. 
- -[1] https://docs.pytest.org/en/latest/getting-started.html#create-your-first-test ([`f071f3d`](https://github.com/python-zeroconf/python-zeroconf/commit/f071f3d49d82ab212b86f889532200c94b36aea6)) - - -## v0.25.0 (2020-04-03) - -### Unknown - -* Release version 0.25.0 ([`0cbced8`](https://github.com/python-zeroconf/python-zeroconf/commit/0cbced809989283893e02914e251a94739a41062)) - -* Improve ServiceInfo documentation ([`e839c40`](https://github.com/python-zeroconf/python-zeroconf/commit/e839c40081ba15e228d447969b725ee42f1ef2ad)) - -* Remove uniqueness assertions - -The assertions, added in [1] and modified in [2] introduced a -regression. When browsing in the presence of devices advertising SRV -records not marked as unique there would be an undesired crash (from [3]): - - Exception in thread zeroconf-ServiceBrowser__hap._tcp.local.: - Traceback (most recent call last): - File "/usr/lib/python3.7/threading.py", line 917, in _bootstrap_inner - self.run() - File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1504, in run - handler(self.zc) - File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1444, in - zeroconf=zeroconf, service_type=self.type, name=name, state_change=state_change - File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1322, in fire - h(**kwargs) - File "browser.py", line 20, in on_service_state_change - info = zeroconf.get_service_info(service_type, name) - File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 2191, in get_service_info - if info.request(self, timeout): - File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 1762, in request - out.add_answer_at_time(zc.cache.get_by_details(self.name, _TYPE_SRV, _CLASS_IN), now) - File "/home/pi/homekit-debugging/venv/lib/python3.7/site-packages/zeroconf/__init__.py", line 907, in 
add_answer_at_time - assert record.unique - AssertionError - -The intention is to bring those assertions back in a way that only -enforces uniqueness when sending records, not when receiving them. - -[1] bef8f593ae82 ("Ensure all TXT, SRV, A records are unique") -[2] 5e4f496778d9 ("Refactor out unique assertion") -[3] https://github.com/jstasiak/python-zeroconf/issues/236 ([`a79015e`](https://github.com/python-zeroconf/python-zeroconf/commit/a79015e7c4bdc843d97bd5c82ef8ed4eeae01a34)) - -* Rationalize handling of values in TXT records - -* Do not interpret received values; use None if a property has no value -* When encoding values, use either raw bytes or UTF-8 ([`8e3adf8`](https://github.com/python-zeroconf/python-zeroconf/commit/8e3adf8300a6f2b0bc0dcc4cde54d8890e0727e9)) - - -## v0.24.5 (2020-03-08) - -### Unknown - -* Release version 0.24.5 ([`aba2858`](https://github.com/python-zeroconf/python-zeroconf/commit/aba28583f5431f584587770b6c149e4a607a987e)) - -* Resolve memory leak in DNSCache - -When all the records for a given name were removed from the cache, the -name itself that contain the list was never removed. This left an empty list -in memory for every device that was no longer broadcasting on the -network. ([`eac53f4`](https://github.com/python-zeroconf/python-zeroconf/commit/eac53f45bddb8d3d559b1d4672a926b746435771)) - -* Optimize handle_response cache check - -The handle_response loop would encounter a unique record -it would search the cache in order to remove keys that -matched the DNSEntry for the record. - -Since the cache is stored as a list of records with the key as the record name, - we can avoid searching the entire cache each time and on -search for the DNSEntry of the record. In practice this means -with 5000 entries and records in the cache we now only need to search -4 or 5. 
- -When looping over the cache entries for the name, we now check the expire time -first as its cheaper than calling DNSEntry.__eq__ - -Test environment: - - Home Assistant running on home networking with a /22 - and a significant amount of broadcast traffic - - Testing was done with py-spy v0.3.3 - (https://github.com/benfred/py-spy/releases) - - # py-spy top --pid - -Before: -``` -Collecting samples from '/usr/local/bin/python3 -m homeassistant --config /config' (python v3.7.6) -Total Samples 10200 -GIL: 0.00%, Active: 0.00%, Threads: 35 - - %Own %Total OwnTime TotalTime Function (filename:line) - 0.00% 0.00% 18.13s 18.13s _worker (concurrent/futures/thread.py:78) - 0.00% 0.00% 2.51s 2.56s run (zeroconf/__init__.py:1221) - 0.00% 0.00% 0.420s 0.420s __eq__ (zeroconf/__init__.py:394) - 0.00% 0.00% 0.390s 0.390s handle_read (zeroconf/__init__.py:1260) - 0.00% 0.00% 0.240s 0.670s handle_response (zeroconf/__init__.py:2452) - 0.00% 0.00% 0.230s 0.230s __eq__ (zeroconf/__init__.py:606) - 0.00% 0.00% 0.200s 0.810s handle_response (zeroconf/__init__.py:2449) - 0.00% 0.00% 0.140s 0.150s __eq__ (zeroconf/__init__.py:632) - 0.00% 0.00% 0.130s 0.130s entries (zeroconf/__init__.py:1185) - 0.00% 0.00% 0.090s 0.090s notify (threading.py:352) - 0.00% 0.00% 0.080s 0.080s read_utf (zeroconf/__init__.py:818) - 0.00% 0.00% 0.080s 0.080s __eq__ (zeroconf/__init__.py:678) - 0.00% 0.00% 0.070s 0.080s __eq__ (zeroconf/__init__.py:533) - 0.00% 0.00% 0.060s 0.060s __eq__ (zeroconf/__init__.py:677) - 0.00% 0.00% 0.050s 0.050s get (zeroconf/__init__.py:1146) - 0.00% 0.00% 0.050s 0.050s do_commit (sqlalchemy/engine/default.py:541) - 0.00% 0.00% 0.040s 2.86s run (zeroconf/__init__.py:1226) -``` - -After -``` -Collecting samples from '/usr/local/bin/python3 -m homeassistant --config /config' (python v3.7.6) -Total Samples 10200 -GIL: 7.00%, Active: 61.00%, Threads: 35 - - %Own %Total OwnTime TotalTime Function (filename:line) - 47.00% 47.00% 24.84s 24.84s _worker 
(concurrent/futures/thread.py:78) - 5.00% 5.00% 2.97s 2.97s run (zeroconf/__init__.py:1226) - 1.00% 1.00% 0.390s 0.390s handle_read (zeroconf/__init__.py:1265) - 1.00% 1.00% 0.200s 0.200s read_utf (zeroconf/__init__.py:818) - 0.00% 0.00% 0.120s 0.120s unpack (zeroconf/__init__.py:723) - 0.00% 1.00% 0.120s 0.320s read_name (zeroconf/__init__.py:834) - 0.00% 0.00% 0.100s 0.240s update_record (zeroconf/__init__.py:2440) - 0.00% 0.00% 0.090s 0.090s notify (threading.py:352) - 0.00% 0.00% 0.070s 0.070s update_record (zeroconf/__init__.py:1469) - 0.00% 0.00% 0.060s 0.070s __eq__ (zeroconf/__init__.py:606) - 0.00% 0.00% 0.050s 0.050s acquire (logging/__init__.py:843) - 0.00% 0.00% 0.050s 0.050s unpack (zeroconf/__init__.py:722) - 0.00% 0.00% 0.050s 0.050s read_name (zeroconf/__init__.py:828) - 0.00% 0.00% 0.050s 0.050s is_expired (zeroconf/__init__.py:494) - 0.00% 0.00% 0.040s 0.040s emit (logging/__init__.py:1028) - 1.00% 1.00% 0.040s 0.040s __init__ (zeroconf/__init__.py:386) - 0.00% 0.00% 0.040s 0.040s __enter__ (threading.py:241) -``` ([`37fa0a0`](https://github.com/python-zeroconf/python-zeroconf/commit/37fa0a0d59a5b5d09295a462bf911e82d2d770ed)) - -* Support cooperating responders (#224) ([`1ca023f`](https://github.com/python-zeroconf/python-zeroconf/commit/1ca023fae4b586679446ceaf3e2e9955ea5bf180)) - -* Remove duplciate update messages sent to listeners - -The prior code used to send updates even when the new record was identical to the old. 
- -This resulted in duplicate update messages when there was in fact no update (apart from TTL refresh) ([`d8caa4e`](https://github.com/python-zeroconf/python-zeroconf/commit/d8caa4e2d71025ed42b33abb4d329329437b44fb)) - -* Refactor out unique assertion ([`5e4f496`](https://github.com/python-zeroconf/python-zeroconf/commit/5e4f496778d91ccfc65e946d3d94c39ab6388b29)) - -* Fix representation of IPv6 DNSAddress (#230) ([`f6690d2`](https://github.com/python-zeroconf/python-zeroconf/commit/f6690d2048cb87cb0fb3a7c3b832cf1a1f40e61a)) - -* Do not exclude interfaces with host-only netmasks from InterfaceChoice.All (#227) - -Host-only netmasks do not forbid multicast. - -Tested on Debian 10 running in Qubes and on Ubuntu 18.04. ([`ca8e53d`](https://github.com/python-zeroconf/python-zeroconf/commit/ca8e53de55a563f5c7049be2eda14ae0ecd1a7cf)) - -* Ensure all TXT, SRV, A records are unique - -Fixes issues with shared records being used where they shouldn't be. - -PTR records should be shared, but SRV, TXT and A/AAAA records should be unique. - -Whilst mDNS and DNS-SD in theory support shared records for these types of record, they are not implemented in python-zeroconf at the moment. - -See zeroconf.check_service() method which verifies the service is unique on the network before registering. 
([`bef8f59`](https://github.com/python-zeroconf/python-zeroconf/commit/bef8f593ae820eb8465934de91eb27468edf6444)) - - -## v0.24.4 (2019-12-30) - -### Unknown - -* Release version 0.24.4 ([`29432bf`](https://github.com/python-zeroconf/python-zeroconf/commit/29432bfffd057cf4da7636ba0c28c9d8a7ad4357)) - -* Clean up output of ttl remaining to be whole seconds only ([`ba1b78d`](https://github.com/python-zeroconf/python-zeroconf/commit/ba1b78dbdcc64f8d35c951e7ca53d2898e7d7900)) - -* Clean up format to cleanly separate [question]=ttl,answer ([`4b735dc`](https://github.com/python-zeroconf/python-zeroconf/commit/4b735dc5411f7b563f23b60b5c2aa806151cca1a)) - -* Update DNS entries so all subclasses of DNSRecord use to_string for display - -All records based on DNSRecord now properly use to_string in repr, some were -only dumping the answer without the question (inconsistent). ([`8ccad54`](https://github.com/python-zeroconf/python-zeroconf/commit/8ccad54dab4a0ab7f573996f6fc0c2f2bad7eafe)) - -* Fix resetting of TTL (#209) - -Fix resetting of TTL - -Previously the reset_ttl method changed the time created and the TTL value, but did not change the expiration time or stale times. As a result a record would expire even when this method had been called. ([`b47efd8`](https://github.com/python-zeroconf/python-zeroconf/commit/b47efd8eed0b5ed9d3b6bca8573a6ed1916c982a)) - - -## v0.24.3 (2019-12-23) - -### Unknown - -* Release version 0.24.3 ([`2316027`](https://github.com/python-zeroconf/python-zeroconf/commit/2316027e5e96d8f10fae7607da5b72a9bab819fc)) - -* Fix import-time TypeError on CPython 3.5.2 - -The error: TypeError: 'ellipsis' object is not iterable." - -Explanation can be found here: https://github.com/jstasiak/python-zeroconf/issues/208 - -Closes GH-208. 
([`f53e24b`](https://github.com/python-zeroconf/python-zeroconf/commit/f53e24bddb3a6cb242cace2a541ed507e823be33)) - - -## v0.24.2 (2019-12-17) - -### Unknown - -* Release version 0.24.2 ([`76bc675`](https://github.com/python-zeroconf/python-zeroconf/commit/76bc67532ad26f54c194e1e6537d2da4390f83e2)) - -* Provide and enforce type hints everywhere except for tests - -The tests' time will come too in the future, though, I think. I believe -nose has problems with running annotated tests right now so let's leave -it for later. - -DNSEntry.to_string renamed to entry_to_string because DNSRecord -subclasses DNSEntry and overrides to_string with a different signature, -so just to be explicit and obvious here I renamed it – I don't think any -client code will break because of this. - -I got rid of ServicePropertiesType in favor of generic Dict because -having to type all the properties got annoying when variance got -involved – maybe it'll be restored in the future but it seems like too -much hassle now. ([`f771587`](https://github.com/python-zeroconf/python-zeroconf/commit/f7715874c2242b95cf9815549344ea66ac107b6e)) - -* Fix get_expiration_time percent parameter annotation - -It takes integer percentage values at the moment so let's document that. ([`5986bf6`](https://github.com/python-zeroconf/python-zeroconf/commit/5986bf66e77e77f9e0b6ba43a4758ecb0da04ff6)) - -* Add support for AWDL interface on macOS - -The API is inspired by Apple's NetService.includesPeerToPeer -(see https://developer.apple.com/documentation/foundation/netservice/1414086-includespeertopeer) ([`fcafdc1`](https://github.com/python-zeroconf/python-zeroconf/commit/fcafdc1e285cc5c3c1f2c413ac9309d3426179f4)) - - -## v0.24.1 (2019-12-16) - -### Unknown - -* Release version 0.24.1 ([`53dd06c`](https://github.com/python-zeroconf/python-zeroconf/commit/53dd06c37f6205129e81f5c6b69e508a54f94d07)) - -* Bugfix: TXT record's name is never equal to Service Browser's type. 
- -TXT record's name is never equal to Service Browser's type. We should -check whether TXT record's name ends with Service Browser's type. -Otherwise, we never get updates of TXT records. ([`2a597ee`](https://github.com/python-zeroconf/python-zeroconf/commit/2a597ee80906a27effd442d033de10b5129e6900)) - -* Bugfix: Flush outdated cache entries when incoming record is unique. - -According to RFC 6762 10.2. Announcements to Flush Outdated Cache Entries, -when the incoming record's cache-flush bit is set (record.unique == True -in this module), "Instead of merging this new record additively into the -cache in addition to any previous records with the same name, rrtype, and -rrclass, all old records with that name, rrtype, and rrclass that were -received more than one second ago are declared invalid, and marked to -expire from the cache in one second." ([`1d39b3e`](https://github.com/python-zeroconf/python-zeroconf/commit/1d39b3edd141093f9e579ab83377fe8f5ecb357d)) - -* Change order of equality check to favor cheaper checks first - -Comparing two strings is much cheaper than isinstance, so we should try -those first - -A performance test was run on a network with 170 devices running Zeroconf. -There was a ServiceBrowser running on a separate thread while a timer ran -on the main thread that forced a thread switch every 2 seconds (to include -the effect of thread switching in the measurements). Every minute, -a Zeroconf broadcast was made on the network. - -This was ran this for an hour on a Macbook Air from 2015 (Intel Core -i7-5650U) using Ubuntu 19.10 and Python 3.7, both before this commit and -after. 
- -These are the results of the performance tests: -Function Before count Before time Before time per count After count After time After time per count Time reduction -DNSEntry.__eq__ 528 0.001s 1.9μs 538 0.001s 1.9μs 1.9% -DNSPointer.__eq__ 24369256 (24.3M) 134.641s 5.5μs 25989573 (26.0M) 86.405s 3.3μs 39.8% -DNSText.__eq__ 52966716 (53.0M) 190.640s 3.6μs 53604915 (53.6M) 169.104s 3.2μs 12.4% -DNSService.__eq__ 52620538 (52.6M) 171.660s 3.3μs 56557448 (56.6M) 170.222s 3.0μs 7.8% ([`815ac77`](https://github.com/python-zeroconf/python-zeroconf/commit/815ac77e9146c37afd7c5389ed45adee9f1e2e36)) - -* Dont recalculate the expiration and stale time every update - -I have a network with 170 devices running Zeroconf. Every minute -a zeroconf request for broadcast is cast out. Then we were listening for -Zeroconf devices on that network. - -To get a more realistic test, the Zeroconf ServiceBrowser is ran on -a separate thread from a main thread. On the main thread an I/O limited -call to QNetworkManager is made every 2 seconds, - -in order to include performance penalties due to thread switching. The -experiment was ran on a MacBook Air 2015 (Intel Core i7-5650U) through -Ubuntu 19.10 and Python 3.7. - -This was left running for exactly one hour, both before and after this commit. - -Before this commit, there were 132107499 (132M) calls to the -get_expiration_time function, totalling 141.647s (just over 2 minutes). - -After this commit, there were 1661203 (1.6M) calls to the -get_expiration_time function, totalling 2.068s. - -This saved about 2 minutes of processing time out of the total 60 minutes, -on average 3.88% processing power on the tested CPU. It is expected to see -similar improvements on all CPU architectures. ([`2e9699c`](https://github.com/python-zeroconf/python-zeroconf/commit/2e9699c542f691fc605e4a1c03cbf496273a9835)) - -* Significantly improve the speed of the entries function of the cache - -Tested this with Python 3.6.8, Fedora 28. 
This was done in a network with -a lot of discoverable devices. - -before: -Total time: 1.43086 s - -Line # Hits Time Per Hit % Time Line Contents -============================================================== - 1138 @profile - 1139 def entries(self): - 1140 """Returns a list of all entries""" - 1141 2063 3578.0 1.7 0.3 if not self.cache: - 1142 2 3.0 1.5 0.0 return [] - 1143 else: - 1144 # avoid size change during iteration by copying the cache - 1145 2061 22051.0 10.7 1.5 values = list(self.cache.values()) - 1146 2061 1405227.0 681.8 98.2 return reduce(lambda a, b: a + b, values) - -After: -Total time: 0.43725 s - -Line # Hits Time Per Hit % Time Line Contents -============================================================== - 1138 @profile - 1139 def entries(self): - 1140 """Returns a list of all entries""" - 1141 3651 10171.0 2.8 2.3 if not self.cache: - 1142 2 7.0 3.5 0.0 return [] - 1143 else: - 1144 # avoid size change during iteration by copying the cache - 1145 3649 67054.0 18.4 15.3 values = list(self.cache.values()) - 1146 3649 360018.0 98.7 82.3 return list(itertools.chain.from_iterable(values)) ([`157fc20`](https://github.com/python-zeroconf/python-zeroconf/commit/157fc2003318d785d07b362e1fd2ba3fe5d373f0)) - -* The the formatting of the IPv6 section in the readme ([`6ab7dbf`](https://github.com/python-zeroconf/python-zeroconf/commit/6ab7dbf27a2086e20f4486e693e2091d043af1db)) - - -## v0.24.0 (2019-11-19) - -### Unknown - -* Release version 0.24.0 ([`f03dc42`](https://github.com/python-zeroconf/python-zeroconf/commit/f03dc42d6234419053bda18ca6f2b90bec1b9257)) - -* Improve type hint coverage ([`c827f9f`](https://github.com/python-zeroconf/python-zeroconf/commit/c827f9fdc4c58433143ea8815029c3387b500ff5)) - -* Add py.typed marker (closes #199) - -This required changing to a proper package. 
([`41b31cb`](https://github.com/python-zeroconf/python-zeroconf/commit/41b31cb338e8a8a7d1a548662db70d9014e8a352)) - -* Link to the documentation ([`3db9d82`](https://github.com/python-zeroconf/python-zeroconf/commit/3db9d82d888abe880bfdd2fb2c3fe3eddcb48ae9)) - -* Setup basic Sphinx documentation - -Closes #200 ([`1c33e5f`](https://github.com/python-zeroconf/python-zeroconf/commit/1c33e5f5b44732d446d629cc13000cff3527afef)) - -* ENOTCONN is not an error during shutdown - -When `python-zeroconf` is used in conjunction with `eventlet`, `select.select()` will return with an error code equal to `errno.ENOTCONN` instead of `errno.EBADF`. As a consequence, an exception is shown in the console during shutdown. I believe that it should not cause any harm to treat `errno.ENOTCONN` the same way as `errno.EBADF` to prevent this exception. ([`c86423a`](https://github.com/python-zeroconf/python-zeroconf/commit/c86423ab0223bab682614e18a6a09050dfc80087)) - -* Rework exposing IPv6 addresses on ServiceInfo - -* Return backward compatibility for ServiceInfo.addresses by making - it return V4 addresses only -* Add ServiceInfo.parsed_addresses for convenient access to addresses -* Raise TypeError if addresses are not provided as bytes (otherwise - an ugly assertion error is raised when sending) -* Add more IPv6 unit tests ([`98a1ce8`](https://github.com/python-zeroconf/python-zeroconf/commit/98a1ce8b99ddb03de9f6cccca49396fcf177e0d0)) - -* Finish AAAA records support - -The correct record type was missing in a few places. Also use -addresses_by_version(All) in preparation for switching addresses -to V4 by default. ([`aae7fd3`](https://github.com/python-zeroconf/python-zeroconf/commit/aae7fd3ba851d1894732c4270cef745127cc03da)) - -* Test with pypy3.6 - -Right now this is available as pypy3 in Travis CI. Running black on PyPy -needs to be disabled for now because of an issue[1] that's been patched -only recently and it's not available in Travis yet. 
- -[1] https://bitbucket.org/pypy/pypy/issues/2985/pypy36-osreplace-pathlike-typeerror ([`fec839a`](https://github.com/python-zeroconf/python-zeroconf/commit/fec839ae4fdcb870066fff855809583dcf7d7a17)) - -* Stop specifying precise pypy3.5 version - -This allows us to test with the latest available one. ([`c2e8bde`](https://github.com/python-zeroconf/python-zeroconf/commit/c2e8bdebc6cec128d01197d53c3402278a4b62ed)) - -* Simplify Travis CI configuration regarding Python 3.7 - -Selecting xenial manually is no longer needed. ([`5359ea0`](https://github.com/python-zeroconf/python-zeroconf/commit/5359ea0a0b4cdca0854ae97c5d11036633102c67)) - -* Test with Python 3.8 ([`15118c8`](https://github.com/python-zeroconf/python-zeroconf/commit/15118c837a148a37edd29a20294e598ecf09c3cf)) - -* Make AAAA records work (closes #52) (#191) - -This PR incorporates changes from the earlier PR #179 (thanks to Mikael Pahmp), adding tests and a few more fixes to make AAAA records work in practice. - -Note that changing addresses to contain IPv6 addresses may be considered a breaking change, for example, for consumers that unconditionally apply inet_aton to them. I'm introducing a new function to be able to retrieve only addresses from one family. ([`5bb9531`](https://github.com/python-zeroconf/python-zeroconf/commit/5bb9531be48f6f1e119643677c36d9e714204a8b)) - -* Improve static typing coverage ([`e5323d8`](https://github.com/python-zeroconf/python-zeroconf/commit/e5323d8c9795c59019173b8d202a50a49c415039)) - -* Add additional recommended records to PTR responses (#184) - -RFC6763 indicates a server should include the SRV/TXT/A/AAAA records -when responding to a PTR record request. This optimization ensures -the client doesn't have to then query for these additional records. 
- -It has been observed that when multiple Windows 10 machines are monitoring -for the same service, this unoptimized response to the PTR record -request can cause extremely high CPU usage in both the DHCP Client -& Device Association service (I suspect due to all clients having to -then sending/receiving the additional queries/responses). ([`ea64265`](https://github.com/python-zeroconf/python-zeroconf/commit/ea6426547f79c32c6d5d3bcc2d0a261bf503197a)) - -* Rename IpVersion to IPVersion - -A follow up to 3d5787b8c5a92304b70c04f48dc7d5cec8d9aac8. ([`ceb602c`](https://github.com/python-zeroconf/python-zeroconf/commit/ceb602c0d1bc1d3a269fd233b072a9b929076438)) - -* First stab at supporting listening on IPv6 interfaces - -This change adds basic support for listening on IPv6 interfaces. -Some limitations exist for non-POSIX platforms, pending fixes in -Python and in the ifaddr library. Also dual V4-V6 sockets may not -work on all BSD platforms. As a result, V4-only is used by default. - -Unfortunately, Travis does not seem to support IPv6, so the tests -are disabled on it, which also leads to coverage decrease. ([`3d5787b`](https://github.com/python-zeroconf/python-zeroconf/commit/3d5787b8c5a92304b70c04f48dc7d5cec8d9aac8)) - - -## v0.23.0 (2019-06-04) - -### Unknown - -* Release version 0.23.0 ([`7bd0436`](https://github.com/python-zeroconf/python-zeroconf/commit/7bd04363c7ff0f583a17cc2fac42f9a9c1724769)) - -* Add support for multiple addresses when publishing a service (#170) - -This is a rebased and fixed version of PR #27, which also adds compatibility shim for ServiceInfo.address and does a proper deprecation for it. - -* Present all addresses that are available. - -* Add support for publishing multiple addresses. - -* Add test for backwards compatibility. 
- -* Provide proper deprecation of the "address" argument and field - -* Raise deprecation warnings when address is used -* Add a compatibility property to avoid breaking existing code - (based on suggestion by Bas Stottelaar in PR #27) -* Make addresses keyword-only, so that address can be eventually - removed and replaced with it without breaking consumers -* Raise TypeError instead of an assertion on conflicting address - and addresses - -* Disable black on ServiceInfo.__init__ until black is fixed - -Due to https://github.com/python/black/issues/759 black produces -code that is invalid Python 3.5 syntax even with --target-version py35. -This patch disables reformatting for this call (it doesn't seem to be -possible per line) until it's fixed. ([`c787610`](https://github.com/python-zeroconf/python-zeroconf/commit/c7876108150cd251786db4ab52dadd1b2283d262)) - -* Makefile: be specific which files to check with black (#169) - -Otherwise black tries to check the "env" directory, which fails. ([`6b85a33`](https://github.com/python-zeroconf/python-zeroconf/commit/6b85a333de21fa36187f081c3c115c8af40d7055)) - -* Run black --check as part of CI to enforce code style ([`12477c9`](https://github.com/python-zeroconf/python-zeroconf/commit/12477c954e7f051d10152f9ab970e28fd4222b30)) - -* Refactor the CI script a bit to make adding black check easier ([`69ad22c`](https://github.com/python-zeroconf/python-zeroconf/commit/69ad22cf852a12622f78aa2f4e7cf20c2d395db2)) - -* Reformat the code using Black - -We could use some style consistency in the project and Black looks like -the best tool for the job. - -Two flake8 errors are being silenced from now on: - -* E203 whitespace before : -* W503 line break before binary operator - -Both are to satisfy Black-formatted code (and W503 is somewhat against -the latest PEP8 recommendations regarding line breaks and binary -operators in new code). 
([`beb596c`](https://github.com/python-zeroconf/python-zeroconf/commit/beb596c345b0764bdfe1a828cfa744bcc560cf32)) - -* Add support for MyListener call getting updates to service TXT records (2nd attempt) (#166) - -Add support for MyListener call getting updates to service TXT records - -At the moment, the implementation supports notification to the ServiceListener class for additions and removals of service, but for service updates to the TXT record, the client must poll the ServiceInfo class. This draft PR provides a mechanism to have a callback on the ServiceListener class be invoked when the TXT record changes. ([`d4e06bc`](https://github.com/python-zeroconf/python-zeroconf/commit/d4e06bc54098bfa7a863bcc11bb9e2035738c8f5)) - -* Remove Python 3.4 from the Python compatibility section - -I forgot to do this in 4a02d0489da80e8b9e8d012bb7451cd172c753ca. ([`e1c2b00`](https://github.com/python-zeroconf/python-zeroconf/commit/e1c2b00c772a1538a6682c45884bbe89c8efba60)) - -* Drop Python 3.4 support (it's dead now) - -See https://devguide.python.org/#status-of-python-branches ([`4a02d04`](https://github.com/python-zeroconf/python-zeroconf/commit/4a02d0489da80e8b9e8d012bb7451cd172c753ca)) - - -## v0.22.0 (2019-04-27) - -### Unknown - -* Prepare release 0.22.0 ([`db1dcf6`](https://github.com/python-zeroconf/python-zeroconf/commit/db1dcf682e453766b53773d70c0091b81a87a192)) - -* Add arguments to set TTLs via ServiceInfo ([`ecc021b`](https://github.com/python-zeroconf/python-zeroconf/commit/ecc021b7a3cec863eed5a3f71a1f28e3026c25b0)) - -* Use recommended TTLs with overrides via ServiceInfo ([`a7aedb5`](https://github.com/python-zeroconf/python-zeroconf/commit/a7aedb58649f557a5e372fc776f98457ce84eb39)) - -* ttl: modify default used to respond to _services queries ([`f25989d`](https://github.com/python-zeroconf/python-zeroconf/commit/f25989d8cdae8f77e19eba70f236dd8103b33e8f)) - -* Fix service removal packets not being sent on shutdown 
([`57310e1`](https://github.com/python-zeroconf/python-zeroconf/commit/57310e185a4f924dd257edd64f866da685a786c6)) - -* Adjust query intervals to match RFC 6762 (#159) - -* Limit query backoff time to one hour as-per rfc6762 section 5.2 -* tests: monkey patch backoff limit to focus testing on TTL expiry -* tests: speed up integration test -* tests: add test of query backoff interval and limit -* Set initial query interval to 1 second as-per rfc6762 sec 5.2 -* Add comments around timing constants -* tests: fix linting errors -* tests: fix float assignment to integer var - - -Sets the repeated query backoff limit to one hour as opposed to 20 seconds, reducing unnecessary network traffic -Adds a test for the behaviour of the backoff procedure -Sets the first repeated query to happen after one second as opposed to 500ms ([`bee8abd`](https://github.com/python-zeroconf/python-zeroconf/commit/bee8abdba49e2275d203e3b0b4a3afac330ec4ea)) - -* Turn on and address mypy check_untyped_defs ([`4218d75`](https://github.com/python-zeroconf/python-zeroconf/commit/4218d757994467ee710b0cad034ea1fb6035d3ea)) - -* Turn on and address mypy warn-return-any ([`006e614`](https://github.com/python-zeroconf/python-zeroconf/commit/006e614315c12e5232e6168ce0bacf0dc056ba8a)) - -* Turn on and address mypy no-implicit-optional ([`071c6ed`](https://github.com/python-zeroconf/python-zeroconf/commit/071c6edb924b6bc9b67859dc9860cfe09cc98d07)) - -* Add reminder to enable disallow_untyped_calls for mypy ([`24bb44f`](https://github.com/python-zeroconf/python-zeroconf/commit/24bb44f858cd325d7ff2892c53dc1dd9f26ed768)) - -* Enable some more mypy warnings ([`183a846`](https://github.com/python-zeroconf/python-zeroconf/commit/183a84636a9d4fec6306d065a4f855fec95086e4)) - -* Run mypy on test_zeroconf.py too - -This will reveal issues with current type hints as demonstrated by a -commit/issue to be submitted later, as well as prevent some others -from cropping up meanwhile. 
([`74391d5`](https://github.com/python-zeroconf/python-zeroconf/commit/74391d5c124bf6f899059db93bbf7e99b96d8aad)) - -* Move mypy config to setup.cfg - -Removes need for a separate file, better to have more in one place. ([`2973931`](https://github.com/python-zeroconf/python-zeroconf/commit/29739319ccf71f48c06bc1b74cd193f17fb6b272)) - -* Don't bother with a universal wheel as we're Python >= 3 only ([`9c0f1ab`](https://github.com/python-zeroconf/python-zeroconf/commit/9c0f1ab03b90f87ff1d58278a0b9b77c16195185)) - -* Add unit tests for default ServiceInfo properties. ([`a12c3b2`](https://github.com/python-zeroconf/python-zeroconf/commit/a12c3b2a3b4300849e0a4dcdd4df5386286b88d3)) - -* Modify ServiceInfo's __init__ properties' default value. - -This commit modifies the default value of the argument properties of -ServiceInfo’s __init__() to byte array (properties=b’’). This enables -to instantiate it without setting the properties argument. As it is, -and because properties is not mandatory, if a user does not specify -the argument, an exception (AssertionError) is thrown: - -Traceback (most recent call last): - File "src/zeroconf-test.py", line 72, in - zeroconf.register_service(service) - File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 1864, in register_service - self.send(out) - File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 2091, in send - packet = out.packet() - File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 1026, in packet - overrun_answers += self.write_record(answer, time_) - File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 998, in write_record - record.write(self) - File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 579, in write - out.write_string(self.text) - File "/home/jmpcm/zeroconf-test/src/zeroconf.py", line 903, in write_string - assert isinstance(value, bytes) -AssertionError - -The argument can be either a dictionary or a byte array. 
The function -_set_properties() will always create a byte array with the user's -properties. Changing the default value to a byte array, avoids the -conversion to byte array and avoids the exception. ([`9321007`](https://github.com/python-zeroconf/python-zeroconf/commit/93210079259bd0973e3b54a90dff971e14abf595)) - -* Fix some spelling errors ([`88fb0e3`](https://github.com/python-zeroconf/python-zeroconf/commit/88fb0e34f902498f6ceb583ce6fa9346745a14ca)) - -* Require flake8 >= 3.6.0, drop pycodestyle restriction - -Fixes current build breakage related to flake8 dependencies. - -The breakage: - -$ make flake8 -flake8 --max-line-length=110 examples *.py -Traceback (most recent call last): - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2329, in resolve - return functools.reduce(getattr, self.attrs, module) -AttributeError: module 'pycodestyle' has no attribute 'break_after_binary_operator' -During handling of the above exception, another exception occurred: -Traceback (most recent call last): - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 182, in load_plugin - self._load(verify_requirements) - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 154, in _load - self._plugin = resolve() - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/pkg_resources/__init__.py", line 2331, in resolve - raise ImportError(str(exc)) -ImportError: module 'pycodestyle' has no attribute 'break_after_binary_operator' -During handling of the above exception, another exception occurred: -Traceback (most recent call last): - File "/home/travis/virtualenv/python3.5.6/bin/flake8", line 11, in - sys.exit(main()) - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/cli.py", line 16, in main - app.run(argv) - File 
"/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 412, in run - self._run(argv) - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 399, in _run - self.initialize(argv) - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 381, in initialize - self.find_plugins() - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/main/application.py", line 197, in find_plugins - self.check_plugins.load_plugins() - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 434, in load_plugins - plugins = list(self.manager.map(load_plugin)) - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 319, in map - yield func(self.plugins[name], *args, **kwargs) - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 432, in load_plugin - return plugin.load_plugin() - File "/home/travis/virtualenv/python3.5.6/lib/python3.5/site-packages/flake8/plugins/manager.py", line 189, in load_plugin - raise failed_to_load -flake8.exceptions.FailedToLoadPlugin: Flake8 failed to load plugin "pycodestyle.break_after_binary_operator" due to module 'pycodestyle' has no attribute 'break_after_binary_operator'. ([`73b3620`](https://github.com/python-zeroconf/python-zeroconf/commit/73b3620908cb5e2f54231692c17f6bbb8a42d09d)) - -* Drop flake8-blind-except - -Obsoleted by pycodestyle 2.1's E722. 
([`e3b7e40`](https://github.com/python-zeroconf/python-zeroconf/commit/e3b7e40af52d05264794e2e4d37dfdb1c5d3814a)) - -* Test with PyPy 3.5 5.10.1 ([`51a6f70`](https://github.com/python-zeroconf/python-zeroconf/commit/51a6f7081bd5590ca5ea5418b39172714b7ef1fe)) - -* Fix a changelog typo ([`e08db28`](https://github.com/python-zeroconf/python-zeroconf/commit/e08db282edd8459e35d17ae4e7278106056a0c94)) - - -## v0.21.3 (2018-09-21) - -### Unknown - -* Prepare release 0.21.3 ([`059530d`](https://github.com/python-zeroconf/python-zeroconf/commit/059530d075fe1575ebbab535be67ac7d5ae7caed)) - -* Actually allow underscores in incoming service names - -This was meant to be released earlier, but I failed to merge part of my -patch. - -Fixes: ff4a262adc69 ("Allow underscores in incoming service names") -Closes #102 ([`ae3bd51`](https://github.com/python-zeroconf/python-zeroconf/commit/ae3bd517d84aae631db1cc294caf22541a7f4bd5)) - - -## v0.21.2 (2018-09-20) - -### Unknown - -* Prepare release 0.21.2 ([`af33c83`](https://github.com/python-zeroconf/python-zeroconf/commit/af33c83e72d6fa4171342f78d15b2f28038f1318)) - -* Fix typing-related TypeError - -Older typing versions don't allow what we did[1]. We don't really need -to be that precise here anyway. - -The error: - - $ python - Python 3.5.2 (default, Nov 23 2017, 16:37:01) - [GCC 5.4.0 20160609] on linux - Type "help", "copyright", "credits" or "license" for more information. 
- >>> import zeroconf - Traceback (most recent call last): - File "", line 1, in - File "/scraper/venv/lib/python3.5/site-packages/zeroconf.py", line 320, in - OptionalExcInfo = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] - File "/usr/lib/python3.5/typing.py", line 649, in __getitem__ - return Union[arg, type(None)] - File "/usr/lib/python3.5/typing.py", line 552, in __getitem__ - dict(self.__dict__), parameters, _root=True) - File "/usr/lib/python3.5/typing.py", line 512, in __new__ - for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): - File "/usr/lib/python3.5/typing.py", line 512, in - for t2 in all_params - {t1} if not isinstance(t2, TypeVar)): - File "/usr/lib/python3.5/typing.py", line 1077, in __subclasscheck__ - if super().__subclasscheck__(cls): - File "/usr/lib/python3.5/abc.py", line 225, in __subclasscheck__ - for scls in cls.__subclasses__(): - TypeError: descriptor '__subclasses__' of 'type' object needs an argument - -Closes #141 -Fixes: 1f33c4f8a805 ("Introduce some static type analysis to the codebase") - -[1] https://github.com/python/typing/issues/266 ([`627c22e`](https://github.com/python-zeroconf/python-zeroconf/commit/627c22e19166c123244567410adc390ed368eca7)) - - -## v0.21.1 (2018-09-17) - -### Unknown - -* Prepare release 0.21.1 ([`1684a46`](https://github.com/python-zeroconf/python-zeroconf/commit/1684a46d57a437fc8cc7b5887d51440424c6ded5)) - -* Bringing back compatibility with python 3.4 (#140) - -The latest release of zeroconf in PyPI (0.21.0) breaks compatibility with python 3.4 due to an unstated dependency on the typing package. 
([`919191c`](https://github.com/python-zeroconf/python-zeroconf/commit/919191ca266d8d589ad33cc6dd2c197f75092634)) - - -## v0.21.0 (2018-09-16) - -### Unknown - -* Prepare release 0.21.0 ([`b03cee3`](https://github.com/python-zeroconf/python-zeroconf/commit/b03cee348973469e9ebfce6e9b0e0a367c146401)) - -* Allow underscores in incoming service names - -There are real world cases of services broadcasting names with -underscores in them so tough luck, let's accept those to be compatible. -Registering service names with underscores in them continues to be -disallowed. - -Closes https://github.com/jstasiak/python-zeroconf/issues/102 ([`ff4a262`](https://github.com/python-zeroconf/python-zeroconf/commit/ff4a262adc6926905c71e2952b3159b84a974d02)) - -* Don't mention unsupported Python versions ([`208ec1b`](https://github.com/python-zeroconf/python-zeroconf/commit/208ec1ba58a6ebf7160a760feffe62cf366137e5)) - -* using ifaddr instead of netifaces as ifaddr is a pure python lib ([`7c0500e`](https://github.com/python-zeroconf/python-zeroconf/commit/7c0500ee19869ce0e85e58a26b8fdb0868e0b142)) - -* Show that we actually support Python 3.7 - -We can't just add Python 3.7 like earlier versions because Travis -doesn't support it at the moment[1]. - -[1] https://github.com/travis-ci/travis-ci/issues/9815 ([`418b4b8`](https://github.com/python-zeroconf/python-zeroconf/commit/418b4b814e6483a20a5cac2178a2cd815d5b91c0)) - -* Introduce some static type analysis to the codebase - -The main purpose of this change is to make the code easier to read and -explore. Preventing some classes of bugs is a bonus. 
- -On top of just adding type hints and enabling mypy to verify them the -following changes were needed: -* casts in places where we know what we're doing but mypy can't know it -* RecordUpdateListener interfaces extracted out of ServiceBrowser and - ServiceInfo classes so that we have a common name we can use in places - where we only need an instance of one of those classes to call to call - update_record() on it. This way we can keep mypy happy -* assert isinstance(...) blocks to provide hints for mypy as to what - concrete types we're dealing with -* some local type mixing removed (previously we'd first assign a value - of one type to a variable and then overwrite with another type) -* explicit "return None" in case of function that returns optionally - - mypy enforces explicit return in this case ([`1f33c4f`](https://github.com/python-zeroconf/python-zeroconf/commit/1f33c4f8a8050cdfb051c0da7ebe80a9ff24cf25)) - -* Fix a logging call - -The format string expects three parameters, one of them was accidentally -passed to the log_warning_once() method instead. - -Fixes: aa1f48433cbd ("Improve test coverage, and fix issues found") ([`23fdcce`](https://github.com/python-zeroconf/python-zeroconf/commit/23fdcce35fa020d09267e6fa57cf21cfb744a2c4)) - -* Fix UTF-8 multibyte name compression ([`e11700f`](https://github.com/python-zeroconf/python-zeroconf/commit/e11700ff9ea9eb429c701dfb73c4cf2c45994015)) - -* Remove some legacy cruft - -The latest versions of flake8 and flake8-import-order can be used just -fine now (they've been ok for some time). - -Since with google style flake8-import-order would generate more issues -than with the cryptography style I decided to switch and fix one thing -it complained about. - -We switch to pycodestyle instead of pinned pep8 version as that pep8 -version can't be installed with latest flake8 and the name of the -package has been changed to pycodestyle. 
We still pin the version though -as there's a bad interaction between the latest pycodestyle and the -latest flake8. ([`6fe8132`](https://github.com/python-zeroconf/python-zeroconf/commit/6fe813212f46576cf305c17ee815536a83128fce)) - -* Fix UnboundLocalError for count after loop - -This code throws an `UnboundLocalError` as `count` doesn't exist in the `else` branch of the for loop. ([`42c8662`](https://github.com/python-zeroconf/python-zeroconf/commit/42c866298725a8e9667bf1230be845e856cb382a)) - -* examples: Add an example of resolving a known service by service name - -To use: -* `avahi-publish-service -s 'My Service Name' _test._tcp 0` -* `./examples/resolver.py` should print a `ServiceInfo` -* Kill the `avahi-publish-service` process -* `./examples/resolver.py` should print `None` - -Signed-off-by: Simon McVittie ([`703d971`](https://github.com/python-zeroconf/python-zeroconf/commit/703d97150de1c74b7c1a62b59c1ff7081dec8256)) - -* Handle Interface Quirk to make it work on WSL (Windows Subsystem for Linux) ([`374f45b`](https://github.com/python-zeroconf/python-zeroconf/commit/374f45b783caf35b26f464130fbd1ff62591af2e)) - -* Make some variables PEP 8-compatible - -Previously pep8-naming would complain about those: - -test_zeroconf.py:221:10: N806 variable 'numQuestions' in function should be lowercase - (numQuestions, numAnswers, numAuthorities, ([`49fc106`](https://github.com/python-zeroconf/python-zeroconf/commit/49fc1067245b2d3a7bcc1e7611f36ba8d9a36598)) - -* Fix flake8 (#131) - -* flake8 and therefore Travis should be happy now - -* attempt to fix flake8 - -* happy flake8 ([`53bc65a`](https://github.com/python-zeroconf/python-zeroconf/commit/53bc65af14ed979a5234bfa03c1295a2b27f6e40)) - -* implementing unicast support (#124) - -* implementing unicast support - -* use multicast=False for outgoing dns requests in unicast mode ([`826c961`](https://github.com/python-zeroconf/python-zeroconf/commit/826c9619797e4cf1f2c39b95ed1c93faed7eee2a)) - -* Remove unwanted
whitespace ([`d0d1cfb`](https://github.com/python-zeroconf/python-zeroconf/commit/d0d1cfbb31f0ea6bd08b0c8ffa97ba3d7604bccc)) - -* Fix TTL handling for published service, align default TTL with RFC6762 (#113) - -Honor TTL passed in service registration -Set default TTL to 120 s as recommended by RFC6762 ([`14e3ad5`](https://github.com/python-zeroconf/python-zeroconf/commit/14e3ad5f15f5a0f5235ad7dbb22924b4b5ae1c77)) - -* add import error for Python <= 3.3 (#123) ([`fe62ba3`](https://github.com/python-zeroconf/python-zeroconf/commit/fe62ba31a8ab05a948ed6036dc319b1a1fa14e66)) - - -## v0.20.0 (2018-02-21) - -### Unknown - -* Release version 0.20.0 ([`0622570`](https://github.com/python-zeroconf/python-zeroconf/commit/0622570645116b0c45ee03d38b7b308be2026bd4)) - -* Add some missing release information ([`5978bdb`](https://github.com/python-zeroconf/python-zeroconf/commit/5978bdbdab017d06ea496ea6d7c66c672751b255)) - -* Drop support for Python 2 and 3.3 - -This simplifies the code slightly, reduces the number of dependencies -and otherwise speeds up the CI process. If someone *really* needs to use -really old Python they have the option of using older versions of the -package. ([`f22f421`](https://github.com/python-zeroconf/python-zeroconf/commit/f22f421e4e6bf1ca7671b1eb540ba09fbf1e04b1)) - -* Add license and readme file to source tarball (#108) - -Closes #97 ([`6ad04a5`](https://github.com/python-zeroconf/python-zeroconf/commit/6ad04a5d7f6d63c1f48b5948b6ade0e56cafe258)) - -* Allow the usage of newer netifaces in development - -We're being consistent with c5e1f65c19b2f63a09b6517f322d600911fa1e13 -here. 
([`7123f8e`](https://github.com/python-zeroconf/python-zeroconf/commit/7123f8ed7dfd9277245748271d8870f18299b035)) - -* Correct broken __eq__ in child classes to DNSRecord ([`4d6dd73`](https://github.com/python-zeroconf/python-zeroconf/commit/4d6dd73a8313b81bbfef8b074d6fe4878bce4f74)) - -* Refresh ServiceBrowser entries already when 'stale' -Updated integration testcase to test for this. ([`37c5211`](https://github.com/python-zeroconf/python-zeroconf/commit/37c5211980548ab701bba725feeb5395ed1af0a7)) - -* Add new records first in cache entry instead of last (#110) - -* Add new records first in cache entry instead of last - -* Added DNSCache unit test ([`8101b55`](https://github.com/python-zeroconf/python-zeroconf/commit/8101b557199c4d3d001c75a717eafa4d5544142f)) - - -## v0.19.1 (2017-06-13) - -### Unknown - -* Use more recent PyPy3 on Travis CI - -The default PyPy3 is really old (implements Python 3.2) and some -packages won't cooperate with it anymore. ([`d0e4712`](https://github.com/python-zeroconf/python-zeroconf/commit/d0e4712eaa696ff13470b719cb6842260a3ada11)) - -* Release version 0.19.1 ([`1541191`](https://github.com/python-zeroconf/python-zeroconf/commit/1541191090a92ef23b8e3747933c95f7233aa2de)) - -* Allow newer netifaces releases - -The bug that was concerning us[1] is fixed now. 
- -[1] https://bitbucket.org/al45tair/netifaces/issues/39/netmask-is-always-255255255255 ([`c5e1f65`](https://github.com/python-zeroconf/python-zeroconf/commit/c5e1f65c19b2f63a09b6517f322d600911fa1e13)) - - -## v0.19.0 (2017-03-21) - -### Unknown - -* Release version 0.19.0 ([`ecadb8c`](https://github.com/python-zeroconf/python-zeroconf/commit/ecadb8c30cd8e75da5b6d3e0e93d024f013dbfa2)) - -* Fix a whitespace issue flake8 doesn't like ([`87aa4e5`](https://github.com/python-zeroconf/python-zeroconf/commit/87aa4e587221e982902233ed2c8990ed27a2290f)) - -* Remove outdated example ([`d8686b5`](https://github.com/python-zeroconf/python-zeroconf/commit/d8686b5642d66b2c9ecc6f40b92e1a1a28279f79)) - -* Remove outdated comment ([`5aa6e85`](https://github.com/python-zeroconf/python-zeroconf/commit/5aa6e8546438d76b3fba5e91f9e4d4e3a3901757)) - -* Work around netifaces Windows netmask bug ([`6231d6d`](https://github.com/python-zeroconf/python-zeroconf/commit/6231d6d48d89240d95de9644570baf1b07ab04b0)) - - -## v0.18.0 (2017-02-03) - -### Unknown - -* Release version 0.18.0 ([`48b1949`](https://github.com/python-zeroconf/python-zeroconf/commit/48b19498724825237d3002ee7681b6296c625b12)) - -* Add a missing changelog entry ([`5343510`](https://github.com/python-zeroconf/python-zeroconf/commit/53435104d5fb29847ac561f58e16cb48dd97b9f8)) - -* Handle select errors when closing Zeroconf - -Based on a pull request by someposer[1] (code adapted to work on -Python 3). - -Fixes two pychromecast issues[2][3]. 
- -[1] https://github.com/jstasiak/python-zeroconf/pull/88 -[2] https://github.com/balloob/pychromecast/issues/59 -[3] https://github.com/balloob/pychromecast/issues/120 ([`6e229f2`](https://github.com/python-zeroconf/python-zeroconf/commit/6e229f2714c8aff6555dfee2bdff34bda980a0c3)) - -* Explicitly support Python 3.6 ([`0a5ea31`](https://github.com/python-zeroconf/python-zeroconf/commit/0a5ea31543941033bcb4b2cb76fa7e125cb33550)) - -* Pin flake8 because flake8-import-order is pinned ([`9f0d8fe`](https://github.com/python-zeroconf/python-zeroconf/commit/9f0d8fe87dedece1365149911ce9587482fe1501)) - -* Drop Python 2.6 support, no excuse to use 2.6 these days ([`56ea542`](https://github.com/python-zeroconf/python-zeroconf/commit/56ea54245eeab9d544d96c38d136f9f47eedcda4)) - - -## v0.17.7 (2017-02-01) - -### Unknown - -* Prepare the 0.17.7 release ([`376e011`](https://github.com/python-zeroconf/python-zeroconf/commit/376e011ad60c051f27632c77e6d50b64cf1defec)) - -* Merge pull request #77 from stephenrauch/fix-instance-name-with-dot - -Allow dots in service instance name ([`9035c6a`](https://github.com/python-zeroconf/python-zeroconf/commit/9035c6a246b6856b5087b1bba9a9f3ce5873fcda)) - -* Allow dots in service instance name ([`e46af83`](https://github.com/python-zeroconf/python-zeroconf/commit/e46af83d35b4430d4577481b371d569797427858)) - -* Merge pull request #75 from stephenrauch/Fix-name-change - -Fix for #29 ([`136dce9`](https://github.com/python-zeroconf/python-zeroconf/commit/136dce985fd66c81159d48b5f40e44349d1070ef)) - -* Fix/Implement duplicate name change (Issue 29) ([`788a48f`](https://github.com/python-zeroconf/python-zeroconf/commit/788a48f78466e048bdfc3028618bc4eaf807ef5b)) - -* some docs, cleanup and a couple of small test cases ([`b629ffb`](https://github.com/python-zeroconf/python-zeroconf/commit/b629ffb9c860a30366fa83b71487b546d6edd15b)) - -* Merge pull request #73 from stephenrauch/simplify-and-fix-pr-70 - -Simplify and fix PR 70 
([`6b67c0d`](https://github.com/python-zeroconf/python-zeroconf/commit/6b67c0d562866e63b81d1ec1c7f540c56244ade1)) - -* Simplify and fix PR 70 ([`2006cdd`](https://github.com/python-zeroconf/python-zeroconf/commit/2006cddf99377f43b528fbafea7d98be9d6282f0)) - -* Merge pull request #72 from stephenrauch/Catch-and-log-sendto-exceptions - -Catch and log sendto() exceptions ([`c3f563f`](https://github.com/python-zeroconf/python-zeroconf/commit/c3f563f6d108d46732a380b7912f8f5c23d5e548)) - -* Catch and log sendto() exceptions ([`0924310`](https://github.com/python-zeroconf/python-zeroconf/commit/0924310415b79f0fa2523494d8a60803ec295e09)) - -* Merge pull request #71 from stephenrauch/improved-test-coverage - -Improve test coverage, and fix issues found ([`254c207`](https://github.com/python-zeroconf/python-zeroconf/commit/254c2077f727d5e130aab2aaec111d58c134bd79)) - -* Improve test coverage, and fix issues found ([`aa1f484`](https://github.com/python-zeroconf/python-zeroconf/commit/aa1f48433cbd4dbf52565ec0c2635e5d52a37086)) - -* Merge pull request #70 from stephenrauch/Limit-size-of-packet - -Limit the size of the packet that can be built ([`208e221`](https://github.com/python-zeroconf/python-zeroconf/commit/208e2219a1268e637e3cf02e1838cb94a6de2f31)) - -* Limit the size of the packet that can be built ([`8355c85`](https://github.com/python-zeroconf/python-zeroconf/commit/8355c8556929fcdb777705c97fc99de6012367b4)) - -* Merge pull request #69 from stephenrauch/name-compression - -Help for oversized packets ([`5d9f40d`](https://github.com/python-zeroconf/python-zeroconf/commit/5d9f40de1a8549633cb5592fafc34d34df172965)) - -* Implement Name Compression ([`59877eb`](https://github.com/python-zeroconf/python-zeroconf/commit/59877ebb1b20ccd2747a0601e30329162ddcba4c)) - -* Drop oversized packets in send() ([`035605a`](https://github.com/python-zeroconf/python-zeroconf/commit/035605ab000fc8a8af94b4b9e1be9b81880b6bca)) - -* Add exception handler for oversized packets 
([`af19c12`](https://github.com/python-zeroconf/python-zeroconf/commit/af19c12ec2286ee49e789a11599551dc43391383)) - -* Add QuietLogger mixin ([`0b77872`](https://github.com/python-zeroconf/python-zeroconf/commit/0b77872f7bb06ba6949c69bbfb70e8ae21f8ff9b)) - -* Improve service name validation error messages ([`fad66ca`](https://github.com/python-zeroconf/python-zeroconf/commit/fad66ca696530d39d8d5ae598e1724077eba8a5e)) - -* Merge pull request #68 from stephenrauch/Handle-dnsincoming-exceptions - -Handle DNSIncoming exceptions ([`6c0a32d`](https://github.com/python-zeroconf/python-zeroconf/commit/6c0a32d6e4bd7be0b7573b95a5325b19dfd509d2)) - -* Make all test cases localhost only ([`080d0c0`](https://github.com/python-zeroconf/python-zeroconf/commit/080d0c09f1e58d4f8c430dac513948e5919e3f3b)) - -* Handle DNS Incoming Exception - -This fixes a regression from removal of some overly broad exception -handling in 0.17.6. This change adds an explicit handler for -DNSIncoming(). Will also log at warn level the first time it sees a -particular parsing exception. 
([`061a2aa`](https://github.com/python-zeroconf/python-zeroconf/commit/061a2aa3c6e8a7c954a313c8a7d396f26f544c2b)) - - -## v0.17.6 (2016-07-08) - -### Testing - -* test: added test for DNS-SD subtype discovery ([`914241b`](https://github.com/python-zeroconf/python-zeroconf/commit/914241b92c3097669e1e8c1a380f6c2f23a14cf8)) - -### Unknown - -* Fix readme to be valid reStructuredText, ([`94570b7`](https://github.com/python-zeroconf/python-zeroconf/commit/94570b730aaab606db820b9c4d48b1c313fdaa98)) - -* Prepare release 0.17.6 ([`e168a6f`](https://github.com/python-zeroconf/python-zeroconf/commit/e168a6fa5486d92114fb02d4c40b36f8298a022f)) - -* Merge pull request #61 from stephenrauch/add-python3.5 - -Add python 3.5 to Travis ([`617d9fd`](https://github.com/python-zeroconf/python-zeroconf/commit/617d9fd0db5bef350eaebd13cfcc73803900ad24)) - -* Add python 3.5 to Travis ([`6198e89`](https://github.com/python-zeroconf/python-zeroconf/commit/6198e8909b968430ddac9261f4dd9c508d96db65)) - -* Merge pull request #60 from stephenrauch/delay_ServiceBrowser_connect - -Delay connecting ServiceBrowser() until it is running ([`56d9ac1`](https://github.com/python-zeroconf/python-zeroconf/commit/56d9ac13381a3ae205cb2b9339981a50f0a2eb62)) - -* Delay connecting ServiceBrowser() until it is running ([`6d1370c`](https://github.com/python-zeroconf/python-zeroconf/commit/6d1370cc2aa6d2c125aa924342e224b6b92ef8d9)) - -* Merge pull request #57 from herczy/master - -resolve issue #56: service browser initialization race ([`0225a18`](https://github.com/python-zeroconf/python-zeroconf/commit/0225a18957a26855720d7ab002f3983cb9d76e0e)) - -* resolve issue #56: service browser initialization race ([`1567016`](https://github.com/python-zeroconf/python-zeroconf/commit/15670161c597bc035c0e9411d0bb830b9520589f)) - -* Merge pull request #58 from strahlex/subtype-test - -added test for DNS-SD subtype discovery
([`4a569fe`](https://github.com/python-zeroconf/python-zeroconf/commit/4a569fe389d2fb5fd4b4f294ae9ebc0e38164e4a)) - -* Merge pull request #53 from stephenrauch/validate_service_names - -Validate service names ([`76a5e99`](https://github.com/python-zeroconf/python-zeroconf/commit/76a5e99f2e772a9462d0f4b3ab4c80f1b0a3b542)) - -* Service Name Validation - -This change validates service, instance and subtype names against -rfc6763. - -Also adds test code for subtypes and provides a fix for issue 37. ([`88fa059`](https://github.com/python-zeroconf/python-zeroconf/commit/88fa0595cd880b6d82ac8580512461e64eb32d6b)) - -* Test Case and fixes for DNSHInfo (#49) - -* Fix ability for a cache lookup to match properly - -When querying for a service type, the response is processed. During the -processing, an info lookup is performed. If the info is not found in -the cache, then a query is sent. Trouble is that the info requested is -present in the same packet that triggered the lookup, and a query is not -necessary. But two problems caused the cache lookup to fail. - -1) The info was not yet in the cache. The call back was fired before -all answers in the packet were cached. - -2) The test for a cache hit did not work, because the cache hit test -uses a DNSEntry as the comparison object. But some of the objects in -the cache are descendents of DNSEntry and have their own __eq__() -defined which accesses fields only present on the descendent. Thus the -test can NEVER work since the descendent's __eq__() will be used. - -Also continuing the theme of some other recent pull requests, add three -_GLOBAL_DONE tests to avoid doing work after the attempted stop, and -thus avoid generating (harmless, but annoying) exceptions during -shutdown - -* Remove unnecessary packet send in ServiceInfo.request() - -When performing an info query via request(), a listener is started, and -a packet is formed. As the packet is formed, known answers are taken -from the cache and placed into the packet. 
Then the packet is sent. -The packet is self received (via multicast loopback, I assume). At that -point the listener is fired and the answers in the packet are propagated -back to the object that started the request. This is a really long way -around the barn. - -The PR queries the cache directly in request() and then calls -update_record(). If all of the information is in the cache, then no -packet is formed or sent or received. This approach was taken because, -for whatever reason, the reception of the packets on windows via the -loopback was proving to be unreliable. The method has the side benefit -of being a whole lot faster. - -This PR also incorporates the joins() from PR #30. In addition it moves -the two joins() in close() to their own thread because they can take -quite a while to execute. - -* Fix locking race condition in Engine.run() - -This fixes a race condition in which the receive engine was waiting -against its condition variable under a different lock than the one it -used to determine if it needed to wait. This was causing the code to -sometimes take 5 seconds to do anything useful. - -When fixing the race condition, decided to also fix the other -correctness issues in the loop which was likely causing the errors that -led to the inclusion of the 'except Exception' catch all. This in turn -allowed the use of EBADF error due to closing the socket during exit to -be used to get out of the select in a timely manner. - -Finally, this allowed reorganizing the shutdown code to shutdown from -the front to the back. That is to say, shutdown the recv socket first, -which then allows a clean join with the engine thread. After the engine -thread exits most everything else is inert as all callbacks have been -unwound. - -* Remove a now invalid test case - -With the restructure of shutdown, Listener() now needs to throw EBADF on -a closed socket to allow a timely and graceful shutdown. 
- -* Shutdown the service listeners in an organized fashion - -Also adds names to the various threads to make debugging easier. - -* Improve test coverage - -Add more needed shutdown cleanup found via additional test coverage. - -Force timeout calculation from milli to seconds to use floating point. - -* init ServiceInfo._properties - -* Add query support and test case for _services._dns-sd._udp.local. - -* pep8 cleanup - -* Add testcase and fixes for HInfo Record Generation - -The DNSHInfo packet generation code was broken. There was no test case for that -functionality, and adding a test case showed four issues. Two of which were -relative to PY3 string, one of which was a typoed reference to an attribute, -and finally the two fields present in the HInfo record were using the wrong -encoding, which is what necessitated the change from write_string() to -write_character_string(). ([`6b39c70`](https://github.com/python-zeroconf/python-zeroconf/commit/6b39c70fa1ed7cfac89e02e2b3764a9038b87267)) - -* Merge pull request #48 from stephenrauch/Find-Service-Types - -Find service types ([`1dfc40f`](https://github.com/python-zeroconf/python-zeroconf/commit/1dfc40f4da145a55d60a952df90301ee0e5d65c4)) - -* Add query support and test case for _services._dns-sd._udp.local. ([`cfbb157`](https://github.com/python-zeroconf/python-zeroconf/commit/cfbb1572e44c4d8af1b50cb62abc0d426fc8e3ea)) - -* Merge pull request #45 from stephenrauch/master - -Multiple fixes to speed up querys and remove exceptions at shutdown ([`183cd81`](https://github.com/python-zeroconf/python-zeroconf/commit/183cd81d9274bf28c642314df2f9e32f1f60020b)) - -* init ServiceInfo._properties ([`d909942`](https://github.com/python-zeroconf/python-zeroconf/commit/d909942e2c9479819e9113ffb3a354b1d99d6814)) - -* Improve test coverage - -Add more needed shutdown cleanup found via additional test coverage. - -Force timeout calculation from milli to seconds to use floating point. 
([`75232cc`](https://github.com/python-zeroconf/python-zeroconf/commit/75232ccf28a820ee723db072951078eba31145a5)) - -* Shutdown the service listeners in an organized fashion - -Also adds names to the various threads to make debugging easier. ([`ad3c248`](https://github.com/python-zeroconf/python-zeroconf/commit/ad3c248e4b67d5d2e9a4448a56b4e4648284ecd4)) - -* Remove a now invalid test case - -With the restructure of shutdown, Listener() now needs to throw EBADF on -a closed socket to allow a timely and graceful shutdown. ([`7bbee59`](https://github.com/python-zeroconf/python-zeroconf/commit/7bbee590e553a1ff0e4dde3b1fdcf614b7e1ecd5)) - -* Fix locking race condition in Engine.run() - -This fixes a race condition in which the receive engine was waiting -against its condition variable under a different lock than the one it -used to determine if it needed to wait. This was causing the code to -sometimes take 5 seconds to do anything useful. - -When fixing the race condition, decided to also fix the other -correctness issues in the loop which was likely causing the errors that -led to the inclusion of the 'except Exception' catch all. This in turn -allowed the use of EBADF error due to closing the socket during exit to -be used to get out of the select in a timely manner. - -Finally, this allowed reorganizing the shutdown code to shutdown from -the front to the back. That is to say, shutdown the recv socket first, -which then allows a clean join with the engine thread. After the engine -thread exits most everything else is inert as all callbacks have been -unwound. ([`8a110f5`](https://github.com/python-zeroconf/python-zeroconf/commit/8a110f58b02825100f5bdb56c119495ae42ae54c)) - -* Remove unnecessary packet send in ServiceInfo.request() - -When performing an info query via request(), a listener is started, and -a packet is formed. As the packet is formed, known answers are taken -from the cache and placed into the packet. Then the packet is sent. 
-The packet is self received (via multicast loopback, I assume). At that -point the listener is fired and the answers in the packet are propagated -back to the object that started the request. This is a really long way -around the barn. - -The PR queries the cache directly in request() and then calls -update_record(). If all of the information is in the cache, then no -packet is formed or sent or received. This approach was taken because, -for whatever reason, the reception of the packets on windows via the -loopback was proving to be unreliable. The method has the side benefit -of being a whole lot faster. - -This PR also incorporates the joins() from PR #30. In addition it moves -the two joins() in close() to their own thread because they can take -quite a while to execute. ([`c49145c`](https://github.com/python-zeroconf/python-zeroconf/commit/c49145c35de09b2631d8a2b4751d787a6b4dc904)) - -* Fix ability for a cache lookup to match properly - -When querying for a service type, the response is processed. During the -processing, an info lookup is performed. If the info is not found in -the cache, then a query is sent. Trouble is that the info requested is -present in the same packet that triggered the lookup, and a query is not -necessary. But two problems caused the cache lookup to fail. - -1) The info was not yet in the cache. The call back was fired before -all answers in the packet were cached. - -2) The test for a cache hit did not work, because the cache hit test -uses a DNSEntry as the comparison object. But some of the objects in -the cache are descendents of DNSEntry and have their own __eq__() -defined which accesses fields only present on the descendent. Thus the -test can NEVER work since the descendent's __eq__() will be used. 
- -Also continuing the theme of some other recent pull requests, add three -_GLOBAL_DONE tests to avoid doing work after the attempted stop, and -thus avoid generating (harmless, but annoying) exceptions during -shutdown ([`d8562fd`](https://github.com/python-zeroconf/python-zeroconf/commit/d8562fd3546d6cd27b1ba9e95105ea534649a43e)) - - -## v0.17.5 (2016-03-14) - -### Unknown - -* Prepare release 0.17.5 ([`f33b8f9`](https://github.com/python-zeroconf/python-zeroconf/commit/f33b8f9c182245b14b9b73a86aefedcee4520eb5)) - -* resolve issue #38: size change during iteration ([`fd9d531`](https://github.com/python-zeroconf/python-zeroconf/commit/fd9d531f294e7fa5b9b934f192b061f56eaf1d37)) - -* Installation on system with ASCII encoding - -The default open function in python2 made a best effort to open text files of any encoding. -After 3.0 the encoding has to be set correctly and it defaults to the user preferences. ([`6007537`](https://github.com/python-zeroconf/python-zeroconf/commit/60075379d57664f94fa41a96dea7c7c64489ef3d)) - -* Revert "Switch from netifaces to psutil" - -psutil doesn't seem to work on pypy3: - - Traceback (most recent call last): - File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/failure.py", line 39, in runTest - raise self.exc_val.with_traceback(self.tb) - File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/loader.py", line 414, in loadTestsFromName - addr.filename, addr.module) - File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/importer.py", line 47, in importFromPath - return self.importFromDir(dir_path, fqname) - File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/nose/importer.py", line 94, in importFromDir - mod = load_module(part_fqname, fh, filename, desc) - File "/home/travis/build/jstasiak/python-zeroconf/test_zeroconf.py", line 17, in - import zeroconf as r - File "/home/travis/build/jstasiak/python-zeroconf/zeroconf.py", line 35, in - import psutil - File 
"/home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/__init__.py", line 62, in - from . import _pslinux as _psplatform - File "/home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/_pslinux.py", line 23, in - from . import _psutil_linux as cext - ImportError: unable to load extension module - '/home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/_psutil_linux.pypy3-24.so': - /home/travis/virtualenv/pypy3-2.4.0/site-packages/psutil/_psutil_linux.pypy3-24.so: undefined symbol: PyModule_GetState - -Additionally netifaces turns out to be possible to install on Python 3, -therefore making it necessary to investigate the original issue. - -This reverts commit dd907f2eed3768a3c1e3889af84b5dbeb700a1e7. ([`6349d19`](https://github.com/python-zeroconf/python-zeroconf/commit/6349d197b442209331a0ff8676541967f7142991)) - -* fix issue #23 race-condition on ServiceBrowser startup ([`30bd44f`](https://github.com/python-zeroconf/python-zeroconf/commit/30bd44f04f94a9b26622a7213dd9950ae57df21c)) - -* Switch from netifaces to psutil - -netifaces installation on Python 3.x is broken and there doesn't seem to -be any plan to release a working version on PyPI, instead of using its -fork I decided to use another package providing the required -information. - -This closes https://github.com/jstasiak/python-zeroconf/issues/31 - -[1] https://bitbucket.org/al45tair/netifaces/issues/13/0104-install-is-broken-on-python-3x ([`dd907f2`](https://github.com/python-zeroconf/python-zeroconf/commit/dd907f2eed3768a3c1e3889af84b5dbeb700a1e7)) - -* Fix multicast TTL and LOOP options on OpenBSD - -IP_MULTICAST_TTL and IP_MULTICAST_LOOP socket options on OpenBSD don't -accept int, only unsigned char. Otherwise you will get an error: -[Errno 22] Invalid argument. 
([`0f46a06`](https://github.com/python-zeroconf/python-zeroconf/commit/0f46a0609931e6dc299c0473312e434e84abe7b0)) - - -## v0.17.4 (2015-09-22) - -### Unknown - -* Prepare release 0.17.4 ([`0b9093d`](https://github.com/python-zeroconf/python-zeroconf/commit/0b9093de863928d7f13092aaf2be1f0a33f4ead2)) - -* Support kernel versions <3.9 - -added catch of OSError -added catch of socket.error for python2 ([`023426e`](https://github.com/python-zeroconf/python-zeroconf/commit/023426e0f8982640f46bca3dfcd3abeee2cb832f)) - -* Make it explicit who says what in the readme ([`ddb1048`](https://github.com/python-zeroconf/python-zeroconf/commit/ddb10485ef17aec3f37ef70dcb37af167271bfe1)) - - -## v0.17.3 (2015-08-19) - -### Unknown - -* Make the package's status explicit ([`f29c0f4`](https://github.com/python-zeroconf/python-zeroconf/commit/f29c0f475be76f70ecbb1586deb4618180dd1969)) - -* Prepare release 0.17.3 ([`9c3a81a`](https://github.com/python-zeroconf/python-zeroconf/commit/9c3a81af84c3450459795e5fc5142300f9680804)) - -* Add a DNSText __repr__ test - -The test helps making sure the situation fixed by -e8299c0527c965f83c1326b18e484652a9eb829c doesn't happen again. 
([`c7567d6`](https://github.com/python-zeroconf/python-zeroconf/commit/c7567d6b065d7460e2022b8cde5dd0b52a3828a7)) - -* Fix DNSText repr Python 3 issue - -Prevents following exception: -``` - File "/Users/paulus/dev/python/netdisco/lib/python3.4/site-packages/zeroconf.py", line 412, in __repr__ - return self.to_string(self.text[:7] + "...") -TypeError: can't concat bytes to str -``` ([`e8299c0`](https://github.com/python-zeroconf/python-zeroconf/commit/e8299c0527c965f83c1326b18e484652a9eb829c)) - - -## v0.17.2 (2015-07-12) - -### Unknown - -* Release version 0.17.2 ([`d1ee5ce`](https://github.com/python-zeroconf/python-zeroconf/commit/d1ee5ce7558060ea8d92f804172f67f960f814bb)) - -* Fix a typo, meant strictly lesser than 0.6 :< ([`dadbbfc`](https://github.com/python-zeroconf/python-zeroconf/commit/dadbbfc9e1787561981807d3e008433a107c1e5e)) - -* Restrict flake8-import-order version - -There seems to be a bug in 0.6.x, see -https://github.com/public/flake8-import-order/issues/42 ([`4435a2a`](https://github.com/python-zeroconf/python-zeroconf/commit/4435a2a4ae1c0b0877785f1a5047f65bb80a14bd)) - -* Use enum-compat instead of enum34 directly - -This is in order for the package's installation to work on Python 3.4+, -solves the same issue as -https://github.com/jstasiak/python-zeroconf/pull/22. 
([`ba89455`](https://github.com/python-zeroconf/python-zeroconf/commit/ba894559f43fa6955989b92533c06fd8e8b92c74)) - - -## v0.17.1 (2015-04-10) - -### Unknown - -* Restrict pep8 version as something depends on it ([`4dbd04b`](https://github.com/python-zeroconf/python-zeroconf/commit/4dbd04b807813384108ff8e4cb5291c2560eed6b)) - -* Bump version to 0.17.1 ([`0b8936b`](https://github.com/python-zeroconf/python-zeroconf/commit/0b8936b94011c0783c7d0469b9ebae76cd4d1976)) - -* Fix some typos in the readme ([`7c64ebf`](https://github.com/python-zeroconf/python-zeroconf/commit/7c64ebf6129fb6c0c533a1fed618c9d5926d5100)) - -* Update README.rst ([`44fa62a`](https://github.com/python-zeroconf/python-zeroconf/commit/44fa62a738335781ecdd789ad636f82e6542ecd2)) - -* Update README.rst ([`a22484a`](https://github.com/python-zeroconf/python-zeroconf/commit/a22484af90c7c4cbdee849d2b75efab2772c3592)) - -* Getting an EADDRNOTAVAIL error when adding an address to the multicast group on windows. ([`93d34f9`](https://github.com/python-zeroconf/python-zeroconf/commit/93d34f925cd8913ff6836f9393cdce15679e4794)) - - -## v0.17.0 (2015-04-10) - -### Unknown - -* Do 0.17.0 release ([`a6d75b3`](https://github.com/python-zeroconf/python-zeroconf/commit/a6d75b3d63a0c13c63473910b832e6db12635e79)) - -* Advertise pypy3 support ([`4783611`](https://github.com/python-zeroconf/python-zeroconf/commit/4783611de72ac11bdbfea9e4324e58746a91e70a)) - -* Handle recent flake8 change ([`0009b5e`](https://github.com/python-zeroconf/python-zeroconf/commit/0009b5ea2bca77f395eb2bacc69d0dcfa5dd37dc)) - -* Describe recent changes ([`5c32a27`](https://github.com/python-zeroconf/python-zeroconf/commit/5c32a27a6ae0cccf7af25961cd98560a5173b065)) - -* Add pypy3 build ([`a298785`](https://github.com/python-zeroconf/python-zeroconf/commit/a298785cf63d26b184495f972c619d31515a1468)) - -* Restore old listener interface (and example) for now 
([`c748294`](https://github.com/python-zeroconf/python-zeroconf/commit/c748294fdc6f3bf527f62d4c0cb76ace32890128)) - -* Fix test breakage ([`b5fb3e8`](https://github.com/python-zeroconf/python-zeroconf/commit/b5fb3e86a688f6161c1292ccdffeec9f455c1fbd)) - -* Prepare for new release ([`275a22b`](https://github.com/python-zeroconf/python-zeroconf/commit/275a22b997331d499526293b98faff11ca6edea5)) - -* Move self test example out of main module ([`ac5a63e`](https://github.com/python-zeroconf/python-zeroconf/commit/ac5a63ece96fbf9d64e41e7a4867cc1d8b2f6b96)) - -* Fix using binary strings as property values - -Previously it'd fall trough and set the value to False ([`b443027`](https://github.com/python-zeroconf/python-zeroconf/commit/b4430274ba8355ceaadc2d89a84752f1ac1485e7)) - -* Reformat a bit ([`2190818`](https://github.com/python-zeroconf/python-zeroconf/commit/219081860d28e49b1ae71a78e1a0da459689ab9c)) - -* Make examples' output quiet by default ([`08e0dc2`](https://github.com/python-zeroconf/python-zeroconf/commit/08e0dc2c7c1551ffa9a1e7297112b0f46b7ccc4e)) - -* Change ServiceBrowser interface experimentally ([`d162e54`](https://github.com/python-zeroconf/python-zeroconf/commit/d162e54c6aad175505028aa7beb8a1a0cb7a231d)) - -* Handle exceptions better ([`7cad7a4`](https://github.com/python-zeroconf/python-zeroconf/commit/7cad7a43179e3f547796b125e3ed8169ef3f4157)) - -* Add some debug logging ([`451c072`](https://github.com/python-zeroconf/python-zeroconf/commit/451c0729e2490ac6283010ddcbbcc723d86e6765)) - -* Make the code nicer - -This includes: - -* rearranging code to make it more readable -* catching KeyError instead of all exceptions and making it obvious what - can possibly raise there -* renaming things ([`df88670`](https://github.com/python-zeroconf/python-zeroconf/commit/df88670963e8c3a1f11a6af026b484ff4343d271)) - -* Remove redundant parentheses ([`3775c47`](https://github.com/python-zeroconf/python-zeroconf/commit/3775c47d8cf3c941603fa393265b86d05f61b915)) - -* 
Make examples nicer and make them show all logs ([`193ee64`](https://github.com/python-zeroconf/python-zeroconf/commit/193ee64d6212ff9a814b76b13f9ef46676025dc3)) - -* Remove duplicates from all interfaces list - -It has been mentioned in GH #12 that the list of all machine's network -interfaces can contain duplicates; it shouldn't break anything but -there's no need to open multiple sockets in such case. ([`af5e363`](https://github.com/python-zeroconf/python-zeroconf/commit/af5e363e7fcb392081dc98915defd93c5002c3fc)) - -* Don't fail when the netmask is unknown ([`463428f`](https://github.com/python-zeroconf/python-zeroconf/commit/463428ff8550a4f0e12b60e6f6a35efedca31271)) - -* Skip host only network interfaces - -On Ubuntu Linux treating such interface (network mask 255.255.255.255) -would result in: - -* EADDRINUSE "Address already in use" when trying to add multicast group - membership using IP_ADD_MEMBERSHIP -* success when setting the interface as outgoing multicast interface - using IP_MULTICAST_IF -* EINVAL "Invalid argument" when trying to send multicast datagram using - socket with that interface set as the multicast outgoing interface ([`b5e9e94`](https://github.com/python-zeroconf/python-zeroconf/commit/b5e9e944e6f3c990862b3b03831bb988579ed340)) - -* Configure logging during the tests ([`0208228`](https://github.com/python-zeroconf/python-zeroconf/commit/0208228d8c760f3672954f5434c2ea54d7fd4196)) - -* Use all network interfaces by default ([`193cf47`](https://github.com/python-zeroconf/python-zeroconf/commit/193cf47a1144afc9158f0075a886c1f754d96f18)) - -* Ignore EADDRINUSE when appropriate - -On some systems it's necessary to do so ([`0f7c64f`](https://github.com/python-zeroconf/python-zeroconf/commit/0f7c64f8cdacae34c227edd5da4f445ece12da89)) - -* Export Error and InterfaceChoice ([`500a76b`](https://github.com/python-zeroconf/python-zeroconf/commit/500a76bb1332fe34b45e681c767baddfbece4916)) - -* Fix ServiceInfo repr and text on Python 3 - -Closes #1 
([`f3fd4cd`](https://github.com/python-zeroconf/python-zeroconf/commit/f3fd4cd69e9707221d8bd5ee6b3bb86b0985f604)) - -* Add preliminary support for mulitple net interfaces ([`442a599`](https://github.com/python-zeroconf/python-zeroconf/commit/442a59967f7b0f2d5c2ef512874ad2ab13dedae4)) - -* Rationalize error handling when sending data ([`a0ee3d6`](https://github.com/python-zeroconf/python-zeroconf/commit/a0ee3d62db7b5350a21091e37824e187ebf99348)) - -* Make Zeroconf.socket private ([`78449ef`](https://github.com/python-zeroconf/python-zeroconf/commit/78449ef1e07dc68b63bb68038cb66f22e083fdfe)) - -* Refactor Condition usage to use context manager interface ([`8d32fa4`](https://github.com/python-zeroconf/python-zeroconf/commit/8d32fa4b12e1b52d72a7ba9588437c4c787e0ffd)) - -* Use six for Python 2/3 compatibility ([`f0c3979`](https://github.com/python-zeroconf/python-zeroconf/commit/f0c39797869175cf88d76c75d39835abb2052f88)) - -* Use six for Python 2/3 compatibility ([`54ed4b7`](https://github.com/python-zeroconf/python-zeroconf/commit/54ed4b79bb8de9523b5a5b74a79b01c8aa2291a7)) - -* Refactor version detection in the setup script - -This doesn't depend on zeroconf module being importable when setup is -ran ([`1c2205d`](https://github.com/python-zeroconf/python-zeroconf/commit/1c2205d5c9b364a825d51acd03add4de91cb645a)) - -* Drop "zero dependencies" feature ([`d8c1ec8`](https://github.com/python-zeroconf/python-zeroconf/commit/d8c1ec8ee13191e8ec4412770994f0676ace442c)) - -* Stop dropping multicast group membership - -It'll be taken care of by socket being closed ([`f6425d1`](https://github.com/python-zeroconf/python-zeroconf/commit/f6425d1d727edfa124264bcabeffd77397809965)) - -* Remove dead code ([`88f5a51`](https://github.com/python-zeroconf/python-zeroconf/commit/88f5a5193ba2ab0eefc99481ccc6a1b911d8dbea)) - -* Stop using Zeroconf.group attribute ([`903cb78`](https://github.com/python-zeroconf/python-zeroconf/commit/903cb78d3ff7bc8762bf23910562b8f5042c2f85)) - -* Remove some 
unused methods ([`80e8e10`](https://github.com/python-zeroconf/python-zeroconf/commit/80e8e1008bc28c8ab9ca966b89109146112d0edd)) - -* Refactor exception handling here ([`4b8f68b`](https://github.com/python-zeroconf/python-zeroconf/commit/4b8f68b39230bb9cc3c202395b58cc822b8fe862)) - -* Update README.rst ([`8f18609`](https://github.com/python-zeroconf/python-zeroconf/commit/8f1860956ee9c86b7ba095fc1293919933e1c0ad)) - -* Release as 0.16.0 ([`4e54b67`](https://github.com/python-zeroconf/python-zeroconf/commit/4e54b6738a490dcc7d2f9e7e1040c5da53727155)) - -* Tune logging ([`05c3c02`](https://github.com/python-zeroconf/python-zeroconf/commit/05c3c02044d2b4bff946e00803d0ddb2619f0927)) - -* Migrate from clazz to class_ ([`4a67e12`](https://github.com/python-zeroconf/python-zeroconf/commit/4a67e124cd8f8c4d19f8c6c4a455d075bb948362)) - -* Migrate more camel case names to snake case ([`92e4713`](https://github.com/python-zeroconf/python-zeroconf/commit/92e47132dc761a9a722caec261ae53de1785838f)) - -* Switch to snake case and clean up import order - -Closes #2 ([`5429748`](https://github.com/python-zeroconf/python-zeroconf/commit/5429748190950a5daf7e9cf91de824dfbd06ee7a)) - -* Rationalize exception handling a bit and setup logging ([`ada563c`](https://github.com/python-zeroconf/python-zeroconf/commit/ada563c5a1f6d7c54f2ae5c495503079c395438f)) - -* Update README.rst ([`47ff62b`](https://github.com/python-zeroconf/python-zeroconf/commit/47ff62bae1fd69ffd953c82bd480e4770bfee97b)) - -* Update README.rst ([`b290965`](https://github.com/python-zeroconf/python-zeroconf/commit/b290965ecd589ca4feb1f88a4232d1ec2725dc44)) - -* Create universal wheels ([`bf97c14`](https://github.com/python-zeroconf/python-zeroconf/commit/bf97c1459a9d91d6aa88d7bf34c5f8b4cd3cedc5)) +## v0.17.0 (2015-04-10) ## v0.15.1 (2014-07-10) - -### Unknown - -* Bump version to 0.15.1 ([`9e81863`](https://github.com/python-zeroconf/python-zeroconf/commit/9e81863de37e2ab972d5a76a1dc2d5c517f83cc6)) - -* Update README.rst 
([`161743e`](https://github.com/python-zeroconf/python-zeroconf/commit/161743ea387c961d3554488239f93df4b39be18c)) - -* Add coverage badge to the readme ([`8502a7e`](https://github.com/python-zeroconf/python-zeroconf/commit/8502a7e1c9770a42e44b4f1beb34c887212e7d48)) - -* Send coverage to coveralls ([`1d90a9f`](https://github.com/python-zeroconf/python-zeroconf/commit/1d90a9f91f87753a1ea649ce5da1bc6a7da4013d)) - -* Fix socket.error handling - -This closes #4 ([`475e80b`](https://github.com/python-zeroconf/python-zeroconf/commit/475e80b90e96364a183c63f09fa3858f34aa3646)) - -* Add test_coverage make target ([`89531e6`](https://github.com/python-zeroconf/python-zeroconf/commit/89531e641f15b24a60f9fb2e9f71a7aa8450363a)) - -* Add PyPI version badge to the readme ([`4c852d4`](https://github.com/python-zeroconf/python-zeroconf/commit/4c852d424d07925ae01c24a51ffc36ecae49b48d)) - -* Refactor integration test to use events ([`922eab0`](https://github.com/python-zeroconf/python-zeroconf/commit/922eab05596b72d141d459e83146a4cdb6c84389)) - -* Fix readme formatting ([`7b23734`](https://github.com/python-zeroconf/python-zeroconf/commit/7b23734356f85ccaa6ca66ffaeea8484a2d45d3d)) - -* Update README.rst ([`83fd618`](https://github.com/python-zeroconf/python-zeroconf/commit/83fd618328aff29892c71f9ba5b9ff983fe4a202)) - -* Refactor browser example ([`8328aed`](https://github.com/python-zeroconf/python-zeroconf/commit/8328aed1444781b6fac854eb722ae0fef14a3cc4)) - -* Update README.rst ([`49af263`](https://github.com/python-zeroconf/python-zeroconf/commit/49af26350390484bc6f4b66dab4f6b004040cd4a)) - -* Bump version to 0.15 ([`77bcadd`](https://github.com/python-zeroconf/python-zeroconf/commit/77bcaddbd1964fb0b494e98ec3ae6d66ea42c509)) - -* Add myself to authors ([`b9f886b`](https://github.com/python-zeroconf/python-zeroconf/commit/b9f886bf2815c86c7004e123146293c48ea68f1e)) - -* Reuse one Zeroconf instance in browser example 
([`1ee00b3`](https://github.com/python-zeroconf/python-zeroconf/commit/1ee00b318eab386b709351ffae81c8293f4e6d4d)) - -* Update README.rst ([`fba4215`](https://github.com/python-zeroconf/python-zeroconf/commit/fba4215be1804a13e454e609ed6df2cf98e149f2)) - -* Update README.rst ([`c7bfe63`](https://github.com/python-zeroconf/python-zeroconf/commit/c7bfe63f9a7eff9a1ede0ac63a329a316d3192ab)) - -* Rename examples ([`3502198`](https://github.com/python-zeroconf/python-zeroconf/commit/3502198768062b49564121b48a792ce5e7b7b288)) - -* Refactor examples ([`2ce95f5`](https://github.com/python-zeroconf/python-zeroconf/commit/2ce95f52e7a02c7f1113ba7ebee3c89babb9a26e)) - -* Update README.rst ([`6a7cd31`](https://github.com/python-zeroconf/python-zeroconf/commit/6a7cd3197ee6ae5690b29b6543fc86d1b1a420d8)) - -* Advertise Python 3 support ([`d330918`](https://github.com/python-zeroconf/python-zeroconf/commit/d330918970d719d6b26a3f81e83dbb8b8adac0a4)) - -* Update README.rst ([`6aae20e`](https://github.com/python-zeroconf/python-zeroconf/commit/6aae20e1c1bef8413573139d62d3d2b889fe8776)) - -* Move examples to examples directory ([`c83891c`](https://github.com/python-zeroconf/python-zeroconf/commit/c83891c9dd2f20e8dee44f1b412a536d20cbcbe3)) - -* Fix regression introduced with Python 3 compat ([`0a0f7e0`](https://github.com/python-zeroconf/python-zeroconf/commit/0a0f7e0e72d7f9ed08231d94b66ff44bcff60151)) - -* Mark threads as daemonic (at least for now) ([`b8cfc79`](https://github.com/python-zeroconf/python-zeroconf/commit/b8cfc7996941afded5c9c7e7903378279590b20f)) - -* Update README.rst ([`cd7ca98`](https://github.com/python-zeroconf/python-zeroconf/commit/cd7ca98010044eb965bc988c23a8be59e09eb69a)) - -* Add Python 3 support ([`9a99aa7`](https://github.com/python-zeroconf/python-zeroconf/commit/9a99aa727f4e041a726aed3736c0a8ab625c4cb6)) - -* Update README.rst ([`09a1f4f`](https://github.com/python-zeroconf/python-zeroconf/commit/09a1f4f9d76f64cc8c85f0525e05bdac53de210c)) - -* Update 
README.rst ([`6feec34`](https://github.com/python-zeroconf/python-zeroconf/commit/6feec3459d2561f00402d627ea91a8a4981ad309)) - -* Tune package description ([`b819174`](https://github.com/python-zeroconf/python-zeroconf/commit/b8191741d4ef8e347f6dd138fa48da5aec9b6549)) - -* Gitignore build/ ([`0ef1b0d`](https://github.com/python-zeroconf/python-zeroconf/commit/0ef1b0d3481b68a752efe822ff4e9ce8356bcffa)) - -* Add setup.py ([`916bd38`](https://github.com/python-zeroconf/python-zeroconf/commit/916bd38ddb48a959c597ae1763193b4c2c74334f)) - -* Update README.rst ([`35eced3`](https://github.com/python-zeroconf/python-zeroconf/commit/35eced310fbe1782fd87eb33e7f4befcb0a78499)) - -* Run actual tests on Travis ([`f8cea82`](https://github.com/python-zeroconf/python-zeroconf/commit/f8cea82177cea3577d2b4f70fec32e85229abdce)) - -* Advertise Python 2.6 and PyPy support ([`43b182c`](https://github.com/python-zeroconf/python-zeroconf/commit/43b182cce40bcb21eb1e052a0bc42bf367a963ca)) - -* Move readme to README.rst ([`fd3401e`](https://github.com/python-zeroconf/python-zeroconf/commit/fd3401efb55ae91324d12ba80affd2f3b3ebcf5e)) - -* Move readme to README.rst ([`353b700`](https://github.com/python-zeroconf/python-zeroconf/commit/353b700df79b49c49db62e0a6e6eb0eae3ccb444)) - -* Stop catching BaseExceptions ([`41a013c`](https://github.com/python-zeroconf/python-zeroconf/commit/41a013c8a051b3f80018f37d4f254263cc890a68)) - -* Set up Travis build ([`a2a6125`](https://github.com/python-zeroconf/python-zeroconf/commit/a2a6125dd03d9a810dac72163d545e413387217b)) - -* PEP8ize and clean up ([`e2964ed`](https://github.com/python-zeroconf/python-zeroconf/commit/e2964ed48263e72159e95cb0691af0dcb9ba498b)) - -* Updated for 0.14. ([`83aa0f3`](https://github.com/python-zeroconf/python-zeroconf/commit/83aa0f3803cdf79470f4a754c7b9ab616544eea1)) - -* Although SOL_IP is considered more correct here, it's undefined on some -systems, where IPPROTO_IP is available. (Both equate to 0.) Reported by -Mike Erdely. 
([`443aca8`](https://github.com/python-zeroconf/python-zeroconf/commit/443aca867d694432d466d20bdf7c49ebc7a4e684)) - -* Obsolete comment. ([`eee7196`](https://github.com/python-zeroconf/python-zeroconf/commit/eee7196626773eae2dc0dc1a68de03a99d778139)) - -* Really these should be network order. ([`5e10a20`](https://github.com/python-zeroconf/python-zeroconf/commit/5e10a20a9cb6bbc09356cbf957f3f7fa3e169ff2)) - -* Docstrings for examples; shorter timeout; struct.unpack() vs. ord(). ([`0884d6a`](https://github.com/python-zeroconf/python-zeroconf/commit/0884d6a56afc6fb559b6c90a923762393187e50a)) - -* Make examples executable. ([`5e5e78e`](https://github.com/python-zeroconf/python-zeroconf/commit/5e5e78e27240e7e03d1c8aa96ee0e1f7877d0d5d)) - -* Unneeded. ([`2ac738f`](https://github.com/python-zeroconf/python-zeroconf/commit/2ac738f84bbcf29d03bad289cb243182ecdf48d6)) - -* getText() is redundant with getProperties(). ([`a115187`](https://github.com/python-zeroconf/python-zeroconf/commit/a11518726321b15059be255b6329cba591887197)) - -* Allow graceful exit from announcement test. ([`0f3b413`](https://github.com/python-zeroconf/python-zeroconf/commit/0f3b413b269f8b95b6f8073ba39d11f156ae632c)) - -* More readable display in browser; automatically quit after giving ten -seconds to respond. ([`eee4530`](https://github.com/python-zeroconf/python-zeroconf/commit/eee4530d7b8216338634282f3097cb96932aa28e)) - -* New names, numbers. ([`2a000c5`](https://github.com/python-zeroconf/python-zeroconf/commit/2a000c589302147129eed990c842b38ac61f7514)) - -* Updated FSF address. ([`4e39602`](https://github.com/python-zeroconf/python-zeroconf/commit/4e396025ed666775973d54a50b69e8f635e28658)) - -* De-DOSification. ([`1dc3436`](https://github.com/python-zeroconf/python-zeroconf/commit/1dc3436e6357b66d0bb53f9b285f123b164984da)) - -* Lowercase imports. ([`e292868`](https://github.com/python-zeroconf/python-zeroconf/commit/e292868f9c7e817cb04dfce2d545f45db4041e5e)) - -* The great lowercasing. 
([`5541813`](https://github.com/python-zeroconf/python-zeroconf/commit/5541813fbb8e1d7b233d09ee2d20ac0ca322a9f2)) - -* Renamed tests. ([`4bb88b0`](https://github.com/python-zeroconf/python-zeroconf/commit/4bb88b0952833b84c15c85190c0a9cac01922cbe)) - -* Replaced unwrapped "lgpl.txt" with traditional "COPYING". ([`ad6b1ec`](https://github.com/python-zeroconf/python-zeroconf/commit/ad6b1ecf9fa71a5ec14f7a08fc3d6a689a19e6d2)) - -* Don't need range() here. ([`b36e7d5`](https://github.com/python-zeroconf/python-zeroconf/commit/b36e7d5dd5922b1739911878b29aba921ec9ecb6)) - -* testNumbersAnswers() was identical to testNumbersQuestions(). -(Presumably it was intended to test addAnswer() instead...) ([`416054d`](https://github.com/python-zeroconf/python-zeroconf/commit/416054d407013af8678928b949d6579df4044d46)) - -* Extraneous spaces. ([`f6615a9`](https://github.com/python-zeroconf/python-zeroconf/commit/f6615a9d7632f3510d2f0a36cab155ac753141ab)) - -* Moved history to README; updated version number, etc. ([`015bae2`](https://github.com/python-zeroconf/python-zeroconf/commit/015bae258b5ce73a2a12361e4c9295107126963c)) - -* Meaningless. ([`6147a6e`](https://github.com/python-zeroconf/python-zeroconf/commit/6147a6ed20222851ba4438dd65366f907b4c189f)) - -* Also unexceptional. ([`c36e3af`](https://github.com/python-zeroconf/python-zeroconf/commit/c36e3af2f6e0ea857f383f9b014f50b65fca641c)) - -* If name isn't in self.names, it's unexceptional. (And yes, I actually -tested, and this is faster.) ([`f772d4e`](https://github.com/python-zeroconf/python-zeroconf/commit/f772d4e5e208431378bf01d75eddc7df5119dff7)) - -* Excess spaces; don't use "len" as a label. After eblot. ([`df986ee`](https://github.com/python-zeroconf/python-zeroconf/commit/df986eed46e3ec7dadc6604d0b26e4fcf0b6291a)) - -* Outdated docs. ([`21d7c95`](https://github.com/python-zeroconf/python-zeroconf/commit/21d7c950f50827bc8ac6dd18fb0577c11b5cefac)) - -* Untab the test programs. 
([`c13e4fa`](https://github.com/python-zeroconf/python-zeroconf/commit/c13e4fab3b0b95674fbc93cd2ac30fd2ba462a24)) - -* Remove the comment about the test programs. ([`8adab79`](https://github.com/python-zeroconf/python-zeroconf/commit/8adab79a64a73e76841b37e53e55fe8aad8eb580)) - -* Allow for the failure of getServiceInfo(). Not sure why it's happening, -though. ([`0a05f42`](https://github.com/python-zeroconf/python-zeroconf/commit/0a05f423ad591454a25c515d811556d10e5fc99f)) - -* Don't test for NonLocalNameException, since I killed it. ([`d89ddfc`](https://github.com/python-zeroconf/python-zeroconf/commit/d89ddfcecc7b336aa59a4ff784cb8b810772d24f)) - -* Describe this fork. ([`656f959`](https://github.com/python-zeroconf/python-zeroconf/commit/656f959c26310629953cc661ffad681194295131)) - -* Write only a byte. ([`d346107`](https://github.com/python-zeroconf/python-zeroconf/commit/d34610768812906ff07974c1314f6073b431d96e)) - -* Although beacons _should_ fit within single packets, maybe we should allow for the possibility that they won't? (Or, does this even make sense with sendto()?) ([`ac91642`](https://github.com/python-zeroconf/python-zeroconf/commit/ac91642b0ea90a3c84b605e19d562b897e2cd1fd)) - -* Update the version to indicate a fork. ([`a81f3ab`](https://github.com/python-zeroconf/python-zeroconf/commit/a81f3ababc585acca4bacc51a832703286ec5cfb)) - -* HHHHHH -> 6H ([`9a94953`](https://github.com/python-zeroconf/python-zeroconf/commit/9a949532484a55e52f1d2f14eb27277a5133ce29)) - -* In Zeroconf, use the same method of determining the default IP as elsewhere, instead of the unreliable gethostbyname(gethostname()) method (but fall back to that). ([`f6d4731`](https://github.com/python-zeroconf/python-zeroconf/commit/f6d47316a47d9d04539f1a4215dd7eec06c33d4c)) - -* More again. ([`2420505`](https://github.com/python-zeroconf/python-zeroconf/commit/24205054309e110238fc5a986cdc27b17c44abef)) - -* More. 
([`b8baed3`](https://github.com/python-zeroconf/python-zeroconf/commit/b8baed3a2876c126cac65a7d95bb88661b31483c)) - -* Minor style things for Zeroconf (use True/False instead of 1/0, etc.). ([`173350e`](https://github.com/python-zeroconf/python-zeroconf/commit/173350e415e66c9629d553f820677453bdbe5724)) - -* Clearer. ([`3e718b5`](https://github.com/python-zeroconf/python-zeroconf/commit/3e718b55becd883324bf40eda700431b302a0da8)) - -* 80-column fixes for Zeroconf. ([`e5d930b`](https://github.com/python-zeroconf/python-zeroconf/commit/e5d930bb681f5544827fc0c9f37daa778dec5930)) - -* Minor simplification of the pack/unpack routines in Zeroconf. ([`e814dd1`](https://github.com/python-zeroconf/python-zeroconf/commit/e814dd1e6848d8c7ec03660d347ea4a34390c37d)) - -* Skip unknown resource records in Zeroconf -- https://bugs.launchpad.net/pyzeroconf/+bug/498411 ([`488de88`](https://github.com/python-zeroconf/python-zeroconf/commit/488de8826ddd58646358900d057a4a1632492948)) - -* Some people are reporting bogus data coming back from Zeroconf scans, causing exceptions. ([`fe77e37`](https://github.com/python-zeroconf/python-zeroconf/commit/fe77e371cc68ea211508908e6180867c420ca042)) - -* Don't need the string module here. ([`f76529c`](https://github.com/python-zeroconf/python-zeroconf/commit/f76529c685868dcdb62b6477f15ecb1122310cc5)) - -* Suppress EBADF errors in Zeroconf.py. ([`4c8aac9`](https://github.com/python-zeroconf/python-zeroconf/commit/4c8aac95613df62d001bd7192ec75247a2bb9b9d)) - -* This doesn't seem to be necessary, and it's generating a lot of exceptions... ([`f80df7b`](https://github.com/python-zeroconf/python-zeroconf/commit/f80df7b0f8b9124970e109c51f7a49b7bd75906c)) - -* Untab Zeroconf. ([`892a4f0`](https://github.com/python-zeroconf/python-zeroconf/commit/892a4f095c23379a6cf5a0ef31521f9f90cb5276)) - -* has_key() is deprecated. 
([`f998e39`](https://github.com/python-zeroconf/python-zeroconf/commit/f998e39cbb8d2c5556c10203957ff6a9ab2f546d)) - -* The initial version I committed to HME for Python back in 2008. This is -a step back in some respects (re-inserting tabs that will be undone a -couple patches hence), so that I can apply the patches going forward. ([`d952a9c`](https://github.com/python-zeroconf/python-zeroconf/commit/d952a9c117ae539cf4778d76618fe813b10a9a34)) - -* Remove the executable bit. ([`f0d095d`](https://github.com/python-zeroconf/python-zeroconf/commit/f0d095d0f1c2767be6da47f885f5ed019e9fa363)) - -* Removed pyc file ([`38d0a18`](https://github.com/python-zeroconf/python-zeroconf/commit/38d0a184c13772dae3c14d3c46a30c68497c54db)) - -* First commit ([`c3a39f8`](https://github.com/python-zeroconf/python-zeroconf/commit/c3a39f874a5c10e91ee2315271f13ae74ee381fd)) diff --git a/pyproject.toml b/pyproject.toml index 9ea7f9cf..931cfe05 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.140.1" +version = "0.141.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 22434e47..b3361a19 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -83,7 +83,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.140.1" +__version__ = "0.141.0" __license__ = "LGPL" From 74f971252060eb8a621d82015c230b2a50adc801 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 27 Jan 2025 07:11:45 -1000 Subject: [PATCH 327/434] chore(pre-commit.ci): pre-commit autoupdate (#1497) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 87c38083..246493ed 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.1.0 + rev: v4.1.1 hooks: - id: commitizen stages: [commit-msg] @@ -39,13 +39,13 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.2 + rev: v0.9.3 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - id: ruff-format - repo: https://github.com/codespell-project/codespell - rev: v2.3.0 + rev: v2.4.0 hooks: - id: codespell - repo: https://github.com/PyCQA/flake8 From 56634466a73445e9a2e3c09163049c87732cf930 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Mon, 27 Jan 2025 18:14:09 +0100 Subject: [PATCH 328/434] chore: add badge for rtd build status (#1495) --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index 297d8080..c27833f8 100644 --- a/README.rst +++ b/README.rst @@ -14,6 +14,10 @@ python-zeroconf :target: https://codspeed.io/python-zeroconf/python-zeroconf :alt: Codspeed.io status for python-zeroconf +.. 
image:: https://readthedocs.org/projects/python-zeroconf/badge/?version=latest + :target: https://python-zeroconf.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + `Documentation `_. This is fork of pyzeroconf, Multicast DNS Service Discovery for Python, From 7eb6141a428510029b3a7ed0e1a4e4ba2c22ca7b Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Mon, 27 Jan 2025 18:14:35 +0100 Subject: [PATCH 329/434] chore(docs): add readthedocs config (#1496) --- .readthedocs.yaml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..675f11ec --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,16 @@ +# Read the Docs configuration file for Sphinx projects +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details +version: 2 + +build: + os: ubuntu-24.04 + tools: + python: "3.12" + jobs: + post_install: + # https://docs.readthedocs.com/platform/stable/build-customization.html#install-dependencies-with-poetry + - pip install poetry + - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH poetry install --with docs + +sphinx: + configuration: docs/conf.py From d8e7057f73de267a9263e8a15249c65ff07afb5d Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Mon, 27 Jan 2025 18:15:13 +0100 Subject: [PATCH 330/434] chore(docs): refactor docs config and dependencies (#1493) --- docs/Makefile | 183 +---------- docs/_ext/zeroconfautodocfix.py | 19 ++ docs/conf.py | 267 +++------------- poetry.lock | 531 +++++++++++++++++++++++++++++++- pyproject.toml | 4 + 5 files changed, 607 insertions(+), 397 deletions(-) create mode 100644 docs/_ext/zeroconfautodocfix.py diff --git a/docs/Makefile b/docs/Makefile index a8d581c2..d4bb2cbb 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,177 +1,20 @@ -# Makefile for Sphinx documentation +# Minimal makefile for Sphinx documentation # -# You can set these variables from the command line. 
-SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . BUILDDIR = _build -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - +# Put it first so that "make" without argument is like "make help". 
help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." 
- -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/zeroconf.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/zeroconf.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/zeroconf" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/zeroconf" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 
- -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." +.PHONY: help Makefile -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. 
$(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_ext/zeroconfautodocfix.py b/docs/_ext/zeroconfautodocfix.py new file mode 100644 index 00000000..8163a9c6 --- /dev/null +++ b/docs/_ext/zeroconfautodocfix.py @@ -0,0 +1,19 @@ +""" +Must be included after 'sphinx.ext.autodoc'. Fixes unwanted 'alias of' behavior. +""" + +# pylint: disable=import-error +from sphinx.application import Sphinx + + +def skip_member(app, what, name, obj, skip: bool, options) -> bool: # type: ignore[no-untyped-def] + return ( + skip + or getattr(obj, "__doc__", None) is None + or getattr(obj, "__private__", False) is True + or getattr(getattr(obj, "__func__", None), "__private__", False) is True + ) + + +def setup(app: Sphinx) -> None: + app.connect("autodoc-skip-member", skip_member) diff --git a/docs/conf.py b/docs/conf.py index 647742e6..c3bce671 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,247 +1,66 @@ +# Configuration file for the Sphinx documentation builder. # -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -from typing import Any - -import zeroconf - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. 
They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx"] +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html +import sys +from collections.abc import Sequence +from pathlib import Path -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix of source filenames. -source_suffix = ".rst" +# If your extensions are in another directory, add it here. If the directory +# is relative to the documentation root, use Path.absolute to make it absolute. +sys.path.append(str(Path(__file__).parent / "_ext")) +sys.path.insert(0, str(Path(__file__).parent.parent)) -# The encoding of source files. -# source_encoding = 'utf-8-sig' +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information -# The master toctree document. -master_doc = "index" - -# General information about the project. project = "python-zeroconf" -copyright = "python-zeroconf authors" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = zeroconf.__version__ -# The full version, including alpha/beta/rc tags. -release = version - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. 
-exclude_patterns = ["_build"] +project_copyright = "python-zeroconf authors" +author = "python-zeroconf authors" -# The reST default role (used for this markup: `text`) to use for all documents. -# default_role = None +try: + import zeroconf -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True + # The short X.Y version. + version = zeroconf.__version__ + # The full version, including alpha/beta/rc tags. + release = version +except ImportError: + version = "" + release = "" -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False +extensions = [ + "sphinx.ext.autodoc", + "zeroconfautodocfix", # Must be after "sphinx.ext.autodoc" + "sphinx.ext.intersphinx", +] -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "default" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. 
If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". +html_theme = "sphinx_rtd_theme" html_static_path = ["_static"] -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - # Custom sidebar templates, maps document names to template names. -html_sidebars = { +html_sidebars: dict[str, Sequence[str]] = { "index": ("sidebar.html", "sourcelink.html", "searchbox.html"), "**": ("localtoc.html", "relations.html", "sourcelink.html", "searchbox.html"), } -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. 
-# html_split_index = False +# -- Options for HTML help output -------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-help-output -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Output file base name for HTML help builder. htmlhelp_basename = "zeroconfdoc" +# -- Options for intersphinx extension --------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html#configuration -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements: dict[str, Any] = {} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -# latex_documents = [] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. 
-# latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -# man_pages = [] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -# texinfo_documents = [] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {"http://docs.python.org/": None} - - -def setup(app): # type: ignore[no-untyped-def] - app.connect("autodoc-skip-member", skip_member) - - -def skip_member(app, what, name, obj, skip, options): # type: ignore[no-untyped-def] - return ( - skip - or getattr(obj, "__doc__", None) is None - or getattr(obj, "__private__", False) is True - or getattr(getattr(obj, "__func__", None), "__private__", False) is True - ) +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), +} diff --git a/poetry.lock b/poetry.lock index bf39f792..14c79f61 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,16 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. 
+ +[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] [[package]] name = "async-timeout" @@ -6,17 +18,47 @@ version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.11\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] +[[package]] +name = "babel" +version = "2.16.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "certifi" +version = "2024.12.14" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +groups = ["docs"] +files = [ + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, +] + [[package]] name = "cffi" version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -90,12 +132,116 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["docs"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev", "docs"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -107,6 +253,7 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -184,6 +331,7 @@ version = "3.0.11" description = "The Cython compiler for writing C extensions in the Python language." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["dev"] files = [ {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, @@ -253,12 +401,26 @@ files = [ {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, ] +[[package]] +name = "docutils" +version = "0.21.2" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, +] + 
[[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -267,23 +429,53 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["docs"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "ifaddr" version = "0.2.0" description = "Cross-platform network interface and IP address enumeration library" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, ] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["docs"] +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = 
"sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + [[package]] name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] +markers = "python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, @@ -307,17 +499,37 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jinja2" +version = "3.1.5" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["docs"] +files = [ + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + [[package]] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -336,12 +548,84 @@ profiling = ["gprof2dot"] rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, 
+ {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + [[package]] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -353,6 +637,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -364,6 +649,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -379,6 +665,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -390,6 
+677,7 @@ version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -404,6 +692,7 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -426,6 +715,7 @@ version = "0.25.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"}, {file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"}, @@ -444,6 +734,7 @@ version = "3.1.2" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, @@ -476,6 +767,7 @@ version = "6.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, @@ -494,6 +786,7 @@ version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, @@ -502,12 +795,35 @@ files = [ [package.dependencies] pytest = ">=7.0.0" +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "rich" version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -527,6 +843,7 @@ version = "75.8.0" description = "Easily download, 
build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, @@ -541,12 +858,197 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +groups = ["docs"] +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.0.2" +description = "Read the Docs theme for Sphinx" +optional = false +python-versions = ">=3.8" +groups = ["docs"] +files = [ + {file = 
"sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13"}, + {file = "sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85"}, +] + +[package.dependencies] +docutils = ">0.18,<0.22" +sphinx = ">=6,<9" +sphinxcontrib-jquery = ">=4,<5" + +[package.extras] +dev = ["bump2version", "transifex-client", "twine", "wheel"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = 
"sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = false +python-versions = ">=2.7" +groups = ["docs"] +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +groups = ["docs"] +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, +] + 
+[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + [[package]] name = "tomli" version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -581,6 +1083,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +markers = {dev = "python_full_version <= \"3.11.0a6\"", docs = "python_version < \"3.11\""} [[package]] name = "typing-extensions" @@ -588,17 +1091,39 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "urllib3" +version = "2.3.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["docs"] +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "zipp" version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] +markers = "python_version < \"3.10\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, @@ -613,6 +1138,6 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", type = ["pytest-mypy"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.9" -content-hash = "748c1d5a24ec0b6c1561daace768193ce87acc53d4cabf06c82551a45c079c94" +content-hash = "eb91a0dd1c260f37d2579b4793f537f8017f9e1801e2a372849439f5c9132245" diff --git a/pyproject.toml b/pyproject.toml index 931cfe05..8b6895b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,6 +82,10 @@ setuptools = ">=65.6.3,<76.0.0" pytest-timeout = "^2.1.0" pytest-codspeed = "^3.1.0" +[tool.poetry.group.docs.dependencies] +sphinx = "^7.4.7 || ^8.1.3" +sphinx-rtd-theme = 
"^3.0.2" + [tool.ruff] target-version = "py38" line-length = 110 From 69f7c13e67e39365b1756393a6a51af0e8a4f9f5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 27 Jan 2025 07:19:33 -1000 Subject: [PATCH 331/434] chore: disable cython when building docs (#1498) --- .readthedocs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 675f11ec..aee2616a 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -10,7 +10,7 @@ build: post_install: # https://docs.readthedocs.com/platform/stable/build-customization.html#install-dependencies-with-poetry - pip install poetry - - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH poetry install --with docs + - SKIP_CYTHON=1 VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH poetry install --with docs sphinx: configuration: docs/conf.py From ae3c3523e5f2896989d0b932d53ef1e24ef4aee8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 30 Jan 2025 11:30:08 -0600 Subject: [PATCH 332/434] feat: add simple address resolvers and examples (#1499) --- examples/resolve_address.py | 38 +++++++++++++++++ src/zeroconf/__init__.py | 3 ++ src/zeroconf/_services/info.pxd | 13 ++++++ src/zeroconf/_services/info.py | 76 +++++++++++++++++++++++++++------ tests/services/test_info.py | 74 ++++++++++++++++++++++++++++++++ 5 files changed, 192 insertions(+), 12 deletions(-) create mode 100755 examples/resolve_address.py diff --git a/examples/resolve_address.py b/examples/resolve_address.py new file mode 100755 index 00000000..eeecfda0 --- /dev/null +++ b/examples/resolve_address.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +"""Example of resolving a name to an IP address.""" + +import asyncio +import logging +import sys + +from zeroconf import AddressResolver, IPVersion +from zeroconf.asyncio import AsyncZeroconf + + +async def resolve_name(name: str) -> None: + aiozc = AsyncZeroconf() + await aiozc.zeroconf.async_wait_for_start() + resolver = AddressResolver(name) + if await 
resolver.async_request(aiozc.zeroconf, 3000): + print(f"{name} IP addresses:", resolver.ip_addresses_by_version(IPVersion.All)) + else: + print(f"Name {name} not resolved") + await aiozc.async_close() + + +if __name__ == "__main__": + logging.basicConfig(level=logging.DEBUG) + argv = sys.argv.copy() + if "--debug" in argv: + logging.getLogger("zeroconf").setLevel(logging.DEBUG) + argv.remove("--debug") + + if len(argv) < 2 or not argv[1]: + raise ValueError("Usage: resolve_address.py [--debug] ") + + name = argv[1] + if not name.endswith("."): + name += "." + + asyncio.run(resolve_name(name)) diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b3361a19..d3e74dfe 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -58,6 +58,9 @@ from ._services.browser import ServiceBrowser from ._services.info import ( # noqa # import needed for backwards compat ServiceInfo, + AddressResolver, + AddressResolverIPv4, + AddressResolverIPv6, instance_name_from_service_info, ) from ._services.registry import ( # noqa # import needed for backwards compat diff --git a/src/zeroconf/_services/info.pxd b/src/zeroconf/_services/info.pxd index 53abe62a..3f65bc0a 100644 --- a/src/zeroconf/_services/info.pxd +++ b/src/zeroconf/_services/info.pxd @@ -22,6 +22,9 @@ from .._utils.ipaddress cimport ( ) from .._utils.time cimport current_time_millis +cdef cython.set _TYPE_AAAA_RECORDS +cdef cython.set _TYPE_A_RECORDS +cdef cython.set _TYPE_A_AAAA_RECORDS cdef object _resolve_all_futures_to_none @@ -75,6 +78,7 @@ cdef class ServiceInfo(RecordUpdateListener): cdef public DNSText _dns_text_cache cdef public cython.list _dns_address_cache cdef public cython.set _get_address_and_nsec_records_cache + cdef public cython.set _query_record_types @cython.locals(record_update=RecordUpdate, update=bint, cache=DNSCache) cpdef void async_update_records(self, object zc, double now, cython.list records) @@ -155,3 +159,12 @@ cdef class ServiceInfo(RecordUpdateListener): cdef 
double _get_initial_delay(self) cdef double _get_random_delay(self) + +cdef class AddressResolver(ServiceInfo): + pass + +cdef class AddressResolverIPv6(ServiceInfo): + pass + +cdef class AddressResolverIPv4(ServiceInfo): + pass diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index fd51eee1..a6e815b5 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -88,6 +88,10 @@ # the A/AAAA/SRV records for a host. _AVOID_SYNC_DELAY_RANDOM_INTERVAL = (20, 120) +_TYPE_AAAA_RECORDS = {_TYPE_AAAA} +_TYPE_A_RECORDS = {_TYPE_A} +_TYPE_A_AAAA_RECORDS = {_TYPE_A, _TYPE_AAAA} + bytes_ = bytes float_ = float int_ = int @@ -146,6 +150,7 @@ class ServiceInfo(RecordUpdateListener): "_name", "_new_records_futures", "_properties", + "_query_record_types", "host_ttl", "interface_index", "key", @@ -210,6 +215,7 @@ def __init__( self._dns_service_cache: Optional[DNSService] = None self._dns_text_cache: Optional[DNSText] = None self._get_address_and_nsec_records_cache: Optional[Set[DNSRecord]] = None + self._query_record_types = {_TYPE_SRV, _TYPE_TXT, _TYPE_A, _TYPE_AAAA} @property def name(self) -> str: @@ -917,18 +923,22 @@ def _generate_request_query( cache = zc.cache history = zc.question_history qu_question = question_type is QU_QUESTION - self._add_question_with_known_answers( - out, qu_question, history, cache, now, name, _TYPE_SRV, _CLASS_IN, True - ) - self._add_question_with_known_answers( - out, qu_question, history, cache, now, name, _TYPE_TXT, _CLASS_IN, True - ) - self._add_question_with_known_answers( - out, qu_question, history, cache, now, server, _TYPE_A, _CLASS_IN, False - ) - self._add_question_with_known_answers( - out, qu_question, history, cache, now, server, _TYPE_AAAA, _CLASS_IN, False - ) + if _TYPE_SRV in self._query_record_types: + self._add_question_with_known_answers( + out, qu_question, history, cache, now, name, _TYPE_SRV, _CLASS_IN, True + ) + if _TYPE_TXT in self._query_record_types: + 
self._add_question_with_known_answers( + out, qu_question, history, cache, now, name, _TYPE_TXT, _CLASS_IN, True + ) + if _TYPE_A in self._query_record_types: + self._add_question_with_known_answers( + out, qu_question, history, cache, now, server, _TYPE_A, _CLASS_IN, False + ) + if _TYPE_AAAA in self._query_record_types: + self._add_question_with_known_answers( + out, qu_question, history, cache, now, server, _TYPE_AAAA, _CLASS_IN, False + ) return out def __repr__(self) -> str: @@ -954,3 +964,45 @@ def __repr__(self) -> str: class AsyncServiceInfo(ServiceInfo): """An async version of ServiceInfo.""" + + +class AddressResolver(ServiceInfo): + """Resolve a host name to an IP address.""" + + def __init__(self, server: str) -> None: + """Initialize the AddressResolver.""" + super().__init__(server, server, server=server) + self._query_record_types = _TYPE_A_AAAA_RECORDS + + @property + def _is_complete(self) -> bool: + """The ServiceInfo has all expected properties.""" + return bool(self._ipv4_addresses) or bool(self._ipv6_addresses) + + +class AddressResolverIPv6(ServiceInfo): + """Resolve a host name to an IPv6 address.""" + + def __init__(self, server: str) -> None: + """Initialize the AddressResolver.""" + super().__init__(server, server, server=server) + self._query_record_types = _TYPE_AAAA_RECORDS + + @property + def _is_complete(self) -> bool: + """The ServiceInfo has all expected properties.""" + return bool(self._ipv6_addresses) + + +class AddressResolverIPv4(ServiceInfo): + """Resolve a host name to an IPv4 address.""" + + def __init__(self, server: str) -> None: + """Initialize the AddressResolver.""" + super().__init__(server, server, server=server) + self._query_record_types = _TYPE_A_RECORDS + + @property + def _is_complete(self) -> bool: + """The ServiceInfo has all expected properties.""" + return bool(self._ipv4_addresses) diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 1f8924a3..3d4c5302 100644 --- 
a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -1797,3 +1797,77 @@ async def test_service_info_nsec_records(): assert nsec_record.type == const._TYPE_NSEC assert nsec_record.ttl == 50 assert nsec_record.rdtypes == [const._TYPE_A, const._TYPE_AAAA] + + +@pytest.mark.asyncio +async def test_address_resolver(): + """Test that the address resolver works.""" + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) + await aiozc.zeroconf.async_wait_for_start() + resolver = r.AddressResolver("address_resolver_test.local.") + resolve_task = asyncio.create_task(resolver.async_request(aiozc.zeroconf, 3000)) + outgoing = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + outgoing.add_answer_at_time( + r.DNSAddress( + "address_resolver_test.local.", + const._TYPE_A, + const._CLASS_IN, + 10000, + b"\x7f\x00\x00\x01", + ), + 0, + ) + + aiozc.zeroconf.async_send(outgoing) + assert await resolve_task + assert resolver.addresses == [b"\x7f\x00\x00\x01"] + + +@pytest.mark.asyncio +async def test_address_resolver_ipv4(): + """Test that the IPv4 address resolver works.""" + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) + await aiozc.zeroconf.async_wait_for_start() + resolver = r.AddressResolverIPv4("address_resolver_test_ipv4.local.") + resolve_task = asyncio.create_task(resolver.async_request(aiozc.zeroconf, 3000)) + outgoing = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + outgoing.add_answer_at_time( + r.DNSAddress( + "address_resolver_test_ipv4.local.", + const._TYPE_A, + const._CLASS_IN, + 10000, + b"\x7f\x00\x00\x01", + ), + 0, + ) + + aiozc.zeroconf.async_send(outgoing) + assert await resolve_task + assert resolver.addresses == [b"\x7f\x00\x00\x01"] + + +@pytest.mark.asyncio +@unittest.skipIf(not has_working_ipv6(), "Requires IPv6") +@unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") +async def test_address_resolver_ipv6(): + """Test that the IPv6 address resolver works.""" + aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) + await 
aiozc.zeroconf.async_wait_for_start() + resolver = r.AddressResolverIPv6("address_resolver_test_ipv6.local.") + resolve_task = asyncio.create_task(resolver.async_request(aiozc.zeroconf, 3000)) + outgoing = r.DNSOutgoing(const._FLAGS_QR_RESPONSE) + outgoing.add_answer_at_time( + r.DNSAddress( + "address_resolver_test_ipv6.local.", + const._TYPE_AAAA, + const._CLASS_IN, + 10000, + socket.inet_pton(socket.AF_INET6, "fe80::52e:c2f2:bc5f:e9c6"), + ), + 0, + ) + + aiozc.zeroconf.async_send(outgoing) + assert await resolve_task + assert resolver.ip_addresses_by_version(IPVersion.All) == [ip_address("fe80::52e:c2f2:bc5f:e9c6")] From 6f5cfb643dd42997c31ad54426548f6be52bf82b Mon Sep 17 00:00:00 2001 From: semantic-release Date: Thu, 30 Jan 2025 17:39:27 +0000 Subject: [PATCH 333/434] 0.142.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e5874ebb..174b0d7e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.142.0 (2025-01-30) + +### Features + +- Add simple address resolvers and examples + ([#1499](https://github.com/python-zeroconf/python-zeroconf/pull/1499), + [`ae3c352`](https://github.com/python-zeroconf/python-zeroconf/commit/ae3c3523e5f2896989d0b932d53ef1e24ef4aee8)) + + ## v0.141.0 (2025-01-22) ### Features diff --git a/pyproject.toml b/pyproject.toml index 8b6895b8..f5084253 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.141.0" +version = "0.142.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index d3e74dfe..1a41ddd3 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -86,7 +86,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.141.0" +__version__ = "0.142.0" __license__ = "LGPL" From 9d383f597c89df4c70ee59d9fd481b174aaeff90 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 31 Jan 2025 13:32:06 -0600 Subject: [PATCH 334/434] chore: add tests for circular imports (#1501) --- tests/test_circular_imports.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 tests/test_circular_imports.py diff --git a/tests/test_circular_imports.py b/tests/test_circular_imports.py new file mode 100644 index 00000000..8bd443a4 --- /dev/null +++ b/tests/test_circular_imports.py @@ -0,0 +1,30 @@ +"""Test to check for circular imports.""" + +import asyncio +import sys + +import pytest + + +@pytest.mark.asyncio +@pytest.mark.timeout(30) # cloud can take > 9s +@pytest.mark.parametrize( + "module", + [ + "zeroconf", + "zeroconf.asyncio", + "zeroconf._protocol.incoming", + "zeroconf._protocol.outgoing", + "zeroconf.const", + "zeroconf._logger", + "zeroconf._transport", + "zeroconf._record_update", + "zeroconf._services.browser", + "zeroconf._services.info", + ], +) +async def test_circular_imports(module: str) -> None: + """Check that components can be imported without circular imports.""" + process = await asyncio.create_subprocess_exec(sys.executable, "-c", f"import {module}") + await process.communicate() + assert process.returncode == 0 From 64138a393f5395cd34bcfde05088773cdaa86662 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 31 Jan 2025 13:50:48 -0600 Subject: [PATCH 335/434] chore: update to modern typing (#1502) --- examples/browser.py | 2 + examples/registration.py | 2 + examples/resolve_address.py | 2 + examples/resolver.py | 2 + examples/self_test.py | 1 + src/zeroconf/__init__.py | 4 +- src/zeroconf/_cache.py | 38 ++--- src/zeroconf/_core.py | 66 ++++----- src/zeroconf/_dns.py | 70 +++++----- src/zeroconf/_engine.py | 26 ++-- src/zeroconf/_exceptions.py | 2 + src/zeroconf/_handlers/__init__.py | 2 + src/zeroconf/_handlers/answers.py | 8 +- .../_handlers/multicast_outgoing_queue.py | 4 +- src/zeroconf/_handlers/query_handler.py | 60 ++++---- src/zeroconf/_handlers/record_manager.pxd | 2 +- src/zeroconf/_handlers/record_manager.py | 26 ++-- src/zeroconf/_history.py | 10 +- src/zeroconf/_logger.py | 6 +- src/zeroconf/_protocol/__init__.py | 2 + src/zeroconf/_protocol/incoming.py | 32 +++-- src/zeroconf/_protocol/outgoing.py | 26 ++-- src/zeroconf/_record_update.py | 8 +- src/zeroconf/_services/__init__.py | 20 +-- src/zeroconf/_services/browser.py | 104 +++++++------- src/zeroconf/_services/info.py | 130 +++++++++--------- src/zeroconf/_services/registry.py | 24 ++-- src/zeroconf/_services/types.py | 13 +- src/zeroconf/_transport.py | 5 +- src/zeroconf/_updates.py | 8 +- src/zeroconf/_utils/__init__.py | 2 + src/zeroconf/_utils/asyncio.py | 14 +- src/zeroconf/_utils/ipaddress.py | 14 +- src/zeroconf/_utils/name.py | 5 +- src/zeroconf/_utils/net.py | 34 ++--- src/zeroconf/_utils/time.py | 2 + src/zeroconf/asyncio.py | 56 ++++---- src/zeroconf/const.py | 2 + tests/benchmarks/__init__.py | 1 + tests/benchmarks/helpers.py | 2 + tests/benchmarks/test_cache.py | 2 + tests/benchmarks/test_incoming.py | 2 + tests/benchmarks/test_outgoing.py | 2 + tests/benchmarks/test_send.py | 2 + tests/benchmarks/test_txt_properties.py | 2 + tests/conftest.py | 2 + tests/services/__init__.py | 2 + tests/services/test_browser.py | 4 +- tests/services/test_registry.py | 2 + 
tests/services/test_types.py | 2 + tests/test_asyncio.py | 2 + tests/test_cache.py | 2 + tests/test_circular_imports.py | 2 + tests/test_dns.py | 2 + tests/test_engine.py | 2 + tests/test_exceptions.py | 2 + tests/test_handlers.py | 10 +- tests/test_history.py | 2 + tests/test_init.py | 2 + tests/test_logger.py | 2 + tests/test_protocol.py | 2 + tests/test_services.py | 2 + tests/test_updates.py | 4 +- tests/utils/__init__.py | 2 + tests/utils/test_ipaddress.py | 2 + tests/utils/test_name.py | 2 + tests/utils/test_net.py | 2 + 67 files changed, 510 insertions(+), 393 deletions(-) diff --git a/examples/browser.py b/examples/browser.py index 107be452..92adc949 100755 --- a/examples/browser.py +++ b/examples/browser.py @@ -5,6 +5,8 @@ The default is HTTP and HAP; use --find to search for all available services in the network """ +from __future__ import annotations + import argparse import logging from time import sleep diff --git a/examples/registration.py b/examples/registration.py index 1c42d890..1ba19b16 100755 --- a/examples/registration.py +++ b/examples/registration.py @@ -2,6 +2,8 @@ """Example of announcing a service (in this case, a fake HTTP server)""" +from __future__ import annotations + import argparse import logging import socket diff --git a/examples/resolve_address.py b/examples/resolve_address.py index eeecfda0..88ce825b 100755 --- a/examples/resolve_address.py +++ b/examples/resolve_address.py @@ -2,6 +2,8 @@ """Example of resolving a name to an IP address.""" +from __future__ import annotations + import asyncio import logging import sys diff --git a/examples/resolver.py b/examples/resolver.py index 1b74f97e..a52050f4 100755 --- a/examples/resolver.py +++ b/examples/resolver.py @@ -2,6 +2,8 @@ """Example of resolving a service with a known name""" +from __future__ import annotations + import logging import sys diff --git a/examples/self_test.py b/examples/self_test.py index b12a8518..3d1fa050 100755 --- a/examples/self_test.py +++ 
b/examples/self_test.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +from __future__ import annotations import logging import socket diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 1a41ddd3..26f60cde 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -20,6 +20,8 @@ USA """ +from __future__ import annotations + from ._cache import DNSCache # noqa # import needed for backwards compat from ._core import Zeroconf from ._dns import ( # noqa # import needed for backwards compat @@ -57,10 +59,10 @@ ) from ._services.browser import ServiceBrowser from ._services.info import ( # noqa # import needed for backwards compat - ServiceInfo, AddressResolver, AddressResolverIPv4, AddressResolverIPv6, + ServiceInfo, instance_name_from_service_info, ) from ._services.registry import ( # noqa # import needed for backwards compat diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index 1b7aae38..5ac43f30 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -20,8 +20,10 @@ USA """ +from __future__ import annotations + from heapq import heapify, heappop, heappush -from typing import Dict, Iterable, List, Optional, Set, Tuple, Union, cast +from typing import Dict, Iterable, Union, cast from ._dns import ( DNSAddress, @@ -66,8 +68,8 @@ class DNSCache: def __init__(self) -> None: self.cache: _DNSRecordCacheType = {} - self._expire_heap: List[Tuple[float, DNSRecord]] = [] - self._expirations: Dict[DNSRecord, float] = {} + self._expire_heap: list[tuple[float, DNSRecord]] = [] + self._expirations: dict[DNSRecord, float] = {} self.service_cache: _DNSRecordCacheType = {} # Functions prefixed with async_ are NOT threadsafe and must @@ -135,7 +137,7 @@ def async_remove_records(self, entries: Iterable[DNSRecord]) -> None: for entry in entries: self._async_remove(entry) - def async_expire(self, now: _float) -> List[DNSRecord]: + def async_expire(self, now: _float) -> list[DNSRecord]: """Purge expired entries from the cache. 
This function must be run in from event loop. @@ -145,7 +147,7 @@ def async_expire(self, now: _float) -> List[DNSRecord]: if not (expire_heap_len := len(self._expire_heap)): return [] - expired: List[DNSRecord] = [] + expired: list[DNSRecord] = [] # Find any expired records and add them to the to-delete list while self._expire_heap: when_record = self._expire_heap[0] @@ -182,7 +184,7 @@ def async_expire(self, now: _float) -> List[DNSRecord]: self.async_remove_records(expired) return expired - def async_get_unique(self, entry: _UniqueRecordsType) -> Optional[DNSRecord]: + def async_get_unique(self, entry: _UniqueRecordsType) -> DNSRecord | None: """Gets a unique entry by key. Will return None if there is no matching entry. @@ -194,7 +196,7 @@ def async_get_unique(self, entry: _UniqueRecordsType) -> Optional[DNSRecord]: return None return store.get(entry) - def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DNSRecord]: + def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> list[DNSRecord]: """Gets all matching entries by details. This function is not thread-safe and must be called from @@ -202,7 +204,7 @@ def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DN """ key = name.lower() records = self.cache.get(key) - matches: List[DNSRecord] = [] + matches: list[DNSRecord] = [] if records is None: return matches for record in records.values(): @@ -210,7 +212,7 @@ def async_all_by_details(self, name: _str, type_: _int, class_: _int) -> List[DN matches.append(record) return matches - def async_entries_with_name(self, name: str) -> List[DNSRecord]: + def async_entries_with_name(self, name: str) -> list[DNSRecord]: """Returns a dict of entries whose key matches the name. 
This function is not threadsafe and must be called from @@ -218,7 +220,7 @@ def async_entries_with_name(self, name: str) -> List[DNSRecord]: """ return self.entries_with_name(name) - def async_entries_with_server(self, name: str) -> List[DNSRecord]: + def async_entries_with_server(self, name: str) -> list[DNSRecord]: """Returns a dict of entries whose key matches the server. This function is not threadsafe and must be called from @@ -230,7 +232,7 @@ def async_entries_with_server(self, name: str) -> List[DNSRecord]: # event loop, however they all make copies so they significantly # inefficient. - def get(self, entry: DNSEntry) -> Optional[DNSRecord]: + def get(self, entry: DNSEntry) -> DNSRecord | None: """Gets an entry by key. Will return None if there is no matching entry.""" if isinstance(entry, _UNIQUE_RECORD_TYPES): @@ -240,7 +242,7 @@ def get(self, entry: DNSEntry) -> Optional[DNSRecord]: return cached_entry return None - def get_by_details(self, name: str, type_: _int, class_: _int) -> Optional[DNSRecord]: + def get_by_details(self, name: str, type_: _int, class_: _int) -> DNSRecord | None: """Gets the first matching entry by details. Returns None if no entries match. 
Calling this function is not recommended as it will only @@ -261,7 +263,7 @@ def get_by_details(self, name: str, type_: _int, class_: _int) -> Optional[DNSRe return cached_entry return None - def get_all_by_details(self, name: str, type_: _int, class_: _int) -> List[DNSRecord]: + def get_all_by_details(self, name: str, type_: _int, class_: _int) -> list[DNSRecord]: """Gets all matching entries by details.""" key = name.lower() records = self.cache.get(key) @@ -269,19 +271,19 @@ def get_all_by_details(self, name: str, type_: _int, class_: _int) -> List[DNSRe return [] return [entry for entry in list(records.values()) if type_ == entry.type and class_ == entry.class_] - def entries_with_server(self, server: str) -> List[DNSRecord]: + def entries_with_server(self, server: str) -> list[DNSRecord]: """Returns a list of entries whose server matches the name.""" if entries := self.service_cache.get(server.lower()): return list(entries.values()) return [] - def entries_with_name(self, name: str) -> List[DNSRecord]: + def entries_with_name(self, name: str) -> list[DNSRecord]: """Returns a list of entries whose key matches the name.""" if entries := self.cache.get(name.lower()): return list(entries.values()) return [] - def current_entry_with_name_and_alias(self, name: str, alias: str) -> Optional[DNSRecord]: + def current_entry_with_name_and_alias(self, name: str, alias: str) -> DNSRecord | None: now = current_time_millis() for record in reversed(self.entries_with_name(name)): if ( @@ -292,13 +294,13 @@ def current_entry_with_name_and_alias(self, name: str, alias: str) -> Optional[D return record return None - def names(self) -> List[str]: + def names(self) -> list[str]: """Return a copy of the list of current cache names.""" return list(self.cache) def async_mark_unique_records_older_than_1s_to_expire( self, - unique_types: Set[Tuple[_str, _int, _int]], + unique_types: set[tuple[_str, _int, _int]], answers: Iterable[DNSRecord], now: _float, ) -> None: diff --git 
a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 68cb8a9a..01e98e8f 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -20,12 +20,14 @@ USA """ +from __future__ import annotations + import asyncio import logging import sys import threading from types import TracebackType -from typing import Awaitable, Dict, List, Optional, Set, Tuple, Type, Union +from typing import Awaitable from ._cache import DNSCache from ._dns import DNSQuestion, DNSQuestionType @@ -108,9 +110,9 @@ def async_send_with_transport( packet: bytes, packet_num: int, out: DNSOutgoing, - addr: Optional[str], + addr: str | None, port: int, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), + v6_flow_scope: tuple[()] | tuple[int, int] = (), ) -> None: ipv6_socket = transport.is_ipv6 if addr is None: @@ -149,7 +151,7 @@ def __init__( self, interfaces: InterfacesType = InterfaceChoice.All, unicast: bool = False, - ip_version: Optional[IPVersion] = None, + ip_version: IPVersion | None = None, apple_p2p: bool = False, ) -> None: """Creates an instance of the Zeroconf class, establishing @@ -181,7 +183,7 @@ def __init__( self.engine = AsyncEngine(self, listen_socket, respond_sockets) - self.browsers: Dict[ServiceListener, ServiceBrowser] = {} + self.browsers: dict[ServiceListener, ServiceBrowser] = {} self.registry = ServiceRegistry() self.cache = DNSCache() self.question_history = QuestionHistory() @@ -192,9 +194,9 @@ def __init__( self.query_handler = QueryHandler(self) self.record_manager = RecordManager(self) - self._notify_futures: Set[asyncio.Future] = set() - self.loop: Optional[asyncio.AbstractEventLoop] = None - self._loop_thread: Optional[threading.Thread] = None + self._notify_futures: set[asyncio.Future] = set() + self.loop: asyncio.AbstractEventLoop | None = None + self._loop_thread: threading.Thread | None = None self.start() @@ -239,7 +241,7 @@ async def async_wait_for_start(self) -> None: raise NotRunningException @property - def listeners(self) -> 
Set[RecordUpdateListener]: + def listeners(self) -> set[RecordUpdateListener]: return self.record_manager.listeners async def async_wait(self, timeout: float) -> None: @@ -264,8 +266,8 @@ def get_service_info( type_: str, name: str, timeout: int = 3000, - question_type: Optional[DNSQuestionType] = None, - ) -> Optional[ServiceInfo]: + question_type: DNSQuestionType | None = None, + ) -> ServiceInfo | None: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, which defaults to 3 seconds. @@ -301,7 +303,7 @@ def remove_all_service_listeners(self) -> None: def register_service( self, info: ServiceInfo, - ttl: Optional[int] = None, + ttl: int | None = None, allow_name_change: bool = False, cooperating_responders: bool = False, strict: bool = True, @@ -329,7 +331,7 @@ def register_service( async def async_register_service( self, info: ServiceInfo, - ttl: Optional[int] = None, + ttl: int | None = None, allow_name_change: bool = False, cooperating_responders: bool = False, strict: bool = True, @@ -380,8 +382,8 @@ async def async_get_service_info( type_: str, name: str, timeout: int = 3000, - question_type: Optional[DNSQuestionType] = None, - ) -> Optional[AsyncServiceInfo]: + question_type: DNSQuestionType | None = None, + ) -> AsyncServiceInfo | None: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, which defaults to 3 seconds. 
@@ -400,7 +402,7 @@ async def _async_broadcast_service( self, info: ServiceInfo, interval: int, - ttl: Optional[int], + ttl: int | None, broadcast_addresses: bool = True, ) -> None: """Send a broadcasts to announce a service at intervals.""" @@ -412,7 +414,7 @@ async def _async_broadcast_service( def generate_service_broadcast( self, info: ServiceInfo, - ttl: Optional[int], + ttl: int | None, broadcast_addresses: bool = True, ) -> DNSOutgoing: """Generate a broadcast to announce a service.""" @@ -439,7 +441,7 @@ def _add_broadcast_answer( # pylint: disable=no-self-use self, out: DNSOutgoing, info: ServiceInfo, - override_ttl: Optional[int], + override_ttl: int | None, broadcast_addresses: bool = True, ) -> None: """Add answers to broadcast a service.""" @@ -481,7 +483,7 @@ async def async_unregister_service(self, info: ServiceInfo) -> Awaitable: self._async_broadcast_service(info, _UNREGISTER_TIME, 0, broadcast_addresses) ) - def generate_unregister_all_services(self) -> Optional[DNSOutgoing]: + def generate_unregister_all_services(self) -> DNSOutgoing | None: """Generate a DNSOutgoing goodbye for all services and remove them from the registry.""" service_infos = self.registry.async_get_service_infos() if not service_infos: @@ -562,7 +564,7 @@ async def async_check_service( def add_listener( self, listener: RecordUpdateListener, - question: Optional[Union[DNSQuestion, List[DNSQuestion]]], + question: DNSQuestion | list[DNSQuestion] | None, ) -> None: """Adds a listener for a given question. The listener will have its update_record method called when information is available to @@ -584,7 +586,7 @@ def remove_listener(self, listener: RecordUpdateListener) -> None: def async_add_listener( self, listener: RecordUpdateListener, - question: Optional[Union[DNSQuestion, List[DNSQuestion]]], + question: DNSQuestion | list[DNSQuestion] | None, ) -> None: """Adds a listener for a given question. 
The listener will have its update_record method called when information is available to @@ -604,10 +606,10 @@ def async_remove_listener(self, listener: RecordUpdateListener) -> None: def send( self, out: DNSOutgoing, - addr: Optional[str] = None, + addr: str | None = None, port: int = _MDNS_PORT, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), - transport: Optional[_WrappedTransport] = None, + v6_flow_scope: tuple[()] | tuple[int, int] = (), + transport: _WrappedTransport | None = None, ) -> None: """Sends an outgoing packet threadsafe.""" assert self.loop is not None @@ -616,10 +618,10 @@ def send( def async_send( self, out: DNSOutgoing, - addr: Optional[str] = None, + addr: str | None = None, port: int = _MDNS_PORT, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = (), - transport: Optional[_WrappedTransport] = None, + v6_flow_scope: tuple[()] | tuple[int, int] = (), + transport: _WrappedTransport | None = None, ) -> None: """Sends an outgoing packet.""" if self.done: @@ -701,14 +703,14 @@ async def _async_close(self) -> None: await self.engine._async_close() # pylint: disable=protected-access self._shutdown_threads() - def __enter__(self) -> "Zeroconf": + def __enter__(self) -> Zeroconf: return self def __exit__( # pylint: disable=useless-return self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: self.close() return None diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index c22f8b17..bc0a3948 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -20,9 +20,11 @@ USA """ +from __future__ import annotations + import enum import socket -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Union, cast +from typing import TYPE_CHECKING, Any, cast from ._exceptions import AbstractMethodException from ._utils.net 
import _is_v6_address @@ -94,7 +96,7 @@ def get_type(t: int) -> str: """Type accessor""" return _TYPES.get(t, f"?({t})") - def entry_to_string(self, hdr: str, other: Optional[Union[bytes, str]]) -> str: + def entry_to_string(self, hdr: str, other: bytes | str | None) -> str: """String representation with additional information""" return "{}[{},{}{},{}]{}".format( hdr, @@ -119,7 +121,7 @@ def _fast_init(self, name: str, type_: _int, class_: _int) -> None: self._fast_init_entry(name, type_, class_) self._hash = hash((self.key, type_, self.class_)) - def answered_by(self, rec: "DNSRecord") -> bool: + def answered_by(self, rec: DNSRecord) -> bool: """Returns true if the question is answered by the record""" return self.class_ == rec.class_ and self.type in (rec.type, _TYPE_ANY) and self.name == rec.name @@ -170,8 +172,8 @@ def __init__( name: str, type_: int, class_: int, - ttl: Union[float, int], - created: Optional[float] = None, + ttl: float | int, + created: float | None = None, ) -> None: self._fast_init_record(name, type_, class_, ttl, created or current_time_millis()) @@ -185,10 +187,10 @@ def __eq__(self, other: Any) -> bool: # pylint: disable=no-self-use """Abstract method""" raise AbstractMethodException - def __lt__(self, other: "DNSRecord") -> bool: + def __lt__(self, other: DNSRecord) -> bool: return self.ttl < other.ttl - def suppressed_by(self, msg: "DNSIncoming") -> bool: + def suppressed_by(self, msg: DNSIncoming) -> bool: """Returns true if any answer in a message can suffice for the information held in this record.""" answers = msg.answers() @@ -208,7 +210,7 @@ def get_expiration_time(self, percent: _int) -> float: return self.created + (percent * self.ttl * 10) # TODO: Switch to just int here - def get_remaining_ttl(self, now: _float) -> Union[int, float]: + def get_remaining_ttl(self, now: _float) -> int | float: """Returns the remaining TTL in seconds.""" remain = (self.created + (_EXPIRE_FULL_TIME_MS * self.ttl) - now) / 1000.0 return 0 if remain 
< 0 else remain @@ -225,18 +227,18 @@ def is_recent(self, now: _float) -> bool: """Returns true if the record more than one quarter of its TTL remaining.""" return self.created + (_RECENT_TIME_MS * self.ttl) > now - def _set_created_ttl(self, created: _float, ttl: Union[float, int]) -> None: + def _set_created_ttl(self, created: _float, ttl: float | int) -> None: """Set the created and ttl of a record.""" # It would be better if we made a copy instead of mutating the record # in place, but records currently don't have a copy method. self.created = created self.ttl = ttl - def write(self, out: "DNSOutgoing") -> None: # pylint: disable=no-self-use + def write(self, out: DNSOutgoing) -> None: # pylint: disable=no-self-use """Abstract method""" raise AbstractMethodException - def to_string(self, other: Union[bytes, str]) -> str: + def to_string(self, other: bytes | str) -> str: """String representation with additional information""" arg = f"{self.ttl}/{int(self.get_remaining_ttl(current_time_millis()))},{cast(Any, other)}" return DNSEntry.entry_to_string(self, "record", arg) @@ -254,8 +256,8 @@ def __init__( class_: int, ttl: int, address: bytes, - scope_id: Optional[int] = None, - created: Optional[float] = None, + scope_id: int | None = None, + created: float | None = None, ) -> None: self._fast_init(name, type_, class_, ttl, address, scope_id, created or current_time_millis()) @@ -266,7 +268,7 @@ def _fast_init( class_: _int, ttl: _float, address: bytes, - scope_id: Optional[_int], + scope_id: _int | None, created: _float, ) -> None: """Fast init for reuse.""" @@ -275,7 +277,7 @@ def _fast_init( self.scope_id = scope_id self._hash = hash((self.key, type_, self.class_, address, scope_id)) - def write(self, out: "DNSOutgoing") -> None: + def write(self, out: DNSOutgoing) -> None: """Used in constructing an outgoing packet""" out.write_string(self.address) @@ -320,7 +322,7 @@ def __init__( ttl: int, cpu: str, os: str, - created: Optional[float] = None, + created: float 
| None = None, ) -> None: self._fast_init(name, type_, class_, ttl, cpu, os, created or current_time_millis()) @@ -333,7 +335,7 @@ def _fast_init( self.os = os self._hash = hash((self.key, type_, self.class_, cpu, os)) - def write(self, out: "DNSOutgoing") -> None: + def write(self, out: DNSOutgoing) -> None: """Used in constructing an outgoing packet""" out.write_character_string(self.cpu.encode("utf-8")) out.write_character_string(self.os.encode("utf-8")) @@ -367,7 +369,7 @@ def __init__( class_: int, ttl: int, alias: str, - created: Optional[float] = None, + created: float | None = None, ) -> None: self._fast_init(name, type_, class_, ttl, alias, created or current_time_millis()) @@ -389,7 +391,7 @@ def max_size_compressed(self) -> int: + _NAME_COMPRESSION_MIN_SIZE ) - def write(self, out: "DNSOutgoing") -> None: + def write(self, out: DNSOutgoing) -> None: """Used in constructing an outgoing packet""" out.write_name(self.alias) @@ -422,7 +424,7 @@ def __init__( class_: int, ttl: int, text: bytes, - created: Optional[float] = None, + created: float | None = None, ) -> None: self._fast_init(name, type_, class_, ttl, text, created or current_time_millis()) @@ -433,7 +435,7 @@ def _fast_init( self.text = text self._hash = hash((self.key, type_, self.class_, text)) - def write(self, out: "DNSOutgoing") -> None: + def write(self, out: DNSOutgoing) -> None: """Used in constructing an outgoing packet""" out.write_string(self.text) @@ -466,12 +468,12 @@ def __init__( name: str, type_: int, class_: int, - ttl: Union[float, int], + ttl: float | int, priority: int, weight: int, port: int, server: str, - created: Optional[float] = None, + created: float | None = None, ) -> None: self._fast_init( name, type_, class_, ttl, priority, weight, port, server, created or current_time_millis() @@ -497,7 +499,7 @@ def _fast_init( self.server_key = server.lower() self._hash = hash((self.key, type_, self.class_, priority, weight, port, self.server_key)) - def write(self, out: 
"DNSOutgoing") -> None: + def write(self, out: DNSOutgoing) -> None: """Used in constructing an outgoing packet""" out.write_short(self.priority) out.write_short(self.weight) @@ -537,10 +539,10 @@ def __init__( name: str, type_: int, class_: int, - ttl: Union[int, float], + ttl: int | float, next_name: str, - rdtypes: List[int], - created: Optional[float] = None, + rdtypes: list[int], + created: float | None = None, ) -> None: self._fast_init(name, type_, class_, ttl, next_name, rdtypes, created or current_time_millis()) @@ -551,7 +553,7 @@ def _fast_init( class_: _int, ttl: _float, next_name: str, - rdtypes: List[_int], + rdtypes: list[_int], created: _float, ) -> None: self._fast_init_record(name, type_, class_, ttl, created) @@ -559,7 +561,7 @@ def _fast_init( self.rdtypes = sorted(rdtypes) self._hash = hash((self.key, type_, self.class_, next_name, *self.rdtypes)) - def write(self, out: "DNSOutgoing") -> None: + def write(self, out: DNSOutgoing) -> None: """Used in constructing an outgoing packet.""" bitmap = bytearray(b"\0" * 32) total_octets = 0 @@ -610,21 +612,21 @@ class DNSRRSet: __slots__ = ("_lookup", "_records") - def __init__(self, records: List[DNSRecord]) -> None: + def __init__(self, records: list[DNSRecord]) -> None: """Create an RRset from records sets.""" self._records = records - self._lookup: Optional[Dict[DNSRecord, DNSRecord]] = None + self._lookup: dict[DNSRecord, DNSRecord] | None = None @property - def lookup(self) -> Dict[DNSRecord, DNSRecord]: + def lookup(self) -> dict[DNSRecord, DNSRecord]: """Return the lookup table.""" return self._get_lookup() - def lookup_set(self) -> Set[DNSRecord]: + def lookup_set(self) -> set[DNSRecord]: """Return the lookup table as aset.""" return set(self._get_lookup()) - def _get_lookup(self) -> Dict[DNSRecord, DNSRecord]: + def _get_lookup(self) -> dict[DNSRecord, DNSRecord]: """Return the lookup table, building it if needed.""" if self._lookup is None: # Build the hash table so we can lookup the record 
ttl diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 05f8c948..7b22f788 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -20,11 +20,13 @@ USA """ +from __future__ import annotations + import asyncio import itertools import socket import threading -from typing import TYPE_CHECKING, List, Optional, cast +from typing import TYPE_CHECKING, cast from ._record_update import RecordUpdate from ._utils.asyncio import get_running_loop, run_coro_with_timeout @@ -58,31 +60,31 @@ class AsyncEngine: def __init__( self, - zeroconf: "Zeroconf", - listen_socket: Optional[socket.socket], - respond_sockets: List[socket.socket], + zeroconf: Zeroconf, + listen_socket: socket.socket | None, + respond_sockets: list[socket.socket], ) -> None: - self.loop: Optional[asyncio.AbstractEventLoop] = None + self.loop: asyncio.AbstractEventLoop | None = None self.zc = zeroconf - self.protocols: List[AsyncListener] = [] - self.readers: List[_WrappedTransport] = [] - self.senders: List[_WrappedTransport] = [] - self.running_event: Optional[asyncio.Event] = None + self.protocols: list[AsyncListener] = [] + self.readers: list[_WrappedTransport] = [] + self.senders: list[_WrappedTransport] = [] + self.running_event: asyncio.Event | None = None self._listen_socket = listen_socket self._respond_sockets = respond_sockets - self._cleanup_timer: Optional[asyncio.TimerHandle] = None + self._cleanup_timer: asyncio.TimerHandle | None = None def setup( self, loop: asyncio.AbstractEventLoop, - loop_thread_ready: Optional[threading.Event], + loop_thread_ready: threading.Event | None, ) -> None: """Set up the instance.""" self.loop = loop self.running_event = asyncio.Event() self.loop.create_task(self._async_setup(loop_thread_ready)) - async def _async_setup(self, loop_thread_ready: Optional[threading.Event]) -> None: + async def _async_setup(self, loop_thread_ready: threading.Event | None) -> None: """Set up the instance.""" self._async_schedule_next_cache_cleanup() await 
self._async_create_endpoints() diff --git a/src/zeroconf/_exceptions.py b/src/zeroconf/_exceptions.py index 5eb58f79..5fc81259 100644 --- a/src/zeroconf/_exceptions.py +++ b/src/zeroconf/_exceptions.py @@ -20,6 +20,8 @@ USA """ +from __future__ import annotations + class Error(Exception): """Base class for all zeroconf exceptions.""" diff --git a/src/zeroconf/_handlers/__init__.py b/src/zeroconf/_handlers/__init__.py index 30920c6a..584a74ec 100644 --- a/src/zeroconf/_handlers/__init__.py +++ b/src/zeroconf/_handlers/__init__.py @@ -19,3 +19,5 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ + +from __future__ import annotations diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index 7ddde197..ec53eb84 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -20,8 +20,10 @@ USA """ +from __future__ import annotations + from operator import attrgetter -from typing import Dict, List, Set +from typing import Dict, Set from .._dns import DNSQuestion, DNSRecord from .._protocol.outgoing import DNSOutgoing @@ -96,7 +98,7 @@ def construct_outgoing_multicast_answers( def construct_outgoing_unicast_answers( answers: _AnswerWithAdditionalsType, ucast_source: bool, - questions: List[DNSQuestion], + questions: list[DNSQuestion], id_: int_, ) -> DNSOutgoing: """Add answers and additionals to a DNSOutgoing.""" @@ -111,7 +113,7 @@ def construct_outgoing_unicast_answers( def _add_answers_additionals(out: DNSOutgoing, answers: _AnswerWithAdditionalsType) -> None: # Find additionals and suppress any additionals that are already in answers - sending: Set[DNSRecord] = set(answers) + sending: set[DNSRecord] = set(answers) # Answers are sorted to group names together to increase the chance # that similar names will end up in the same packet and can reduce the # overall size of the outgoing response via name compression diff --git a/src/zeroconf/_handlers/multicast_outgoing_queue.py 
b/src/zeroconf/_handlers/multicast_outgoing_queue.py index caf6470b..73d5ee43 100644 --- a/src/zeroconf/_handlers/multicast_outgoing_queue.py +++ b/src/zeroconf/_handlers/multicast_outgoing_queue.py @@ -20,6 +20,8 @@ USA """ +from __future__ import annotations + import random from collections import deque from typing import TYPE_CHECKING @@ -53,7 +55,7 @@ class MulticastOutgoingQueue: "zc", ) - def __init__(self, zeroconf: "Zeroconf", additional_delay: _int, max_aggregation_delay: _int) -> None: + def __init__(self, zeroconf: Zeroconf, additional_delay: _int, max_aggregation_delay: _int) -> None: self.zc = zeroconf self.queue: deque[AnswerGroup] = deque() # Additional delay is used to implement diff --git a/src/zeroconf/_handlers/query_handler.py b/src/zeroconf/_handlers/query_handler.py index ccfc7a77..60209568 100644 --- a/src/zeroconf/_handlers/query_handler.py +++ b/src/zeroconf/_handlers/query_handler.py @@ -20,7 +20,9 @@ USA """ -from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Union, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, cast from .._cache import DNSCache, _UniqueRecordsType from .._dns import DNSAddress, DNSPointer, DNSQuestion, DNSRecord, DNSRRSet @@ -52,8 +54,8 @@ _RESPOND_IMMEDIATE_TYPES = {_TYPE_NSEC, _TYPE_SRV, *_ADDRESS_RECORD_TYPES} -_EMPTY_SERVICES_LIST: List[ServiceInfo] = [] -_EMPTY_TYPES_LIST: List[str] = [] +_EMPTY_SERVICES_LIST: list[ServiceInfo] = [] +_EMPTY_TYPES_LIST: list[str] = [] _IPVersion_ALL = IPVersion.All @@ -77,8 +79,8 @@ def __init__( self, question: DNSQuestion, strategy_type: _int, - types: List[str], - services: List[ServiceInfo], + types: list[str], + services: list[ServiceInfo], ) -> None: """Create an answer strategy.""" self.question = question @@ -102,17 +104,17 @@ class _QueryResponse: "_ucast", ) - def __init__(self, cache: DNSCache, questions: List[DNSQuestion], is_probe: bool, now: float) -> None: + def __init__(self, cache: DNSCache, questions: list[DNSQuestion], 
is_probe: bool, now: float) -> None: """Build a query response.""" self._is_probe = is_probe self._questions = questions self._now = now self._cache = cache self._additionals: _AnswerWithAdditionalsType = {} - self._ucast: Set[DNSRecord] = set() - self._mcast_now: Set[DNSRecord] = set() - self._mcast_aggregate: Set[DNSRecord] = set() - self._mcast_aggregate_last_second: Set[DNSRecord] = set() + self._ucast: set[DNSRecord] = set() + self._mcast_now: set[DNSRecord] = set() + self._mcast_aggregate: set[DNSRecord] = set() + self._mcast_aggregate_last_second: set[DNSRecord] = set() def add_qu_question_response(self, answers: _AnswerWithAdditionalsType) -> None: """Generate a response to a multicast QU query.""" @@ -199,7 +201,7 @@ class QueryHandler: "zc", ) - def __init__(self, zc: "Zeroconf") -> None: + def __init__(self, zc: Zeroconf) -> None: """Init the query handler.""" self.zc = zc self.registry = zc.registry @@ -210,7 +212,7 @@ def __init__(self, zc: "Zeroconf") -> None: def _add_service_type_enumeration_query_answers( self, - types: List[str], + types: list[str], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, ) -> None: @@ -232,7 +234,7 @@ def _add_service_type_enumeration_query_answers( def _add_pointer_answers( self, - services: List[ServiceInfo], + services: list[ServiceInfo], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, ) -> None: @@ -251,23 +253,23 @@ def _add_pointer_answers( def _add_address_answers( self, - services: List[ServiceInfo], + services: list[ServiceInfo], answer_set: _AnswerWithAdditionalsType, known_answers: DNSRRSet, type_: _int, ) -> None: """Answer A/AAAA/ANY question.""" for service in services: - answers: List[DNSAddress] = [] - additionals: Set[DNSRecord] = set() - seen_types: Set[int] = set() + answers: list[DNSAddress] = [] + additionals: set[DNSRecord] = set() + seen_types: set[int] = set() for dns_address in service._dns_addresses(None, _IPVersion_ALL): seen_types.add(dns_address.type) if 
dns_address.type != type_: additionals.add(dns_address) elif not known_answers.suppresses(dns_address): answers.append(dns_address) - missing_types: Set[int] = _ADDRESS_RECORD_TYPES - seen_types + missing_types: set[int] = _ADDRESS_RECORD_TYPES - seen_types if answers: if missing_types: assert service.server is not None, "Service server must be set for NSEC record." @@ -282,8 +284,8 @@ def _answer_question( self, question: DNSQuestion, strategy_type: _int, - types: List[str], - services: List[ServiceInfo], + types: list[str], + services: list[ServiceInfo], known_answers: DNSRRSet, ) -> _AnswerWithAdditionalsType: """Answer a question.""" @@ -311,14 +313,14 @@ def _answer_question( return answer_set def async_response( # pylint: disable=unused-argument - self, msgs: List[DNSIncoming], ucast_source: bool - ) -> Optional[QuestionAnswers]: + self, msgs: list[DNSIncoming], ucast_source: bool + ) -> QuestionAnswers | None: """Deal with incoming query packets. Provides a response if possible. This function must be run in the event loop as it is not threadsafe. 
""" - strategies: List[_AnswerStrategy] = [] + strategies: list[_AnswerStrategy] = [] for msg in msgs: for question in msg._questions: strategies.extend(self._get_answer_strategies(question)) @@ -334,7 +336,7 @@ def async_response( # pylint: disable=unused-argument questions = msg._questions # Only decode known answers if we are not a probe and we have # at least one answer strategy - answers: List[DNSRecord] = [] + answers: list[DNSRecord] = [] for msg in msgs: if msg.is_probe(): is_probe = True @@ -343,7 +345,7 @@ def async_response( # pylint: disable=unused-argument query_res = _QueryResponse(self.cache, questions, is_probe, msg.now) known_answers = DNSRRSet(answers) - known_answers_set: Optional[Set[DNSRecord]] = None + known_answers_set: set[DNSRecord] | None = None now = msg.now for strategy in strategies: question = strategy.question @@ -373,12 +375,12 @@ def async_response( # pylint: disable=unused-argument def _get_answer_strategies( self, question: DNSQuestion, - ) -> List[_AnswerStrategy]: + ) -> list[_AnswerStrategy]: """Collect strategies to answer a question.""" name = question.name question_lower_name = name.lower() type_ = question.type - strategies: List[_AnswerStrategy] = [] + strategies: list[_AnswerStrategy] = [] if type_ == _TYPE_PTR and question_lower_name == _SERVICE_TYPE_ENUMERATION_NAME: types = self.registry.async_get_types() @@ -433,11 +435,11 @@ def _get_answer_strategies( def handle_assembled_query( self, - packets: List[DNSIncoming], + packets: list[DNSIncoming], addr: _str, port: _int, transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]], + v6_flow_scope: tuple[()] | tuple[int, int], ) -> None: """Respond to a (re)assembled query. 
diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index d4e068c2..37232b13 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -21,7 +21,7 @@ cdef class RecordManager: cdef public DNSCache cache cdef public cython.set listeners - cpdef void async_updates(self, object now, object records) + cpdef void async_updates(self, object now, list records) cpdef void async_updates_complete(self, bint notify) diff --git a/src/zeroconf/_handlers/record_manager.py b/src/zeroconf/_handlers/record_manager.py index d4e2792c..566f0e8c 100644 --- a/src/zeroconf/_handlers/record_manager.py +++ b/src/zeroconf/_handlers/record_manager.py @@ -20,7 +20,9 @@ USA """ -from typing import TYPE_CHECKING, List, Optional, Set, Tuple, Union, cast +from __future__ import annotations + +from typing import TYPE_CHECKING, cast from .._cache import _UniqueRecordsType from .._dns import DNSQuestion, DNSRecord @@ -42,13 +44,13 @@ class RecordManager: __slots__ = ("cache", "listeners", "zc") - def __init__(self, zeroconf: "Zeroconf") -> None: + def __init__(self, zeroconf: Zeroconf) -> None: """Init the record manager.""" self.zc = zeroconf self.cache = zeroconf.cache - self.listeners: Set[RecordUpdateListener] = set() + self.listeners: set[RecordUpdateListener] = set() - def async_updates(self, now: _float, records: List[RecordUpdate]) -> None: + def async_updates(self, now: _float, records: list[RecordUpdate]) -> None: """Used to notify listeners of new information that has updated a record. @@ -79,12 +81,12 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: This function must be run in the event loop as it is not threadsafe. 
""" - updates: List[RecordUpdate] = [] - address_adds: List[DNSRecord] = [] - other_adds: List[DNSRecord] = [] - removes: Set[DNSRecord] = set() + updates: list[RecordUpdate] = [] + address_adds: list[DNSRecord] = [] + other_adds: list[DNSRecord] = [] + removes: set[DNSRecord] = set() now = msg.now - unique_types: Set[Tuple[str, int, int]] = set() + unique_types: set[tuple[str, int, int]] = set() cache = self.cache answers = msg.answers() @@ -165,7 +167,7 @@ def async_updates_from_response(self, msg: DNSIncoming) -> None: def async_add_listener( self, listener: RecordUpdateListener, - question: Optional[Union[DNSQuestion, List[DNSQuestion]]], + question: DNSQuestion | list[DNSQuestion] | None, ) -> None: """Adds a listener for a given question. The listener will have its update_record method called when information is available to @@ -188,14 +190,14 @@ def async_add_listener( self._async_update_matching_records(listener, questions) def _async_update_matching_records( - self, listener: RecordUpdateListener, questions: List[DNSQuestion] + self, listener: RecordUpdateListener, questions: list[DNSQuestion] ) -> None: """Calls back any existing entries in the cache that answer the question. This function must be run from the event loop. 
""" now = current_time_millis() - records: List[RecordUpdate] = [ + records: list[RecordUpdate] = [ RecordUpdate(record, None) for question in questions for record in self.cache.async_entries_with_name(question.name) diff --git a/src/zeroconf/_history.py b/src/zeroconf/_history.py index aa28519c..5bae7be0 100644 --- a/src/zeroconf/_history.py +++ b/src/zeroconf/_history.py @@ -20,7 +20,7 @@ USA """ -from typing import Dict, List, Set, Tuple +from __future__ import annotations from ._dns import DNSQuestion, DNSRecord from .const import _DUPLICATE_QUESTION_INTERVAL @@ -36,13 +36,13 @@ class QuestionHistory: def __init__(self) -> None: """Init a new QuestionHistory.""" - self._history: Dict[DNSQuestion, Tuple[float, Set[DNSRecord]]] = {} + self._history: dict[DNSQuestion, tuple[float, set[DNSRecord]]] = {} - def add_question_at_time(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> None: + def add_question_at_time(self, question: DNSQuestion, now: _float, known_answers: set[DNSRecord]) -> None: """Remember a question with known answers.""" self._history[question] = (now, known_answers) - def suppresses(self, question: DNSQuestion, now: _float, known_answers: Set[DNSRecord]) -> bool: + def suppresses(self, question: DNSQuestion, now: _float, known_answers: set[DNSRecord]) -> bool: """Check to see if a question should be suppressed. 
https://datatracker.ietf.org/doc/html/rfc6762#section-7.3 @@ -66,7 +66,7 @@ def suppresses(self, question: DNSQuestion, now: _float, known_answers: Set[DNSR def async_expire(self, now: _float) -> None: """Expire the history of old questions.""" - removes: List[DNSQuestion] = [] + removes: list[DNSQuestion] = [] for question, now_known_answers in self._history.items(): than, _ = now_known_answers if now - than > _DUPLICATE_QUESTION_INTERVAL: diff --git a/src/zeroconf/_logger.py b/src/zeroconf/_logger.py index 1556522e..0d734dfd 100644 --- a/src/zeroconf/_logger.py +++ b/src/zeroconf/_logger.py @@ -21,9 +21,11 @@ USA """ +from __future__ import annotations + import logging import sys -from typing import Any, ClassVar, Dict, Union, cast +from typing import Any, ClassVar, cast log = logging.getLogger(__name__.split(".", maxsplit=1)[0]) log.addHandler(logging.NullHandler()) @@ -38,7 +40,7 @@ def set_logger_level_if_unset() -> None: class QuietLogger: - _seen_logs: ClassVar[Dict[str, Union[int, tuple]]] = {} + _seen_logs: ClassVar[dict[str, int | tuple]] = {} @classmethod def log_exception_warning(cls, *logger_data: Any) -> None: diff --git a/src/zeroconf/_protocol/__init__.py b/src/zeroconf/_protocol/__init__.py index 30920c6a..584a74ec 100644 --- a/src/zeroconf/_protocol/__init__.py +++ b/src/zeroconf/_protocol/__init__.py @@ -19,3 +19,5 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ + +from __future__ import annotations diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 6e009b29..7f4a8eec 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -20,9 +20,11 @@ USA """ +from __future__ import annotations + import struct import sys -from typing import Any, Dict, List, Optional, Set, Tuple, Union +from typing import Any from .._dns import ( DNSAddress, @@ -61,7 +63,7 @@ DECODE_EXCEPTIONS = (IndexError, struct.error, IncomingDecodeError) -_seen_logs: Dict[str, 
Union[int, tuple]] = {} +_seen_logs: dict[str, int | tuple] = {} _str = str _int = int @@ -94,9 +96,9 @@ class DNSIncoming: def __init__( self, data: bytes, - source: Optional[Tuple[str, int]] = None, - scope_id: Optional[int] = None, - now: Optional[float] = None, + source: tuple[str, int] | None = None, + scope_id: int | None = None, + now: float | None = None, ) -> None: """Constructor from string holding bytes of packet""" self.flags = 0 @@ -104,9 +106,9 @@ def __init__( self.data = data self.view = data self._data_len = len(data) - self._name_cache: Dict[int, List[str]] = {} - self._questions: List[DNSQuestion] = [] - self._answers: List[DNSRecord] = [] + self._name_cache: dict[int, list[str]] = {} + self._questions: list[DNSQuestion] = [] + self._answers: list[DNSRecord] = [] self.id = 0 self._num_questions = 0 self._num_answers = 0 @@ -146,7 +148,7 @@ def truncated(self) -> bool: return (self.flags & _FLAGS_TC) == _FLAGS_TC @property - def questions(self) -> List[DNSQuestion]: + def questions(self) -> list[DNSQuestion]: """Questions in the packet.""" return self._questions @@ -189,7 +191,7 @@ def _log_exception_debug(cls, *logger_data: Any) -> None: log_exc_info = True log.debug(*(logger_data or ["Exception occurred"]), exc_info=log_exc_info) - def answers(self) -> List[DNSRecord]: + def answers(self) -> list[DNSRecord]: """Answers in the packet.""" if not self._did_read_others: try: @@ -306,7 +308,7 @@ def _read_others(self) -> None: def _read_record( self, domain: _str, type_: _int, class_: _int, ttl: _int, length: _int - ) -> Optional[DNSRecord]: + ) -> DNSRecord | None: """Read known records types and skip unknown ones.""" if type_ == _TYPE_A: address_rec = DNSAddress.__new__(DNSAddress) @@ -384,7 +386,7 @@ def _read_record( self.offset += length return None - def _read_bitmap(self, end: _int) -> List[int]: + def _read_bitmap(self, end: _int) -> list[int]: """Reads an NSEC bitmap from the packet.""" rdtypes = [] view = self.view @@ -404,8 +406,8 @@ def 
_read_bitmap(self, end: _int) -> List[int]: def _read_name(self) -> str: """Reads a domain name from the packet.""" - labels: List[str] = [] - seen_pointers: Set[int] = set() + labels: list[str] = [] + seen_pointers: set[int] = set() original_offset = self.offset self.offset = self._decode_labels_at_offset(original_offset, labels, seen_pointers) self._name_cache[original_offset] = labels @@ -416,7 +418,7 @@ def _read_name(self) -> str: ) return name - def _decode_labels_at_offset(self, off: _int, labels: List[str], seen_pointers: Set[int]) -> int: + def _decode_labels_at_offset(self, off: _int, labels: list[str], seen_pointers: set[int]) -> int: # This is a tight loop that is called frequently, small optimizations can make a difference. view = self.view while off < self._data_len: diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index c937350e..f5d09821 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -20,10 +20,12 @@ USA """ +from __future__ import annotations + import enum import logging from struct import Struct -from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Tuple, Union +from typing import TYPE_CHECKING, Sequence from .._dns import DNSPointer, DNSQuestion, DNSRecord from .._exceptions import NamePartTooLongException @@ -98,20 +100,20 @@ def __init__(self, flags: int, multicast: bool = True, id_: int = 0) -> None: self.finished = False self.id = id_ self.multicast = multicast - self.packets_data: List[bytes] = [] + self.packets_data: list[bytes] = [] # these 3 are per-packet -- see also _reset_for_next_packet() - self.names: Dict[str, int] = {} - self.data: List[bytes] = [] + self.names: dict[str, int] = {} + self.data: list[bytes] = [] self.size: int = _DNS_PACKET_HEADER_LEN self.allow_long: bool = True self.state = STATE_INIT - self.questions: List[DNSQuestion] = [] - self.answers: List[Tuple[DNSRecord, float]] = [] - self.authorities: List[DNSPointer] = [] - 
self.additionals: List[DNSRecord] = [] + self.questions: list[DNSQuestion] = [] + self.answers: list[tuple[DNSRecord, float]] = [] + self.authorities: list[DNSPointer] = [] + self.additionals: list[DNSRecord] = [] def is_query(self) -> bool: """Returns true if this is a query.""" @@ -150,7 +152,7 @@ def add_answer(self, inp: DNSIncoming, record: DNSRecord) -> None: if not record.suppressed_by(inp): self.add_answer_at_time(record, 0.0) - def add_answer_at_time(self, record: Optional[DNSRecord], now: float_) -> None: + def add_answer_at_time(self, record: DNSRecord | None, now: float_) -> None: """Adds an answer if it does not expire by a certain time""" now_double = now if record is not None and (now_double == 0 or not record.is_expired(now_double)): @@ -220,7 +222,7 @@ def write_short(self, value: int_) -> None: self.data.append(self._get_short(value)) self.size += 2 - def _write_int(self, value: Union[float, int]) -> None: + def _write_int(self, value: float | int) -> None: """Writes an unsigned integer to the packet""" value_as_int = int(value) long_bytes = LONG_LOOKUP.get(value_as_int) @@ -313,7 +315,7 @@ def _write_question(self, question: DNSQuestion_) -> bool: self._write_record_class(question) return self._check_data_limit_or_rollback(start_data_length, start_size) - def _write_record_class(self, record: Union[DNSQuestion_, DNSRecord_]) -> None: + def _write_record_class(self, record: DNSQuestion_ | DNSRecord_) -> None: """Write out the record class including the unique/unicast (QU) bit.""" class_ = record.class_ if record.unique is True and self.multicast: @@ -409,7 +411,7 @@ def _has_more_to_add( or additional_offset < len(self.additionals) ) - def packets(self) -> List[bytes]: + def packets(self) -> list[bytes]: """Returns a list of bytestrings containing the packets' bytes No further parts should be added to the packet once this diff --git a/src/zeroconf/_record_update.py b/src/zeroconf/_record_update.py index 912ab6f1..5f817511 100644 --- 
a/src/zeroconf/_record_update.py +++ b/src/zeroconf/_record_update.py @@ -20,7 +20,7 @@ USA """ -from typing import Optional +from __future__ import annotations from ._dns import DNSRecord @@ -30,16 +30,16 @@ class RecordUpdate: __slots__ = ("new", "old") - def __init__(self, new: DNSRecord, old: Optional[DNSRecord] = None) -> None: + def __init__(self, new: DNSRecord, old: DNSRecord | None = None) -> None: """RecordUpdate represents a change in a DNS record.""" self._fast_init(new, old) - def _fast_init(self, new: _DNSRecord, old: Optional[_DNSRecord]) -> None: + def _fast_init(self, new: _DNSRecord, old: _DNSRecord | None) -> None: """Fast init for RecordUpdate.""" self.new = new self.old = old - def __getitem__(self, index: int) -> Optional[DNSRecord]: + def __getitem__(self, index: int) -> DNSRecord | None: """Get the new or old record.""" if index == 0: return self.new diff --git a/src/zeroconf/_services/__init__.py b/src/zeroconf/_services/__init__.py index 7a6bddeb..6936aed6 100644 --- a/src/zeroconf/_services/__init__.py +++ b/src/zeroconf/_services/__init__.py @@ -20,8 +20,10 @@ USA """ +from __future__ import annotations + import enum -from typing import TYPE_CHECKING, Any, Callable, List +from typing import TYPE_CHECKING, Any, Callable if TYPE_CHECKING: from .._core import Zeroconf @@ -35,13 +37,13 @@ class ServiceStateChange(enum.Enum): class ServiceListener: - def add_service(self, zc: "Zeroconf", type_: str, name: str) -> None: + def add_service(self, zc: Zeroconf, type_: str, name: str) -> None: raise NotImplementedError() - def remove_service(self, zc: "Zeroconf", type_: str, name: str) -> None: + def remove_service(self, zc: Zeroconf, type_: str, name: str) -> None: raise NotImplementedError() - def update_service(self, zc: "Zeroconf", type_: str, name: str) -> None: + def update_service(self, zc: Zeroconf, type_: str, name: str) -> None: raise NotImplementedError() @@ -49,27 +51,27 @@ class Signal: __slots__ = ("_handlers",) def __init__(self) -> 
None: - self._handlers: List[Callable[..., None]] = [] + self._handlers: list[Callable[..., None]] = [] def fire(self, **kwargs: Any) -> None: for h in self._handlers[:]: h(**kwargs) @property - def registration_interface(self) -> "SignalRegistrationInterface": + def registration_interface(self) -> SignalRegistrationInterface: return SignalRegistrationInterface(self._handlers) class SignalRegistrationInterface: __slots__ = ("_handlers",) - def __init__(self, handlers: List[Callable[..., None]]) -> None: + def __init__(self, handlers: list[Callable[..., None]]) -> None: self._handlers = handlers - def register_handler(self, handler: Callable[..., None]) -> "SignalRegistrationInterface": + def register_handler(self, handler: Callable[..., None]) -> SignalRegistrationInterface: self._handlers.append(handler) return self - def unregister_handler(self, handler: Callable[..., None]) -> "SignalRegistrationInterface": + def unregister_handler(self, handler: Callable[..., None]) -> SignalRegistrationInterface: self._handlers.remove(handler) return self diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 42aaa1ac..c2ab115b 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -20,6 +20,8 @@ USA """ +from __future__ import annotations + import asyncio import heapq import queue @@ -36,11 +38,7 @@ Dict, Iterable, List, - Optional, Set, - Tuple, - Type, - Union, cast, ) @@ -155,13 +153,13 @@ def __repr__(self) -> str: ">" ) - def __lt__(self, other: "_ScheduledPTRQuery") -> bool: + def __lt__(self, other: _ScheduledPTRQuery) -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis < other.when_millis return NotImplemented - def __le__(self, other: "_ScheduledPTRQuery") -> bool: + def __le__(self, other: _ScheduledPTRQuery) -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis < other.when_millis or 
self.__eq__(other) @@ -173,13 +171,13 @@ def __eq__(self, other: Any) -> bool: return self.when_millis == other.when_millis return NotImplemented - def __ge__(self, other: "_ScheduledPTRQuery") -> bool: + def __ge__(self, other: _ScheduledPTRQuery) -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis > other.when_millis or self.__eq__(other) return NotImplemented - def __gt__(self, other: "_ScheduledPTRQuery") -> bool: + def __gt__(self, other: _ScheduledPTRQuery) -> bool: """Compare two scheduled queries.""" if type(other) is _ScheduledPTRQuery: return self.when_millis > other.when_millis @@ -197,7 +195,7 @@ def __init__(self, now_millis: float, multicast: bool) -> None: self.out = DNSOutgoing(_FLAGS_QR_QUERY, multicast) self.bytes = 0 - def add(self, max_compressed_size: int_, question: DNSQuestion, answers: Set[DNSPointer]) -> None: + def add(self, max_compressed_size: int_, question: DNSQuestion, answers: set[DNSPointer]) -> None: """Add a new set of questions and known answers to the outgoing.""" self.out.add_question(question) for answer in answers: @@ -209,7 +207,7 @@ def group_ptr_queries_with_known_answers( now: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers, -) -> List[DNSOutgoing]: +) -> list[DNSOutgoing]: """Aggregate queries so that as many known answers as possible fit in the same packet without having known answers spill over into the next packet unless the question and known answers are always going to exceed the packet size. @@ -225,19 +223,19 @@ def _group_ptr_queries_with_known_answers( now_millis: float_, multicast: bool_, question_with_known_answers: _QuestionWithKnownAnswers, -) -> List[DNSOutgoing]: +) -> list[DNSOutgoing]: """Inner wrapper for group_ptr_queries_with_known_answers.""" # This is the maximum size the query + known answers can be with name compression. 
# The actual size of the query + known answers may be a bit smaller since other # parts may be shared when the final DNSOutgoing packets are constructed. The # goal of this algorithm is to quickly bucket the query + known answers without # the overhead of actually constructing the packets. - query_by_size: Dict[DNSQuestion, int] = { + query_by_size: dict[DNSQuestion, int] = { question: (question.max_size + sum(answer.max_size_compressed for answer in known_answers)) for question, known_answers in question_with_known_answers.items() } max_bucket_size = _MAX_MSG_TYPICAL - _DNS_PACKET_HEADER_LEN - query_buckets: List[_DNSPointerOutgoingBucket] = [] + query_buckets: list[_DNSPointerOutgoingBucket] = [] for question in sorted( query_by_size, key=query_by_size.get, # type: ignore @@ -261,12 +259,12 @@ def _group_ptr_queries_with_known_answers( def generate_service_query( - zc: "Zeroconf", + zc: Zeroconf, now_millis: float_, - types_: Set[str], + types_: set[str], multicast: bool, - question_type: Optional[DNSQuestionType], -) -> List[DNSOutgoing]: + question_type: DNSQuestionType | None, +) -> list[DNSOutgoing]: """Generate a service query for sending with zeroconf.send.""" questions_with_known_answers: _QuestionWithKnownAnswers = {} qu_question = not multicast if question_type is None else question_type is QU_QUESTION @@ -296,7 +294,7 @@ def generate_service_query( def _on_change_dispatcher( listener: ServiceListener, - zeroconf: "Zeroconf", + zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange, @@ -346,14 +344,14 @@ class QueryScheduler: def __init__( self, - zc: "Zeroconf", - types: Set[str], - addr: Optional[str], + zc: Zeroconf, + types: set[str], + addr: str | None, port: int, multicast: bool, delay: int, - first_random_delay_interval: Tuple[int, int], - question_type: Optional[DNSQuestionType], + first_random_delay_interval: tuple[int, int], + question_type: DNSQuestionType | None, ) -> None: self._zc = zc self._types = types @@ 
-362,11 +360,11 @@ def __init__( self._multicast = multicast self._first_random_delay_interval = first_random_delay_interval self._min_time_between_queries_millis = delay - self._loop: Optional[asyncio.AbstractEventLoop] = None + self._loop: asyncio.AbstractEventLoop | None = None self._startup_queries_sent = 0 - self._next_scheduled_for_alias: Dict[str, _ScheduledPTRQuery] = {} + self._next_scheduled_for_alias: dict[str, _ScheduledPTRQuery] = {} self._query_heap: list[_ScheduledPTRQuery] = [] - self._next_run: Optional[asyncio.TimerHandle] = None + self._next_run: asyncio.TimerHandle | None = None self._clock_resolution_millis = time.get_clock_info("monotonic").resolution * 1000 self._question_type = question_type @@ -500,10 +498,10 @@ def _process_ready_types(self) -> None: # with a minimum time between queries of _min_time_between_queries # which defaults to 10s - ready_types: Set[str] = set() - next_scheduled: Optional[_ScheduledPTRQuery] = None + ready_types: set[str] = set() + next_scheduled: _ScheduledPTRQuery | None = None end_time_millis = now_millis + self._clock_resolution_millis - schedule_rescue: List[_ScheduledPTRQuery] = [] + schedule_rescue: list[_ScheduledPTRQuery] = [] while self._query_heap: query = self._query_heap[0] @@ -538,7 +536,7 @@ def _process_ready_types(self) -> None: self._next_run = self._loop.call_at(millis_to_seconds(next_when_millis), self._process_ready_types) def async_send_ready_queries( - self, first_request: bool, now_millis: float_, ready_types: Set[str] + self, first_request: bool, now_millis: float_, ready_types: set[str] ) -> None: """Send any ready queries.""" # If they did not specify and this is the first request, ask QU questions @@ -569,14 +567,14 @@ class _ServiceBrowserBase(RecordUpdateListener): def __init__( self, - zc: "Zeroconf", - type_: Union[str, list], - handlers: Optional[Union[ServiceListener, List[Callable[..., None]]]] = None, - listener: Optional[ServiceListener] = None, - addr: Optional[str] = None, + 
zc: Zeroconf, + type_: str | list, + handlers: ServiceListener | list[Callable[..., None]] | None = None, + listener: ServiceListener | None = None, + addr: str | None = None, port: int = _MDNS_PORT, delay: int = _BROWSER_TIME, - question_type: Optional[DNSQuestionType] = None, + question_type: DNSQuestionType | None = None, ) -> None: """Used to browse for a service for specific type(s). @@ -596,7 +594,7 @@ def __init__( discovers changes in the services availability. """ assert handlers or listener, "You need to specify at least one handler" - self.types: Set[str] = set(type_ if isinstance(type_, list) else [type_]) + self.types: set[str] = set(type_ if isinstance(type_, list) else [type_]) for check_type_ in self.types: # Will generate BadTypeInNameException on a bad name service_type_name(check_type_, strict=False) @@ -604,7 +602,7 @@ def __init__( self._cache = zc.cache assert zc.loop is not None self._loop = zc.loop - self._pending_handlers: Dict[Tuple[str, str], ServiceStateChange] = {} + self._pending_handlers: dict[tuple[str, str], ServiceStateChange] = {} self._service_state_changed = Signal() self.query_scheduler = QueryScheduler( zc, @@ -617,7 +615,7 @@ def __init__( question_type, ) self.done = False - self._query_sender_task: Optional[asyncio.Task] = None + self._query_sender_task: asyncio.Task | None = None if hasattr(handlers, "add_service"): listener = cast("ServiceListener", handlers) @@ -645,7 +643,7 @@ def _async_start(self) -> None: def service_state_changed(self) -> SignalRegistrationInterface: return self._service_state_changed.registration_interface - def _names_matching_types(self, names: Iterable[str]) -> List[Tuple[str, str]]: + def _names_matching_types(self, names: Iterable[str]) -> list[tuple[str, str]]: """Return the type and name for records matching the types we are browsing.""" return [ (type_, name) for name in names for type_ in self.types.intersection(cached_possible_types(name)) @@ -670,7 +668,7 @@ def _enqueue_callback( ): 
self._pending_handlers[key] = state_change - def async_update_records(self, zc: "Zeroconf", now: float_, records: List[RecordUpdate]) -> None: + def async_update_records(self, zc: Zeroconf, now: float_, records: list[RecordUpdate]) -> None: """Callback invoked by Zeroconf when new information arrives. Updates information required by browser in the Zeroconf cache. @@ -727,7 +725,7 @@ def async_update_records_complete(self) -> None: self._fire_service_state_changed_event(pending) self._pending_handlers.clear() - def _fire_service_state_changed_event(self, event: Tuple[Tuple[str, str], ServiceStateChange]) -> None: + def _fire_service_state_changed_event(self, event: tuple[tuple[str, str], ServiceStateChange]) -> None: """Fire a service state changed event. When running with ServiceBrowser, this will happen in the dedicated @@ -769,14 +767,14 @@ class ServiceBrowser(_ServiceBrowserBase, threading.Thread): def __init__( self, - zc: "Zeroconf", - type_: Union[str, list], - handlers: Optional[Union[ServiceListener, List[Callable[..., None]]]] = None, - listener: Optional[ServiceListener] = None, - addr: Optional[str] = None, + zc: Zeroconf, + type_: str | list, + handlers: ServiceListener | list[Callable[..., None]] | None = None, + listener: ServiceListener | None = None, + addr: str | None = None, port: int = _MDNS_PORT, delay: int = _BROWSER_TIME, - question_type: Optional[DNSQuestionType] = None, + question_type: DNSQuestionType | None = None, ) -> None: assert zc.loop is not None if not zc.loop.is_running(): @@ -821,14 +819,14 @@ def async_update_records_complete(self) -> None: self.queue.put(pending) self._pending_handlers.clear() - def __enter__(self) -> "ServiceBrowser": + def __enter__(self) -> ServiceBrowser: return self def __exit__( # pylint: disable=useless-return self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: + exc_type: type[BaseException] | None, + exc_val: 
BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: self.cancel() return None diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index a6e815b5..67777459 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -20,9 +20,11 @@ USA """ +from __future__ import annotations + import asyncio import random -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Union, cast +from typing import TYPE_CHECKING, Dict, List, Optional, cast from .._cache import DNSCache from .._dns import ( @@ -106,7 +108,7 @@ from .._core import Zeroconf -def instance_name_from_service_info(info: "ServiceInfo", strict: bool = True) -> str: +def instance_name_from_service_info(info: ServiceInfo, strict: bool = True) -> str: """Calculate the instance name from the ServiceInfo.""" # This is kind of funky because of the subtype based tests # need to make subtypes a first class citizen @@ -168,17 +170,17 @@ def __init__( self, type_: str, name: str, - port: Optional[int] = None, + port: int | None = None, weight: int = 0, priority: int = 0, - properties: Union[bytes, Dict] = b"", - server: Optional[str] = None, + properties: bytes | dict = b"", + server: str | None = None, host_ttl: int = _DNS_HOST_TTL, other_ttl: int = _DNS_OTHER_TTL, *, - addresses: Optional[List[bytes]] = None, - parsed_addresses: Optional[List[str]] = None, - interface_index: Optional[int] = None, + addresses: list[bytes] | None = None, + parsed_addresses: list[str] | None = None, + interface_index: int | None = None, ) -> None: # Accept both none, or one, but not both. 
if addresses is not None and parsed_addresses is not None: @@ -190,8 +192,8 @@ def __init__( self.type = type_ self._name = name self.key = name.lower() - self._ipv4_addresses: List[ZeroconfIPv4Address] = [] - self._ipv6_addresses: List[ZeroconfIPv6Address] = [] + self._ipv4_addresses: list[ZeroconfIPv4Address] = [] + self._ipv6_addresses: list[ZeroconfIPv6Address] = [] if addresses is not None: self.addresses = addresses elif parsed_addresses is not None: @@ -201,20 +203,20 @@ def __init__( self.priority = priority self.server = server if server else None self.server_key = server.lower() if server else None - self._properties: Optional[Dict[bytes, Optional[bytes]]] = None - self._decoded_properties: Optional[Dict[str, Optional[str]]] = None + self._properties: dict[bytes, bytes | None] | None = None + self._decoded_properties: dict[str, str | None] | None = None if isinstance(properties, bytes): self._set_text(properties) else: self._set_properties(properties) self.host_ttl = host_ttl self.other_ttl = other_ttl - self._new_records_futures: Optional[Set[asyncio.Future]] = None - self._dns_address_cache: Optional[List[DNSAddress]] = None - self._dns_pointer_cache: Optional[DNSPointer] = None - self._dns_service_cache: Optional[DNSService] = None - self._dns_text_cache: Optional[DNSText] = None - self._get_address_and_nsec_records_cache: Optional[Set[DNSRecord]] = None + self._new_records_futures: set[asyncio.Future] | None = None + self._dns_address_cache: list[DNSAddress] | None = None + self._dns_pointer_cache: DNSPointer | None = None + self._dns_service_cache: DNSService | None = None + self._dns_text_cache: DNSText | None = None + self._get_address_and_nsec_records_cache: set[DNSRecord] | None = None self._query_record_types = {_TYPE_SRV, _TYPE_TXT, _TYPE_A, _TYPE_AAAA} @property @@ -232,7 +234,7 @@ def name(self, name: str) -> None: self._dns_text_cache = None @property - def addresses(self) -> List[bytes]: + def addresses(self) -> list[bytes]: """IPv4 
addresses of this service. Only IPv4 addresses are returned for backward compatibility. @@ -242,7 +244,7 @@ def addresses(self) -> List[bytes]: return self.addresses_by_version(IPVersion.V4Only) @addresses.setter - def addresses(self, value: List[bytes]) -> None: + def addresses(self, value: list[bytes]) -> None: """Replace the addresses list. This replaces all currently stored addresses, both IPv4 and IPv6. @@ -272,7 +274,7 @@ def addresses(self, value: List[bytes]) -> None: self._ipv6_addresses.append(addr) @property - def properties(self) -> Dict[bytes, Optional[bytes]]: + def properties(self) -> dict[bytes, bytes | None]: """Return properties as bytes.""" if self._properties is None: self._unpack_text_into_properties() @@ -281,7 +283,7 @@ def properties(self) -> Dict[bytes, Optional[bytes]]: return self._properties @property - def decoded_properties(self) -> Dict[str, Optional[str]]: + def decoded_properties(self) -> dict[str, str | None]: """Return properties as strings.""" if self._decoded_properties is None: self._generate_decoded_properties() @@ -297,7 +299,7 @@ def async_clear_cache(self) -> None: self._dns_text_cache = None self._get_address_and_nsec_records_cache = None - async def async_wait(self, timeout: float, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + async def async_wait(self, timeout: float, loop: asyncio.AbstractEventLoop | None = None) -> None: """Calling task waits for a given number of milliseconds or until notified.""" if not self._new_records_futures: self._new_records_futures = set() @@ -305,7 +307,7 @@ async def async_wait(self, timeout: float, loop: Optional[asyncio.AbstractEventL loop or asyncio.get_running_loop(), self._new_records_futures, timeout ) - def addresses_by_version(self, version: IPVersion) -> List[bytes]: + def addresses_by_version(self, version: IPVersion) -> list[bytes]: """List addresses matching IP version. 
Addresses are guaranteed to be returned in LIFO (last in, first out) @@ -325,7 +327,7 @@ def addresses_by_version(self, version: IPVersion) -> List[bytes]: def ip_addresses_by_version( self, version: IPVersion - ) -> Union[List[ZeroconfIPv4Address], List[ZeroconfIPv6Address]]: + ) -> list[ZeroconfIPv4Address] | list[ZeroconfIPv6Address]: """List ip_address objects matching IP version. Addresses are guaranteed to be returned in LIFO (last in, first out) @@ -338,7 +340,7 @@ def ip_addresses_by_version( def _ip_addresses_by_version_value( self, version_value: int_ - ) -> Union[List[ZeroconfIPv4Address], List[ZeroconfIPv6Address]]: + ) -> list[ZeroconfIPv4Address] | list[ZeroconfIPv6Address]: """Backend for addresses_by_version that uses the raw value.""" if version_value == _IPVersion_All_value: return [*self._ipv4_addresses, *self._ipv6_addresses] # type: ignore[return-value] @@ -346,7 +348,7 @@ def _ip_addresses_by_version_value( return self._ipv4_addresses return self._ipv6_addresses - def parsed_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: + def parsed_addresses(self, version: IPVersion = IPVersion.All) -> list[str]: """List addresses in their parsed string form. 
Addresses are guaranteed to be returned in LIFO (last in, first out) @@ -357,7 +359,7 @@ def parsed_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: """ return [str_without_scope_id(addr) for addr in self._ip_addresses_by_version_value(version.value)] - def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[str]: + def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> list[str]: """Equivalent to parsed_addresses, with the exception that IPv6 Link-Local addresses are qualified with % when available @@ -369,9 +371,9 @@ def parsed_scoped_addresses(self, version: IPVersion = IPVersion.All) -> List[st """ return [str(addr) for addr in self._ip_addresses_by_version_value(version.value)] - def _set_properties(self, properties: Dict[Union[str, bytes], Optional[Union[str, bytes]]]) -> None: + def _set_properties(self, properties: dict[str | bytes, str | bytes | None]) -> None: """Sets properties and text of this info from a dictionary""" - list_: List[bytes] = [] + list_: list[bytes] = [] properties_contain_str = False result = b"" for key, value in properties.items(): @@ -425,7 +427,7 @@ def _unpack_text_into_properties(self) -> None: return index = 0 - properties: Dict[bytes, Optional[bytes]] = {} + properties: dict[bytes, bytes | None] = {} while index < end: length = text[index] index += 1 @@ -443,10 +445,10 @@ def get_name(self) -> str: return self._name[: len(self._name) - len(self.type) - 1] def _get_ip_addresses_from_cache_lifo( - self, zc: "Zeroconf", now: float_, type: int_ - ) -> List[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: + self, zc: Zeroconf, now: float_, type: int_ + ) -> list[ZeroconfIPv4Address | ZeroconfIPv6Address]: """Set IPv6 addresses from the cache.""" - address_list: List[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]] = [] + address_list: list[ZeroconfIPv4Address | ZeroconfIPv6Address] = [] for record in self._get_address_records_from_cache_by_type(zc, type): if 
record.is_expired(now): continue @@ -456,7 +458,7 @@ def _get_ip_addresses_from_cache_lifo( address_list.reverse() # Reverse to get LIFO order return address_list - def _set_ipv6_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: + def _set_ipv6_addresses_from_cache(self, zc: Zeroconf, now: float_) -> None: """Set IPv6 addresses from the cache.""" if TYPE_CHECKING: self._ipv6_addresses = cast( @@ -466,7 +468,7 @@ def _set_ipv6_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: else: self._ipv6_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA) - def _set_ipv4_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: + def _set_ipv4_addresses_from_cache(self, zc: Zeroconf, now: float_) -> None: """Set IPv4 addresses from the cache.""" if TYPE_CHECKING: self._ipv4_addresses = cast( @@ -476,7 +478,7 @@ def _set_ipv4_addresses_from_cache(self, zc: "Zeroconf", now: float_) -> None: else: self._ipv4_addresses = self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A) - def async_update_records(self, zc: "Zeroconf", now: float_, records: List[RecordUpdate]) -> None: + def async_update_records(self, zc: Zeroconf, now: float_, records: list[RecordUpdate]) -> None: """Updates service information from a DNS record. This method will be run in the event loop. @@ -488,7 +490,7 @@ def async_update_records(self, zc: "Zeroconf", now: float_, records: List[Record if updated and new_records_futures: _resolve_all_futures_to_none(new_records_futures) - def _process_record_threadsafe(self, zc: "Zeroconf", record: DNSRecord, now: float_) -> bool: + def _process_record_threadsafe(self, zc: Zeroconf, record: DNSRecord, now: float_) -> bool: """Thread safe record updating. Returns True if a new record was added. 
@@ -575,17 +577,17 @@ def _process_record_threadsafe(self, zc: "Zeroconf", record: DNSRecord, now: flo def dns_addresses( self, - override_ttl: Optional[int] = None, + override_ttl: int | None = None, version: IPVersion = IPVersion.All, - ) -> List[DNSAddress]: + ) -> list[DNSAddress]: """Return matching DNSAddress from ServiceInfo.""" return self._dns_addresses(override_ttl, version) def _dns_addresses( self, - override_ttl: Optional[int], + override_ttl: int | None, version: IPVersion, - ) -> List[DNSAddress]: + ) -> list[DNSAddress]: """Return matching DNSAddress from ServiceInfo.""" cacheable = version is IPVersion.All and override_ttl is None if self._dns_address_cache is not None and cacheable: @@ -609,11 +611,11 @@ def _dns_addresses( self._dns_address_cache = records return records - def dns_pointer(self, override_ttl: Optional[int] = None) -> DNSPointer: + def dns_pointer(self, override_ttl: int | None = None) -> DNSPointer: """Return DNSPointer from ServiceInfo.""" return self._dns_pointer(override_ttl) - def _dns_pointer(self, override_ttl: Optional[int]) -> DNSPointer: + def _dns_pointer(self, override_ttl: int | None) -> DNSPointer: """Return DNSPointer from ServiceInfo.""" cacheable = override_ttl is None if self._dns_pointer_cache is not None and cacheable: @@ -630,11 +632,11 @@ def _dns_pointer(self, override_ttl: Optional[int]) -> DNSPointer: self._dns_pointer_cache = record return record - def dns_service(self, override_ttl: Optional[int] = None) -> DNSService: + def dns_service(self, override_ttl: int | None = None) -> DNSService: """Return DNSService from ServiceInfo.""" return self._dns_service(override_ttl) - def _dns_service(self, override_ttl: Optional[int]) -> DNSService: + def _dns_service(self, override_ttl: int | None) -> DNSService: """Return DNSService from ServiceInfo.""" cacheable = override_ttl is None if self._dns_service_cache is not None and cacheable: @@ -657,11 +659,11 @@ def _dns_service(self, override_ttl: Optional[int]) -> 
DNSService: self._dns_service_cache = record return record - def dns_text(self, override_ttl: Optional[int] = None) -> DNSText: + def dns_text(self, override_ttl: int | None = None) -> DNSText: """Return DNSText from ServiceInfo.""" return self._dns_text(override_ttl) - def _dns_text(self, override_ttl: Optional[int]) -> DNSText: + def _dns_text(self, override_ttl: int | None) -> DNSText: """Return DNSText from ServiceInfo.""" cacheable = override_ttl is None if self._dns_text_cache is not None and cacheable: @@ -678,11 +680,11 @@ def _dns_text(self, override_ttl: Optional[int]) -> DNSText: self._dns_text_cache = record return record - def dns_nsec(self, missing_types: List[int], override_ttl: Optional[int] = None) -> DNSNsec: + def dns_nsec(self, missing_types: list[int], override_ttl: int | None = None) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return self._dns_nsec(missing_types, override_ttl) - def _dns_nsec(self, missing_types: List[int], override_ttl: Optional[int]) -> DNSNsec: + def _dns_nsec(self, missing_types: list[int], override_ttl: int | None) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return DNSNsec( self._name, @@ -694,17 +696,17 @@ def _dns_nsec(self, missing_types: List[int], override_ttl: Optional[int]) -> DN 0.0, ) - def get_address_and_nsec_records(self, override_ttl: Optional[int] = None) -> Set[DNSRecord]: + def get_address_and_nsec_records(self, override_ttl: int | None = None) -> set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" return self._get_address_and_nsec_records(override_ttl) - def _get_address_and_nsec_records(self, override_ttl: Optional[int]) -> Set[DNSRecord]: + def _get_address_and_nsec_records(self, override_ttl: int | None) -> set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" cacheable = override_ttl is None if self._get_address_and_nsec_records_cache is not None and cacheable: return 
self._get_address_and_nsec_records_cache - missing_types: Set[int] = _ADDRESS_RECORD_TYPES.copy() - records: Set[DNSRecord] = set() + missing_types: set[int] = _ADDRESS_RECORD_TYPES.copy() + records: set[DNSRecord] = set() for dns_address in self._dns_addresses(override_ttl, IPVersion.All): missing_types.discard(dns_address.type) records.add(dns_address) @@ -715,7 +717,7 @@ def _get_address_and_nsec_records(self, override_ttl: Optional[int]) -> Set[DNSR self._get_address_and_nsec_records_cache = records return records - def _get_address_records_from_cache_by_type(self, zc: "Zeroconf", _type: int_) -> List[DNSAddress]: + def _get_address_records_from_cache_by_type(self, zc: Zeroconf, _type: int_) -> list[DNSAddress]: """Get the addresses from the cache.""" if self.server_key is None: return [] @@ -738,14 +740,14 @@ def set_server_if_missing(self) -> None: self.server = self._name self.server_key = self.key - def load_from_cache(self, zc: "Zeroconf", now: Optional[float_] = None) -> bool: + def load_from_cache(self, zc: Zeroconf, now: float_ | None = None) -> bool: """Populate the service info from the cache. This method is designed to be threadsafe. """ return self._load_from_cache(zc, now or current_time_millis()) - def _load_from_cache(self, zc: "Zeroconf", now: float_) -> bool: + def _load_from_cache(self, zc: Zeroconf, now: float_) -> bool: """Populate the service info from the cache. This method is designed to be threadsafe. 
@@ -775,10 +777,10 @@ def _is_complete(self) -> bool: def request( self, - zc: "Zeroconf", + zc: Zeroconf, timeout: float, - question_type: Optional[DNSQuestionType] = None, - addr: Optional[str] = None, + question_type: DNSQuestionType | None = None, + addr: str | None = None, port: int = _MDNS_PORT, ) -> bool: """Returns true if the service could be discovered on the @@ -814,10 +816,10 @@ def _get_random_delay(self) -> int_: async def async_request( self, - zc: "Zeroconf", + zc: Zeroconf, timeout: float, - question_type: Optional[DNSQuestionType] = None, - addr: Optional[str] = None, + question_type: DNSQuestionType | None = None, + addr: str | None = None, port: int = _MDNS_PORT, ) -> bool: """Returns true if the service could be discovered on the @@ -914,7 +916,7 @@ def _add_question_with_known_answers( out.add_answer_at_time(answer, now) def _generate_request_query( - self, zc: "Zeroconf", now: float_, question_type: DNSQuestionType + self, zc: Zeroconf, now: float_, question_type: DNSQuestionType ) -> DNSOutgoing: """Generate the request query.""" out = DNSOutgoing(_FLAGS_QR_QUERY) diff --git a/src/zeroconf/_services/registry.py b/src/zeroconf/_services/registry.py index 4100c690..937992eb 100644 --- a/src/zeroconf/_services/registry.py +++ b/src/zeroconf/_services/registry.py @@ -20,7 +20,7 @@ USA """ -from typing import Dict, List, Optional, Union +from __future__ import annotations from .._exceptions import ServiceNameAlreadyRegistered from .info import ServiceInfo @@ -41,16 +41,16 @@ def __init__( self, ) -> None: """Create the ServiceRegistry class.""" - self._services: Dict[str, ServiceInfo] = {} - self.types: Dict[str, List] = {} - self.servers: Dict[str, List] = {} + self._services: dict[str, ServiceInfo] = {} + self.types: dict[str, list] = {} + self.servers: dict[str, list] = {} self.has_entries: bool = False def async_add(self, info: ServiceInfo) -> None: """Add a new service to the registry.""" self._add(info) - def async_remove(self, info: 
Union[List[ServiceInfo], ServiceInfo]) -> None: + def async_remove(self, info: list[ServiceInfo] | ServiceInfo) -> None: """Remove a new service from the registry.""" self._remove(info if isinstance(info, list) else [info]) @@ -59,27 +59,27 @@ def async_update(self, info: ServiceInfo) -> None: self._remove([info]) self._add(info) - def async_get_service_infos(self) -> List[ServiceInfo]: + def async_get_service_infos(self) -> list[ServiceInfo]: """Return all ServiceInfo.""" return list(self._services.values()) - def async_get_info_name(self, name: str) -> Optional[ServiceInfo]: + def async_get_info_name(self, name: str) -> ServiceInfo | None: """Return all ServiceInfo for the name.""" return self._services.get(name) - def async_get_types(self) -> List[str]: + def async_get_types(self) -> list[str]: """Return all types.""" return list(self.types) - def async_get_infos_type(self, type_: str) -> List[ServiceInfo]: + def async_get_infos_type(self, type_: str) -> list[ServiceInfo]: """Return all ServiceInfo matching type.""" return self._async_get_by_index(self.types, type_) - def async_get_infos_server(self, server: str) -> List[ServiceInfo]: + def async_get_infos_server(self, server: str) -> list[ServiceInfo]: """Return all ServiceInfo matching server.""" return self._async_get_by_index(self.servers, server) - def _async_get_by_index(self, records: Dict[str, List], key: _str) -> List[ServiceInfo]: + def _async_get_by_index(self, records: dict[str, list], key: _str) -> list[ServiceInfo]: """Return all ServiceInfo matching the index.""" record_list = records.get(key) if record_list is None: @@ -98,7 +98,7 @@ def _add(self, info: ServiceInfo) -> None: self.servers.setdefault(info.server_key, []).append(info.key) self.has_entries = True - def _remove(self, infos: List[ServiceInfo]) -> None: + def _remove(self, infos: list[ServiceInfo]) -> None: """Remove a services under the lock.""" for info in infos: old_service_info = self._services.get(info.key) diff --git 
a/src/zeroconf/_services/types.py b/src/zeroconf/_services/types.py index 63b6d19a..af25dc6d 100644 --- a/src/zeroconf/_services/types.py +++ b/src/zeroconf/_services/types.py @@ -20,8 +20,9 @@ USA """ +from __future__ import annotations + import time -from typing import Optional, Set, Tuple, Union from .._core import Zeroconf from .._services import ServiceListener @@ -37,7 +38,7 @@ class ZeroconfServiceTypes(ServiceListener): def __init__(self) -> None: """Keep track of found services in a set.""" - self.found_services: Set[str] = set() + self.found_services: set[str] = set() def add_service(self, zc: Zeroconf, type_: str, name: str) -> None: """Service added.""" @@ -52,11 +53,11 @@ def remove_service(self, zc: Zeroconf, type_: str, name: str) -> None: @classmethod def find( cls, - zc: Optional[Zeroconf] = None, - timeout: Union[int, float] = 5, + zc: Zeroconf | None = None, + timeout: int | float = 5, interfaces: InterfacesType = InterfaceChoice.All, - ip_version: Optional[IPVersion] = None, - ) -> Tuple[str, ...]: + ip_version: IPVersion | None = None, + ) -> tuple[str, ...]: """ Return all of the advertised services on any local networks. diff --git a/src/zeroconf/_transport.py b/src/zeroconf/_transport.py index b0811094..c8d7699b 100644 --- a/src/zeroconf/_transport.py +++ b/src/zeroconf/_transport.py @@ -20,9 +20,10 @@ USA """ +from __future__ import annotations + import asyncio import socket -from typing import Tuple class _WrappedTransport: @@ -42,7 +43,7 @@ def __init__( is_ipv6: bool, sock: socket.socket, fileno: int, - sock_name: Tuple, + sock_name: tuple, ) -> None: """Initialize the wrapped transport. 
diff --git a/src/zeroconf/_updates.py b/src/zeroconf/_updates.py index 58be33d8..c0bf9b8c 100644 --- a/src/zeroconf/_updates.py +++ b/src/zeroconf/_updates.py @@ -20,7 +20,9 @@ USA """ -from typing import TYPE_CHECKING, List +from __future__ import annotations + +from typing import TYPE_CHECKING from ._dns import DNSRecord from ._record_update import RecordUpdate @@ -40,7 +42,7 @@ class RecordUpdateListener: """ def update_record( # pylint: disable=no-self-use - self, zc: "Zeroconf", now: float, record: DNSRecord + self, zc: Zeroconf, now: float, record: DNSRecord ) -> None: """Update a single record. @@ -49,7 +51,7 @@ def update_record( # pylint: disable=no-self-use """ raise RuntimeError("update_record is deprecated and will be removed in a future version.") - def async_update_records(self, zc: "Zeroconf", now: float_, records: List[RecordUpdate]) -> None: + def async_update_records(self, zc: Zeroconf, now: float_, records: list[RecordUpdate]) -> None: """Update multiple records in one shot. 
All records that are received in a single packet are passed diff --git a/src/zeroconf/_utils/__init__.py b/src/zeroconf/_utils/__init__.py index 30920c6a..584a74ec 100644 --- a/src/zeroconf/_utils/__init__.py +++ b/src/zeroconf/_utils/__init__.py @@ -19,3 +19,5 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ + +from __future__ import annotations diff --git a/src/zeroconf/_utils/asyncio.py b/src/zeroconf/_utils/asyncio.py index 6d070e30..07b3f422 100644 --- a/src/zeroconf/_utils/asyncio.py +++ b/src/zeroconf/_utils/asyncio.py @@ -20,11 +20,13 @@ USA """ +from __future__ import annotations + import asyncio import concurrent.futures import contextlib import sys -from typing import Any, Awaitable, Coroutine, Optional, Set +from typing import Any, Awaitable, Coroutine if sys.version_info[:2] < (3, 11): from async_timeout import timeout as asyncio_timeout @@ -47,7 +49,7 @@ def _set_future_none_if_not_done(fut: asyncio.Future) -> None: fut.set_result(None) -def _resolve_all_futures_to_none(futures: Set[asyncio.Future]) -> None: +def _resolve_all_futures_to_none(futures: set[asyncio.Future]) -> None: """Resolve all futures to None.""" for fut in futures: _set_future_none_if_not_done(fut) @@ -55,7 +57,7 @@ def _resolve_all_futures_to_none(futures: Set[asyncio.Future]) -> None: async def wait_for_future_set_or_timeout( - loop: asyncio.AbstractEventLoop, future_set: Set[asyncio.Future], timeout: float + loop: asyncio.AbstractEventLoop, future_set: set[asyncio.Future], timeout: float ) -> None: """Wait for a future or timeout (in milliseconds).""" future = loop.create_future() @@ -75,7 +77,7 @@ async def wait_event_or_timeout(event: asyncio.Event, timeout: float) -> None: await event.wait() -async def _async_get_all_tasks(loop: asyncio.AbstractEventLoop) -> Set[asyncio.Task]: +async def _async_get_all_tasks(loop: asyncio.AbstractEventLoop) -> set[asyncio.Task]: """Return all tasks running.""" await asyncio.sleep(0) # flush out any 
call_soon_threadsafe # If there are multiple event loops running, all_tasks is not @@ -87,7 +89,7 @@ async def _async_get_all_tasks(loop: asyncio.AbstractEventLoop) -> Set[asyncio.T return set() -async def _wait_for_loop_tasks(wait_tasks: Set[asyncio.Task]) -> None: +async def _wait_for_loop_tasks(wait_tasks: set[asyncio.Task]) -> None: """Wait for the event loop thread we started to shutdown.""" await asyncio.wait(wait_tasks, timeout=_TASK_AWAIT_TIMEOUT) @@ -130,7 +132,7 @@ def shutdown_loop(loop: asyncio.AbstractEventLoop) -> None: loop.call_soon_threadsafe(loop.stop) -def get_running_loop() -> Optional[asyncio.AbstractEventLoop]: +def get_running_loop() -> asyncio.AbstractEventLoop | None: """Check if an event loop is already running.""" with contextlib.suppress(RuntimeError): return asyncio.get_running_loop() diff --git a/src/zeroconf/_utils/ipaddress.py b/src/zeroconf/_utils/ipaddress.py index 64cdfb63..d172d0c9 100644 --- a/src/zeroconf/_utils/ipaddress.py +++ b/src/zeroconf/_utils/ipaddress.py @@ -20,9 +20,11 @@ USA """ +from __future__ import annotations + from functools import cache, lru_cache from ipaddress import AddressValueError, IPv4Address, IPv6Address, NetmaskValueError -from typing import Any, Optional, Union +from typing import Any from .._dns import DNSAddress from ..const import _TYPE_AAAA @@ -99,8 +101,8 @@ def is_loopback(self) -> bool: @lru_cache(maxsize=512) def _cached_ip_addresses( - address: Union[str, bytes, int], -) -> Optional[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: + address: str | bytes | int, +) -> ZeroconfIPv4Address | ZeroconfIPv6Address | None: """Cache IP addresses.""" try: return ZeroconfIPv4Address(address) @@ -119,7 +121,7 @@ def _cached_ip_addresses( def get_ip_address_object_from_record( record: DNSAddress, -) -> Optional[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: +) -> ZeroconfIPv4Address | ZeroconfIPv6Address | None: """Get the IP address object from the record.""" if record.type == _TYPE_AAAA and 
record.scope_id: return ip_bytes_and_scope_to_address(record.address, record.scope_id) @@ -128,7 +130,7 @@ def get_ip_address_object_from_record( def ip_bytes_and_scope_to_address( address: bytes_, scope: int_ -) -> Optional[Union[ZeroconfIPv4Address, ZeroconfIPv6Address]]: +) -> ZeroconfIPv4Address | ZeroconfIPv6Address | None: """Convert the bytes and scope to an IP address object.""" base_address = cached_ip_addresses_wrapper(address) if base_address is not None and base_address.is_link_local: @@ -137,7 +139,7 @@ def ip_bytes_and_scope_to_address( return base_address -def str_without_scope_id(addr: Union[ZeroconfIPv4Address, ZeroconfIPv6Address]) -> str: +def str_without_scope_id(addr: ZeroconfIPv4Address | ZeroconfIPv6Address) -> str: """Return the string representation of the address without the scope id.""" if addr.version == 6: address_str = str(addr) diff --git a/src/zeroconf/_utils/name.py b/src/zeroconf/_utils/name.py index cda01b28..de35f7af 100644 --- a/src/zeroconf/_utils/name.py +++ b/src/zeroconf/_utils/name.py @@ -20,8 +20,9 @@ USA """ +from __future__ import annotations + from functools import lru_cache -from typing import Set from .._exceptions import BadTypeInNameException from ..const import ( @@ -162,7 +163,7 @@ def service_type_name(type_: str, *, strict: bool = True) -> str: # pylint: dis return service_name + trailer -def possible_types(name: str) -> Set[str]: +def possible_types(name: str) -> set[str]: """Build a set of all possible types from a fully qualified name.""" labels = name.split(".") label_count = len(labels) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index 7298bec4..3cc4336b 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -20,13 +20,15 @@ USA """ +from __future__ import annotations + import enum import errno import ipaddress import socket import struct import sys -from typing import Any, List, Optional, Sequence, Tuple, Union, cast +from typing import Any, Sequence, Tuple, 
Union, cast import ifaddr @@ -70,11 +72,11 @@ def _encode_address(address: str) -> bytes: return socket.inet_pton(address_family, address) -def get_all_addresses() -> List[str]: +def get_all_addresses() -> list[str]: return list({addr.ip for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv4}) -def get_all_addresses_v6() -> List[Tuple[Tuple[str, int, int], int]]: +def get_all_addresses_v6() -> list[tuple[tuple[str, int, int], int]]: # IPv6 multicast uses positive indexes for interfaces # TODO: What about multi-address interfaces? return list( @@ -82,7 +84,7 @@ def get_all_addresses_v6() -> List[Tuple[Tuple[str, int, int], int]]: ) -def ip6_to_address_and_index(adapters: List[Any], ip: str) -> Tuple[Tuple[str, int, int], int]: +def ip6_to_address_and_index(adapters: list[Any], ip: str) -> tuple[tuple[str, int, int], int]: if "%" in ip: ip = ip[: ip.index("%")] # Strip scope_id. ipaddr = ipaddress.ip_address(ip) @@ -98,7 +100,7 @@ def ip6_to_address_and_index(adapters: List[Any], ip: str) -> Tuple[Tuple[str, i raise RuntimeError(f"No adapter found for IP address {ip}") -def interface_index_to_ip6_address(adapters: List[Any], index: int) -> Tuple[str, int, int]: +def interface_index_to_ip6_address(adapters: list[Any], index: int) -> tuple[str, int, int]: for adapter in adapters: if adapter.index == index: for adapter_ip in adapter.ips: @@ -110,8 +112,8 @@ def interface_index_to_ip6_address(adapters: List[Any], index: int) -> Tuple[str def ip6_addresses_to_indexes( - interfaces: Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]], -) -> List[Tuple[Tuple[str, int, int], int]]: + interfaces: Sequence[str | int | tuple[tuple[str, int, int], int]], +) -> list[tuple[tuple[str, int, int], int]]: """Convert IPv6 interface addresses to interface indexes. IPv4 addresses are ignored. 
@@ -133,14 +135,14 @@ def ip6_addresses_to_indexes( def normalize_interface_choice( choice: InterfacesType, ip_version: IPVersion = IPVersion.V4Only -) -> List[Union[str, Tuple[Tuple[str, int, int], int]]]: +) -> list[str | tuple[tuple[str, int, int], int]]: """Convert the interfaces choice into internal representation. :param choice: `InterfaceChoice` or list of interface addresses or indexes (IPv6 only). :param ip_address: IP version to use (ignored if `choice` is a list). :returns: List of IP addresses (for IPv4) and indexes (for IPv6). """ - result: List[Union[str, Tuple[Tuple[str, int, int], int]]] = [] + result: list[str | tuple[tuple[str, int, int], int]] = [] if choice is InterfaceChoice.Default: if ip_version != IPVersion.V4Only: # IPv6 multicast uses interface 0 to mean the default @@ -196,7 +198,7 @@ def set_so_reuseport_if_available(s: socket.socket) -> None: def set_mdns_port_socket_options_for_ip_version( s: socket.socket, - bind_addr: Union[Tuple[str], Tuple[str, int, int]], + bind_addr: tuple[str] | tuple[str, int, int], ip_version: IPVersion, ) -> None: """Set ttl/hops and loop for mdns port.""" @@ -219,11 +221,11 @@ def set_mdns_port_socket_options_for_ip_version( def new_socket( - bind_addr: Union[Tuple[str], Tuple[str, int, int]], + bind_addr: tuple[str] | tuple[str, int, int], port: int = _MDNS_PORT, ip_version: IPVersion = IPVersion.V4Only, apple_p2p: bool = False, -) -> Optional[socket.socket]: +) -> socket.socket | None: log.debug( "Creating new socket with port %s, ip_version %s, apple_p2p %s and bind_addr %r", port, @@ -265,7 +267,7 @@ def new_socket( def add_multicast_member( listen_socket: socket.socket, - interface: Union[str, Tuple[Tuple[str, int, int], int]], + interface: str | tuple[tuple[str, int, int], int], ) -> bool: # This is based on assumptions in normalize_interface_choice is_v6 = isinstance(interface, tuple) @@ -331,9 +333,9 @@ def add_multicast_member( def new_respond_socket( - interface: Union[str, Tuple[Tuple[str, int, 
int], int]], + interface: str | tuple[tuple[str, int, int], int], apple_p2p: bool = False, -) -> Optional[socket.socket]: +) -> socket.socket | None: is_v6 = isinstance(interface, tuple) respond_socket = new_socket( ip_version=(IPVersion.V6Only if is_v6 else IPVersion.V4Only), @@ -360,7 +362,7 @@ def create_sockets( unicast: bool = False, ip_version: IPVersion = IPVersion.V4Only, apple_p2p: bool = False, -) -> Tuple[Optional[socket.socket], List[socket.socket]]: +) -> tuple[socket.socket | None, list[socket.socket]]: if unicast: listen_socket = None else: diff --git a/src/zeroconf/_utils/time.py b/src/zeroconf/_utils/time.py index 055e0658..4057f063 100644 --- a/src/zeroconf/_utils/time.py +++ b/src/zeroconf/_utils/time.py @@ -20,6 +20,8 @@ USA """ +from __future__ import annotations + import time _float = float diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index 926ef509..2a29a4bb 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -20,10 +20,12 @@ USA """ +from __future__ import annotations + import asyncio import contextlib from types import TracebackType # used in type hints -from typing import Awaitable, Callable, Dict, List, Optional, Tuple, Type, Union +from typing import Awaitable, Callable from ._core import Zeroconf from ._dns import DNSQuestionType @@ -63,14 +65,14 @@ class AsyncServiceBrowser(_ServiceBrowserBase): def __init__( self, - zeroconf: "Zeroconf", - type_: Union[str, list], - handlers: Optional[Union[ServiceListener, List[Callable[..., None]]]] = None, - listener: Optional[ServiceListener] = None, - addr: Optional[str] = None, + zeroconf: Zeroconf, + type_: str | list, + handlers: ServiceListener | list[Callable[..., None]] | None = None, + listener: ServiceListener | None = None, + addr: str | None = None, port: int = _MDNS_PORT, delay: int = _BROWSER_TIME, - question_type: Optional[DNSQuestionType] = None, + question_type: DNSQuestionType | None = None, ) -> None: super().__init__(zeroconf, type_, handlers, 
listener, addr, port, delay, question_type) self._async_start() @@ -79,15 +81,15 @@ async def async_cancel(self) -> None: """Cancel the browser.""" self._async_cancel() - async def __aenter__(self) -> "AsyncServiceBrowser": + async def __aenter__(self) -> AsyncServiceBrowser: return self async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: await self.async_cancel() return None @@ -98,11 +100,11 @@ class AsyncZeroconfServiceTypes(ZeroconfServiceTypes): @classmethod async def async_find( cls, - aiozc: Optional["AsyncZeroconf"] = None, - timeout: Union[int, float] = 5, + aiozc: AsyncZeroconf | None = None, + timeout: int | float = 5, interfaces: InterfacesType = InterfaceChoice.All, - ip_version: Optional[IPVersion] = None, - ) -> Tuple[str, ...]: + ip_version: IPVersion | None = None, + ) -> tuple[str, ...]: """ Return all of the advertised services on any local networks. @@ -145,9 +147,9 @@ def __init__( self, interfaces: InterfacesType = InterfaceChoice.All, unicast: bool = False, - ip_version: Optional[IPVersion] = None, + ip_version: IPVersion | None = None, apple_p2p: bool = False, - zc: Optional[Zeroconf] = None, + zc: Zeroconf | None = None, ) -> None: """Creates an instance of the Zeroconf class, establishing multicast communications, and listening. 
@@ -170,12 +172,12 @@ def __init__( ip_version=ip_version, apple_p2p=apple_p2p, ) - self.async_browsers: Dict[ServiceListener, AsyncServiceBrowser] = {} + self.async_browsers: dict[ServiceListener, AsyncServiceBrowser] = {} async def async_register_service( self, info: ServiceInfo, - ttl: Optional[int] = None, + ttl: int | None = None, allow_name_change: bool = False, cooperating_responders: bool = False, strict: bool = True, @@ -236,8 +238,8 @@ async def async_get_service_info( type_: str, name: str, timeout: int = 3000, - question_type: Optional[DNSQuestionType] = None, - ) -> Optional[AsyncServiceInfo]: + question_type: DNSQuestionType | None = None, + ) -> AsyncServiceInfo | None: """Returns network's service information for a particular name and type, or None if no service matches by the timeout, which defaults to 3 seconds. @@ -268,14 +270,14 @@ async def async_remove_all_service_listeners(self) -> None: *(self.async_remove_service_listener(listener) for listener in list(self.async_browsers)) ) - async def __aenter__(self) -> "AsyncZeroconf": + async def __aenter__(self) -> AsyncZeroconf: return self async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: await self.async_close() return None diff --git a/src/zeroconf/const.py b/src/zeroconf/const.py index d84cb73b..3b4b3abc 100644 --- a/src/zeroconf/const.py +++ b/src/zeroconf/const.py @@ -20,6 +20,8 @@ USA """ +from __future__ import annotations + import re import socket diff --git a/tests/benchmarks/__init__.py b/tests/benchmarks/__init__.py index e69de29b..9d48db4f 100644 --- a/tests/benchmarks/__init__.py +++ b/tests/benchmarks/__init__.py @@ -0,0 +1 @@ +from __future__ import annotations diff --git a/tests/benchmarks/helpers.py b/tests/benchmarks/helpers.py index 
e701e0b6..4f5f7d66 100644 --- a/tests/benchmarks/helpers.py +++ b/tests/benchmarks/helpers.py @@ -1,5 +1,7 @@ """Benchmark helpers.""" +from __future__ import annotations + import socket from zeroconf import DNSAddress, DNSOutgoing, DNSService, DNSText, const diff --git a/tests/benchmarks/test_cache.py b/tests/benchmarks/test_cache.py index 6fde9438..7813f679 100644 --- a/tests/benchmarks/test_cache.py +++ b/tests/benchmarks/test_cache.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pytest_codspeed import BenchmarkFixture from zeroconf import DNSCache, DNSPointer, current_time_millis diff --git a/tests/benchmarks/test_incoming.py b/tests/benchmarks/test_incoming.py index e0552f3a..6d31e51e 100644 --- a/tests/benchmarks/test_incoming.py +++ b/tests/benchmarks/test_incoming.py @@ -1,5 +1,7 @@ """Benchmark for DNSIncoming.""" +from __future__ import annotations + import socket from pytest_codspeed import BenchmarkFixture diff --git a/tests/benchmarks/test_outgoing.py b/tests/benchmarks/test_outgoing.py index 69de540e..a8db4d6f 100644 --- a/tests/benchmarks/test_outgoing.py +++ b/tests/benchmarks/test_outgoing.py @@ -1,5 +1,7 @@ """Benchmark for DNSOutgoing.""" +from __future__ import annotations + from pytest_codspeed import BenchmarkFixture from zeroconf._protocol.outgoing import State diff --git a/tests/benchmarks/test_send.py b/tests/benchmarks/test_send.py index 7a6d664b..596662a2 100644 --- a/tests/benchmarks/test_send.py +++ b/tests/benchmarks/test_send.py @@ -1,5 +1,7 @@ """Benchmark for sending packets.""" +from __future__ import annotations + import pytest from pytest_codspeed import BenchmarkFixture diff --git a/tests/benchmarks/test_txt_properties.py b/tests/benchmarks/test_txt_properties.py index ad75ab35..72afa0b6 100644 --- a/tests/benchmarks/test_txt_properties.py +++ b/tests/benchmarks/test_txt_properties.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pytest_codspeed import BenchmarkFixture from zeroconf import ServiceInfo 
diff --git a/tests/conftest.py b/tests/conftest.py index ba49cef6..1f323785 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,5 +1,7 @@ """conftest for zeroconf tests.""" +from __future__ import annotations + import threading from unittest.mock import patch diff --git a/tests/services/__init__.py b/tests/services/__init__.py index 30920c6a..584a74ec 100644 --- a/tests/services/__init__.py +++ b/tests/services/__init__.py @@ -19,3 +19,5 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ + +from __future__ import annotations diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 5268c341..986df64e 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._services.browser.""" +from __future__ import annotations + import asyncio import logging import os @@ -863,7 +865,7 @@ def test_legacy_record_update_listener(): class LegacyRecordUpdateListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def update_record(self, zc: "Zeroconf", now: float, record: r.DNSRecord) -> None: + def update_record(self, zc: Zeroconf, now: float, record: r.DNSRecord) -> None: nonlocal updates updates.append(record) diff --git a/tests/services/test_registry.py b/tests/services/test_registry.py index 999e422c..c3ae3a28 100644 --- a/tests/services/test_registry.py +++ b/tests/services/test_registry.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._services.registry.""" +from __future__ import annotations + import socket import unittest diff --git a/tests/services/test_types.py b/tests/services/test_types.py index 811b22c5..63292246 100644 --- a/tests/services/test_types.py +++ b/tests/services/test_types.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._services.types.""" +from __future__ import annotations + import logging import os import socket diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 
86e9e8c7..40ecf816 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -1,5 +1,7 @@ """Unit tests for aio.py.""" +from __future__ import annotations + import asyncio import logging import os diff --git a/tests/test_cache.py b/tests/test_cache.py index f5304cef..9d55435d 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._cache.""" +from __future__ import annotations + import logging import unittest.mock from heapq import heapify, heappop diff --git a/tests/test_circular_imports.py b/tests/test_circular_imports.py index 8bd443a4..74ed1f12 100644 --- a/tests/test_circular_imports.py +++ b/tests/test_circular_imports.py @@ -1,5 +1,7 @@ """Test to check for circular imports.""" +from __future__ import annotations + import asyncio import sys diff --git a/tests/test_dns.py b/tests/test_dns.py index 491e2ca7..246c8dcf 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._dns.""" +from __future__ import annotations + import logging import os import socket diff --git a/tests/test_engine.py b/tests/test_engine.py index 23a03949..b7a94c86 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._engine""" +from __future__ import annotations + import asyncio import itertools import logging diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index cf004d2c..ab181db1 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._exceptions""" +from __future__ import annotations + import logging import unittest.mock diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 80ee7f40..fd0e689c 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._handlers""" +from __future__ import annotations + import asyncio import logging import os @@ -1371,7 +1373,7 @@ async def 
test_record_update_manager_add_listener_callsback_existing_records(): class MyListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: + def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -1973,7 +1975,7 @@ async def test_add_listener_warns_when_not_using_record_update_listener(caplog): class MyListener: """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: + def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -2005,7 +2007,7 @@ async def test_async_updates_iteration_safe(): class OtherListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: + def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) @@ -2014,7 +2016,7 @@ def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.Recor class ListenerThatAddsListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def async_update_records(self, zc: "Zeroconf", now: float, records: list[r.RecordUpdate]) -> None: + def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordUpdate]) -> None: """Update multiple records in one shot.""" updated.extend(records) zc.async_add_listener(other, None) diff --git a/tests/test_history.py b/tests/test_history.py index 606362d1..4c9836ce 100644 --- 
a/tests/test_history.py +++ b/tests/test_history.py @@ -1,5 +1,7 @@ """Unit tests for _history.py.""" +from __future__ import annotations + import zeroconf as r import zeroconf.const as const from zeroconf._history import QuestionHistory diff --git a/tests/test_init.py b/tests/test_init.py index 78fb1e37..a36ff8fd 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf.py""" +from __future__ import annotations + import logging import socket import time diff --git a/tests/test_logger.py b/tests/test_logger.py index ecaf9dd0..aa5b5382 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -1,5 +1,7 @@ """Unit tests for logger.py.""" +from __future__ import annotations + import logging from unittest.mock import call, patch diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 1397c60c..08d7e600 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._protocol""" +from __future__ import annotations + import copy import logging import os diff --git a/tests/test_services.py b/tests/test_services.py index 992070e2..e93174cc 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._services.""" +from __future__ import annotations + import logging import os import socket diff --git a/tests/test_updates.py b/tests/test_updates.py index 1af85736..a057486c 100644 --- a/tests/test_updates.py +++ b/tests/test_updates.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._updates.""" +from __future__ import annotations + import logging import socket import time @@ -45,7 +47,7 @@ def test_legacy_record_update_listener(): class LegacyRecordUpdateListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" - def update_record(self, zc: "Zeroconf", now: float, record: r.DNSRecord) -> None: + def update_record(self, zc: Zeroconf, now: float, record: r.DNSRecord) -> None: nonlocal 
updates updates.append(record) diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 30920c6a..584a74ec 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -19,3 +19,5 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """ + +from __future__ import annotations diff --git a/tests/utils/test_ipaddress.py b/tests/utils/test_ipaddress.py index c6f63aaf..4379f458 100644 --- a/tests/utils/test_ipaddress.py +++ b/tests/utils/test_ipaddress.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._utils.ipaddress.""" +from __future__ import annotations + from zeroconf import const from zeroconf._dns import DNSAddress from zeroconf._utils import ipaddress diff --git a/tests/utils/test_name.py b/tests/utils/test_name.py index 6f2c6b13..1feb7713 100644 --- a/tests/utils/test_name.py +++ b/tests/utils/test_name.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._utils.name.""" +from __future__ import annotations + import socket import pytest diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 17212af2..489a6460 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -1,5 +1,7 @@ """Unit tests for zeroconf._utils.net.""" +from __future__ import annotations + import errno import socket import unittest From bcf4a440a3865a5a9e1021a7f9772fc618694e45 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 31 Jan 2025 13:58:46 -0600 Subject: [PATCH 336/434] chore: fix missed future annotations (#1503) --- src/zeroconf/_listener.py | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 1980a820..925c689e 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -20,11 +20,13 @@ USA """ +from __future__ import annotations + import asyncio import logging import random from functools import partial -from typing import TYPE_CHECKING, Dict, List, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Tuple, cast from ._logger import QuietLogger, log from ._protocol.incoming import DNSIncoming @@ -68,23 +70,21 @@ class AsyncListener: "zc", ) - def __init__(self, zc: "Zeroconf") -> None: + def __init__(self, zc: Zeroconf) -> None: self.zc = zc self._registry = zc.registry self._record_manager = zc.record_manager self._query_handler = zc.query_handler - self.data: Optional[bytes] = None + self.data: bytes | None = None self.last_time: float = 0 - self.last_message: Optional[DNSIncoming] = None - self.transport: Optional[_WrappedTransport] = None - self.sock_description: Optional[str] = None - self._deferred: Dict[str, List[DNSIncoming]] = {} - self._timers: Dict[str, asyncio.TimerHandle] = {} + self.last_message: DNSIncoming | None = None + self.transport: _WrappedTransport | None = None + self.sock_description: str | None = None + self._deferred: dict[str, list[DNSIncoming]] = {} + self._timers: dict[str, asyncio.TimerHandle] = {} super().__init__() - def datagram_received( - self, data: _bytes, addrs: Union[Tuple[str, int], Tuple[str, int, int, int]] - ) -> None: + def datagram_received(self, data: _bytes, addrs: tuple[str, int] | tuple[str, int, int, int]) -> None: data_len = len(data) debug = DEBUG_ENABLED() @@ -108,7 +108,7 @@ def _process_datagram_at_time( data_len: _int, now: _float, data: _bytes, - addrs: 
Union[Tuple[str, int], Tuple[str, int, int, int]], + addrs: tuple[str, int] | tuple[str, int, int, int], ) -> None: if ( self.data == data @@ -129,7 +129,7 @@ def _process_datagram_at_time( return if len(addrs) == 2: - v6_flow_scope: Union[Tuple[()], Tuple[int, int]] = () + v6_flow_scope: tuple[()] | tuple[int, int] = () # https://github.com/python/mypy/issues/1178 addr, port = addrs # type: ignore addr_port = addrs @@ -189,7 +189,7 @@ def handle_query_or_defer( addr: _str, port: _int, transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]], + v6_flow_scope: tuple[()] | tuple[int, int], ) -> None: """Deal with incoming query packets. Provides a response if possible.""" @@ -224,11 +224,11 @@ def _cancel_any_timers_for_addr(self, addr: _str) -> None: def _respond_query( self, - msg: Optional[DNSIncoming], + msg: DNSIncoming | None, addr: _str, port: _int, transport: _WrappedTransport, - v6_flow_scope: Union[Tuple[()], Tuple[int, int]], + v6_flow_scope: tuple[()] | tuple[int, int], ) -> None: """Respond to a query and reassemble any truncated deferred packets.""" self._cancel_any_timers_for_addr(addr) @@ -252,5 +252,5 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: self.transport = wrapped_transport self.sock_description = f"{wrapped_transport.fileno} ({wrapped_transport.sock_name})" - def connection_lost(self, exc: Optional[Exception]) -> None: + def connection_lost(self, exc: Exception | None) -> None: """Handle connection lost.""" From 44457be4571add2f851192db3b37a96d9d27b00e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 31 Jan 2025 14:06:05 -0600 Subject: [PATCH 337/434] feat: eliminate async_timeout dep on python less than 3.11 (#1500) --- poetry.lock | 15 +-------------- pyproject.toml | 1 - src/zeroconf/_core.py | 20 ++++++++++++++------ src/zeroconf/_engine.py | 15 ++++++++------- src/zeroconf/_utils/asyncio.py | 21 +++++++++++---------- src/zeroconf/asyncio.py | 5 +++-- tests/utils/test_asyncio.py | 13 +++++++------ 7 files changed, 44 insertions(+), 46 deletions(-) diff --git a/poetry.lock b/poetry.lock index 14c79f61..962899b2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -12,19 +12,6 @@ files = [ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] -[[package]] -name = "async-timeout" -version = "5.0.1" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.11\"" -files = [ - {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, - {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, -] - [[package]] name = "babel" version = "2.16.0" @@ -1140,4 +1127,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "eb91a0dd1c260f37d2579b4793f537f8017f9e1801e2a372849439f5c9132245" +content-hash = "ea903296f015035c594eb8cce08d4dedc716074e33644033938dfdb5f047d72e" diff --git a/pyproject.toml b/pyproject.toml index f5084253..7514d9a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,6 @@ prerelease = true [tool.poetry.dependencies] python = "^3.9" -async-timeout = {version = ">=3.0.0", python = "<3.11"} ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 01e98e8f..3f007c17 100644 --- a/src/zeroconf/_core.py +++ 
b/src/zeroconf/_core.py @@ -55,8 +55,8 @@ get_running_loop, run_coro_with_timeout, shutdown_loop, - wait_event_or_timeout, wait_for_future_set_or_timeout, + wait_future_or_timeout, ) from ._utils.name import service_type_name from ._utils.net import ( @@ -203,7 +203,15 @@ def __init__( @property def started(self) -> bool: """Check if the instance has started.""" - return bool(not self.done and self.engine.running_event and self.engine.running_event.is_set()) + running_future = self.engine.running_future + return bool( + not self.done + and running_future + and running_future.done() + and not running_future.cancelled() + and not running_future.exception() + and running_future.result() + ) def start(self) -> None: """Start Zeroconf.""" @@ -227,7 +235,7 @@ def _run_loop() -> None: self._loop_thread.start() loop_thread_ready.wait() - async def async_wait_for_start(self) -> None: + async def async_wait_for_start(self, timeout: float = _STARTUP_TIMEOUT) -> None: """Wait for start up for actions that require a running Zeroconf instance. 
Throws NotRunningException if the instance is not running or could @@ -235,9 +243,9 @@ async def async_wait_for_start(self) -> None: """ if self.done: # If the instance was shutdown from under us, raise immediately raise NotRunningException - assert self.engine.running_event is not None - await wait_event_or_timeout(self.engine.running_event, timeout=_STARTUP_TIMEOUT) - if not self.engine.running_event.is_set() or self.done: + assert self.engine.running_future is not None + await wait_future_or_timeout(self.engine.running_future, timeout=timeout) + if not self.started: raise NotRunningException @property diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 7b22f788..8c800a33 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -53,7 +53,7 @@ class AsyncEngine: "loop", "protocols", "readers", - "running_event", + "running_future", "senders", "zc", ) @@ -69,7 +69,7 @@ def __init__( self.protocols: list[AsyncListener] = [] self.readers: list[_WrappedTransport] = [] self.senders: list[_WrappedTransport] = [] - self.running_event: asyncio.Event | None = None + self.running_future: asyncio.Future[bool | None] | None = None self._listen_socket = listen_socket self._respond_sockets = respond_sockets self._cleanup_timer: asyncio.TimerHandle | None = None @@ -81,15 +81,15 @@ def setup( ) -> None: """Set up the instance.""" self.loop = loop - self.running_event = asyncio.Event() + self.running_future = loop.create_future() self.loop.create_task(self._async_setup(loop_thread_ready)) async def _async_setup(self, loop_thread_ready: threading.Event | None) -> None: """Set up the instance.""" self._async_schedule_next_cache_cleanup() await self._async_create_endpoints() - assert self.running_event is not None - self.running_event.set() + assert self.running_future is not None + self.running_future.set_result(True) if loop_thread_ready: loop_thread_ready.set() @@ -142,8 +142,9 @@ async def _async_close(self) -> None: def _async_shutdown(self) -> 
None: """Shutdown transports and sockets.""" - assert self.running_event is not None - self.running_event.clear() + assert self.running_future is not None + assert self.loop is not None + self.running_future = self.loop.create_future() for wrapped_transport in itertools.chain(self.senders, self.readers): wrapped_transport.transport.close() diff --git a/src/zeroconf/_utils/asyncio.py b/src/zeroconf/_utils/asyncio.py index 07b3f422..c92d99d5 100644 --- a/src/zeroconf/_utils/asyncio.py +++ b/src/zeroconf/_utils/asyncio.py @@ -28,11 +28,6 @@ import sys from typing import Any, Awaitable, Coroutine -if sys.version_info[:2] < (3, 11): - from async_timeout import timeout as asyncio_timeout -else: - from asyncio import timeout as asyncio_timeout # type: ignore[attr-defined] - from .._exceptions import EventLoopBlocked from ..const import _LOADED_SYSTEM_TIMEOUT from .time import millis_to_seconds @@ -70,11 +65,17 @@ async def wait_for_future_set_or_timeout( future_set.discard(future) -async def wait_event_or_timeout(event: asyncio.Event, timeout: float) -> None: - """Wait for an event or timeout.""" - with contextlib.suppress(asyncio.TimeoutError): - async with asyncio_timeout(timeout): - await event.wait() +async def wait_future_or_timeout(future: asyncio.Future[bool | None], timeout: float) -> None: + """Wait for a future or timeout.""" + loop = asyncio.get_running_loop() + handle = loop.call_later(timeout, _set_future_none_if_not_done, future) + try: + await future + except asyncio.CancelledError: + if sys.version_info >= (3, 11) and (task := asyncio.current_task()) and task.cancelling(): + raise + finally: + handle.cancel() async def _async_get_all_tasks(loop: asyncio.AbstractEventLoop) -> set[asyncio.Task]: diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index 2a29a4bb..ce5a43eb 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -29,6 +29,7 @@ from ._core import Zeroconf from ._dns import DNSQuestionType +from ._exceptions import 
NotRunningException from ._services import ServiceListener from ._services.browser import _ServiceBrowserBase from ._services.info import AsyncServiceInfo, ServiceInfo @@ -227,8 +228,8 @@ async def async_close(self) -> None: """Ends the background threads, and prevent this instance from servicing further queries.""" if not self.zeroconf.done: - with contextlib.suppress(asyncio.TimeoutError): - await asyncio.wait_for(self.zeroconf.async_wait_for_start(), timeout=1) + with contextlib.suppress(NotRunningException): + await self.zeroconf.async_wait_for_start(timeout=1.0) await self.async_remove_all_service_listeners() await self.async_unregister_all_services() await self.zeroconf._async_close() # pylint: disable=protected-access diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index 09137a71..7989a82c 100644 --- a/tests/utils/test_asyncio.py +++ b/tests/utils/test_asyncio.py @@ -45,16 +45,17 @@ def test_get_running_loop_no_loop() -> None: @pytest.mark.asyncio -async def test_wait_event_or_timeout_times_out() -> None: - """Test wait_event_or_timeout will timeout.""" - test_event = asyncio.Event() - await aioutils.wait_event_or_timeout(test_event, 0.1) +async def test_wait_future_or_timeout_times_out() -> None: + """Test wait_future_or_timeout will timeout.""" + loop = asyncio.get_running_loop() + test_future = loop.create_future() + await aioutils.wait_future_or_timeout(test_future, 0.1) - task = asyncio.ensure_future(test_event.wait()) + task = asyncio.ensure_future(test_future) await asyncio.sleep(0.1) async def _async_wait_or_timeout(): - await aioutils.wait_event_or_timeout(test_event, 0.1) + await aioutils.wait_future_or_timeout(test_future, 0.1) # Test high lock contention await asyncio.gather(*[_async_wait_or_timeout() for _ in range(100)]) From 71106950f3da3f5228f91ebe58085e866960140c Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 31 Jan 2025 20:15:51 +0000 Subject: [PATCH 338/434] 0.143.0 Automatically generated by 
python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 174b0d7e..3a3f6993 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.143.0 (2025-01-31) + +### Features + +- Eliminate async_timeout dep on python less than 3.11 + ([#1500](https://github.com/python-zeroconf/python-zeroconf/pull/1500), + [`44457be`](https://github.com/python-zeroconf/python-zeroconf/commit/44457be4571add2f851192db3b37a96d9d27b00e)) + + ## v0.142.0 (2025-01-30) ### Features diff --git a/pyproject.toml b/pyproject.toml index 7514d9a5..72728b5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.142.0" +version = "0.143.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 26f60cde..b85aee74 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.142.0" +__version__ = "0.143.0" __license__ = "LGPL" From dd46325832324f16c07bd297e3dfeaa16f7e8fc3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Feb 2025 16:06:01 -0600 Subject: [PATCH 339/434] chore(ci): bump the github-actions group with 2 updates (#1504) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e43c63b8..76043c6a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -134,14 +134,14 @@ jobs: 
# Do a dry run of PSR - name: Test release - uses: python-semantic-release/python-semantic-release@v9.16.1 + uses: python-semantic-release/python-semantic-release@v9.17.0 if: github.ref_name != 'master' with: root_options: --noop # On main branch: actual PSR + upload to PyPI & GitHub - name: Release - uses: python-semantic-release/python-semantic-release@v9.16.1 + uses: python-semantic-release/python-semantic-release@v9.17.0 id: release if: github.ref_name == 'master' with: @@ -237,7 +237,7 @@ jobs: path: dist merge-multiple: true - - uses: pypa/gh-action-pypi-publish@v1.12.3 + - uses: pypa/gh-action-pypi-publish@v1.12.4 with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} From e5226161f0b183d3786ecd917efaea8b36a94673 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Feb 2025 14:14:51 -0600 Subject: [PATCH 340/434] chore(pre-commit.ci): pre-commit autoupdate (#1507) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 246493ed..10dee2b0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -39,13 +39,13 @@ repos: - id: pyupgrade args: [--py38-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.3 + rev: v0.9.4 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - id: ruff-format - repo: https://github.com/codespell-project/codespell - rev: v2.4.0 + rev: v2.4.1 hooks: - id: codespell - repo: https://github.com/PyCQA/flake8 From e53f05d5ceab52f60c61b76cf6c24adc0fa78fa6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Feb 2025 14:15:02 -0600 Subject: [PATCH 341/434] chore(deps-dev): bump pytest-asyncio from 0.25.2 to 0.25.3 (#1506) Co-authored-by: dependabot[bot] 
<49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 61 +++++------------------------------------------------ 1 file changed, 5 insertions(+), 56 deletions(-) diff --git a/poetry.lock b/poetry.lock index 962899b2..09c1dd7e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "alabaster" @@ -6,7 +6,6 @@ version = "0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -18,7 +17,6 @@ version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, @@ -33,7 +31,6 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["docs"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -45,7 +42,6 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -125,7 +121,6 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["docs"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -227,8 +222,6 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev", "docs"] -markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -240,7 +233,6 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -318,7 +310,6 @@ version = "3.0.11" description = "The Cython compiler for writing C extensions in the Python language." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -groups = ["dev"] files = [ {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, @@ -394,7 +385,6 @@ version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -406,8 +396,6 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["dev"] -markers = 
"python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -422,7 +410,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -437,7 +424,6 @@ version = "0.2.0" description = "Cross-platform network interface and IP address enumeration library" optional = false python-versions = "*" -groups = ["main"] files = [ {file = "ifaddr-0.2.0-py3-none-any.whl", hash = "sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, @@ -449,7 +435,6 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["docs"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -461,8 +446,6 @@ version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = 
"sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, @@ -486,7 +469,6 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -498,7 +480,6 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["docs"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -516,7 +497,6 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -541,7 +521,6 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -612,7 +591,6 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -624,7 +602,6 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -636,7 +613,6 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -652,7 +628,6 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -664,7 +639,6 @@ version = "2.19.1" 
description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -679,7 +653,6 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -698,14 +671,13 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.25.2" +version = "0.25.3" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"}, - {file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"}, + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, ] [package.dependencies] @@ -721,7 +693,6 @@ version = "3.1.2" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, @@ -754,7 +725,6 @@ version = "6.0.0" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, @@ -773,7 +743,6 @@ version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, @@ -788,7 +757,6 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -810,7 +778,6 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" -groups = ["dev"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -830,7 +797,6 @@ version = "75.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, @@ -851,7 +817,6 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
optional = false python-versions = "*" -groups = ["docs"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -863,7 +828,6 @@ version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -900,7 +864,6 @@ version = "3.0.2" description = "Read the Docs theme for Sphinx" optional = false python-versions = ">=3.8" -groups = ["docs"] files = [ {file = "sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13"}, {file = "sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85"}, @@ -920,7 +883,6 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -937,7 +899,6 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = 
"sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -954,7 +915,6 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -971,7 +931,6 @@ version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" optional = false python-versions = ">=2.7" -groups = ["docs"] files = [ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, @@ -986,7 +945,6 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" -groups = ["docs"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -1001,7 +959,6 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -1018,7 +975,6 @@ version = 
"2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -1035,7 +991,6 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -1070,7 +1025,6 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] -markers = {dev = "python_full_version <= \"3.11.0a6\"", docs = "python_version < \"3.11\""} [[package]] name = "typing-extensions" @@ -1078,8 +1032,6 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version < \"3.11\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -1091,7 +1043,6 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["docs"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -1109,8 +1060,6 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" -groups = ["dev", "docs"] -markers = "python_version < \"3.10\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, @@ -1125,6 +1074,6 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", type = ["pytest-mypy"] [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = "^3.9" content-hash = "ea903296f015035c594eb8cce08d4dedc716074e33644033938dfdb5f047d72e" From e9479237cb48e9d25ff56ce2906713b14d16d364 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Feb 2025 14:15:09 -0600 Subject: [PATCH 342/434] chore(deps-dev): bump pytest-codspeed from 3.1.2 to 3.2.0 (#1505) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index 09c1dd7e..258b28f2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -689,23 +689,23 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-codspeed" -version = "3.1.2" +version = "3.2.0" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" files = [ - {file = 
"pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, - {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, - {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c84e591a7a0f67d45e2dc9fd05b276971a3aabcab7478fe43363ebefec1358f4"}, - {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6ae6d094247156407770e6b517af70b98862dd59a3c31034aede11d5f71c32c"}, - {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0f264991de5b5cdc118b96fc671386cca3f0f34e411482939bf2459dc599097"}, - {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0695a4bcd5ff04e8379124dba5d9795ea5e0cadf38be7a0406432fc1467b555"}, - {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc356c8dcaaa883af83310f397ac06c96fac9b8a1146e303d4b374b2cb46a18"}, - {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc8a5d0366322a75cf562f7d8d672d28c1cf6948695c4dddca50331e08f6b3d5"}, - {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c5fe7a19b72f54f217480b3b527102579547b1de9fe3acd9e66cb4629ff46c8"}, - {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b67205755a665593f6521a98317d02a9d07d6fdc593f6634de2c94dea47a3055"}, - {file = 
"pytest_codspeed-3.1.2-py3-none-any.whl", hash = "sha256:5e7ed0315e33496c5c07dba262b50303b8d0bc4c3d10bf1d422a41e70783f1cb"}, - {file = "pytest_codspeed-3.1.2.tar.gz", hash = "sha256:09c1733af3aab35e94a621aa510f2d2114f65591e6f644c42ca3f67547edad4b"}, + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5165774424c7ab8db7e7acdb539763a0e5657996effefdf0664d7fd95158d34"}, + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bd55f92d772592c04a55209950c50880413ae46876e66bd349ef157075ca26c"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf6f56067538f4892baa8d7ab5ef4e45bb59033be1ef18759a2c7fc55b32035"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39a687b05c3d145642061b45ea78e47e12f13ce510104d1a2cda00eee0e36f58"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46a1afaaa1ac4c2ca5b0700d31ac46d80a27612961d031067d73c6ccbd8d3c2b"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48ce3af3dfa78413ed3d69d1924043aa1519048dbff46edccf8f35a25dab3c2"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66692506d33453df48b36a84703448cb8b22953eea51f03fbb2eb758dc2bdc4f"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:479774f80d0bdfafa16112700df4dbd31bf2a6757fac74795fd79c0a7b3c389b"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:109f9f4dd1088019c3b3f887d003b7d65f98a7736ca1d457884f5aa293e8e81c"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2f69a03b52c9bb041aec1b8ee54b7b6c37a6d0a948786effa4c71157765b6da"}, + {file = "pytest_codspeed-3.2.0-py3-none-any.whl", hash = "sha256:54b5c2e986d6a28e7b0af11d610ea57bd5531cec8326abe486f1b55b09d91c39"}, + {file = "pytest_codspeed-3.2.0.tar.gz", hash = "sha256:f9d1b1a3b2c69cdc0490a1e8b1ced44bffbd0e8e21d81a7160cfdd923f6e8155"}, ] [package.dependencies] From ffc902098c97ff36d0da3e7bd83ac342272e3e71 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 3 Feb 2025 14:37:16 -0600 Subject: [PATCH 343/434] chore: migrate to Python 3.13 for benchmarks (#1508) --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 76043c6a..937166fe 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -95,10 +95,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Setup Python 3.12 + - name: Setup Python 3.13 uses: actions/setup-python@v5 with: - python-version: 3.12 + python-version: 3.13 - uses: snok/install-poetry@v1.4.1 - name: Install Dependencies run: | From 3dda6d57e005e97a4b6d7b72e6249ff8c7b9aab0 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Wed, 5 Feb 2025 00:57:08 +0100 Subject: [PATCH 344/434] chore: limit `prettier` to specific files (#1509) --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 10dee2b0..76171d45 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,6 +33,7 @@ repos: hooks: - id: prettier args: ["--tab-width", "2"] + files: ".(css|html|js|json|md|toml|yaml)$" - repo: https://github.com/asottile/pyupgrade rev: v3.19.1 hooks: From 468e00cc6a2b8cf6104d18a857905b9faf3a38b6 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Sun, 
9 Feb 2025 17:58:23 +0100 Subject: [PATCH 345/434] chore: update to modern typing (#1511) --- .pre-commit-config.yaml | 8 +++--- pyproject.toml | 2 +- src/zeroconf/_cache.py | 5 ++-- src/zeroconf/_core.py | 2 +- src/zeroconf/_handlers/answers.py | 3 +-- src/zeroconf/_listener.py | 4 +-- src/zeroconf/_protocol/outgoing.py | 3 ++- src/zeroconf/_services/browser.py | 13 ++++----- src/zeroconf/_services/info.py | 10 +++---- src/zeroconf/_utils/asyncio.py | 3 ++- src/zeroconf/_utils/net.py | 11 ++++---- src/zeroconf/asyncio.py | 3 ++- tests/conftest.py | 8 +++--- tests/test_init.py | 7 ++--- tests/test_listener.py | 5 ++-- tests/test_logger.py | 42 +++++++++++++++++------------- tests/utils/test_net.py | 14 ++++++---- 17 files changed, 79 insertions(+), 64 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 76171d45..8ea453bc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.1.1 + rev: v4.2.1 hooks: - id: commitizen stages: [commit-msg] @@ -38,9 +38,9 @@ repos: rev: v3.19.1 hooks: - id: pyupgrade - args: [--py38-plus] + args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.4 + rev: v0.9.5 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] @@ -54,7 +54,7 @@ repos: hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.14.1 + rev: v1.15.0 hooks: - id: mypy additional_dependencies: [] diff --git a/pyproject.toml b/pyproject.toml index 72728b5b..c9a28a93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -86,7 +86,7 @@ sphinx = "^7.4.7 || ^8.1.3" sphinx-rtd-theme = "^3.0.2" [tool.ruff] -target-version = "py38" +target-version = "py39" line-length = 110 [tool.ruff.lint] diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index 5ac43f30..c8e2686e 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -22,8 +22,9 @@ from __future__ import annotations +from 
collections.abc import Iterable from heapq import heapify, heappop, heappush -from typing import Dict, Iterable, Union, cast +from typing import Union, cast from ._dns import ( DNSAddress, @@ -40,7 +41,7 @@ _UNIQUE_RECORD_TYPES = (DNSAddress, DNSHinfo, DNSPointer, DNSText, DNSService) _UniqueRecordsType = Union[DNSAddress, DNSHinfo, DNSPointer, DNSText, DNSService] -_DNSRecordCacheType = Dict[str, Dict[DNSRecord, DNSRecord]] +_DNSRecordCacheType = dict[str, dict[DNSRecord, DNSRecord]] _DNSRecord = DNSRecord _str = str _float = float diff --git a/src/zeroconf/_core.py b/src/zeroconf/_core.py index 3f007c17..5e3a7f46 100644 --- a/src/zeroconf/_core.py +++ b/src/zeroconf/_core.py @@ -26,8 +26,8 @@ import logging import sys import threading +from collections.abc import Awaitable from types import TracebackType -from typing import Awaitable from ._cache import DNSCache from ._dns import DNSQuestion, DNSQuestionType diff --git a/src/zeroconf/_handlers/answers.py b/src/zeroconf/_handlers/answers.py index ec53eb84..07b0a65a 100644 --- a/src/zeroconf/_handlers/answers.py +++ b/src/zeroconf/_handlers/answers.py @@ -23,13 +23,12 @@ from __future__ import annotations from operator import attrgetter -from typing import Dict, Set from .._dns import DNSQuestion, DNSRecord from .._protocol.outgoing import DNSOutgoing from ..const import _FLAGS_AA, _FLAGS_QR_RESPONSE -_AnswerWithAdditionalsType = Dict[DNSRecord, Set[DNSRecord]] +_AnswerWithAdditionalsType = dict[DNSRecord, set[DNSRecord]] int_ = int diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 925c689e..406273e9 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -26,7 +26,7 @@ import logging import random from functools import partial -from typing import TYPE_CHECKING, Tuple, cast +from typing import TYPE_CHECKING, cast from ._logger import QuietLogger, log from ._protocol.incoming import DNSIncoming @@ -134,7 +134,7 @@ def _process_datagram_at_time( addr, port = addrs # type: 
ignore addr_port = addrs if TYPE_CHECKING: - addr_port = cast(Tuple[str, int], addr_port) + addr_port = cast(tuple[str, int], addr_port) scope = None else: # https://github.com/python/mypy/issues/1178 diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index f5d09821..6837e39a 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -24,8 +24,9 @@ import enum import logging +from collections.abc import Sequence from struct import Struct -from typing import TYPE_CHECKING, Sequence +from typing import TYPE_CHECKING from .._dns import DNSPointer, DNSQuestion, DNSRecord from .._exceptions import NamePartTooLongException diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index c2ab115b..ab8c050d 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -29,16 +29,13 @@ import threading import time import warnings +from collections.abc import Iterable from functools import partial from types import TracebackType # used in type hints from typing import ( TYPE_CHECKING, Any, Callable, - Dict, - Iterable, - List, - Set, cast, ) @@ -96,7 +93,7 @@ bool_ = bool str_ = str -_QuestionWithKnownAnswers = Dict[DNSQuestion, Set[DNSPointer]] +_QuestionWithKnownAnswers = dict[DNSQuestion, set[DNSPointer]] heappop = heapq.heappop heappush = heapq.heappush @@ -282,7 +279,7 @@ def generate_service_query( log.debug("Asking %s was suppressed by the question history", question) continue if TYPE_CHECKING: - pointer_known_answers = cast(Set[DNSPointer], known_answers) + pointer_known_answers = cast(set[DNSPointer], known_answers) else: pointer_known_answers = known_answers questions_with_known_answers[question] = pointer_known_answers @@ -618,10 +615,10 @@ def __init__( self._query_sender_task: asyncio.Task | None = None if hasattr(handlers, "add_service"): - listener = cast("ServiceListener", handlers) + listener = cast(ServiceListener, handlers) handlers = None 
- handlers = cast(List[Callable[..., None]], handlers or []) + handlers = cast(list[Callable[..., None]], handlers or []) if listener: handlers.append(_service_state_changed_from_listener(listener)) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 67777459..b22fc805 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -24,7 +24,7 @@ import asyncio import random -from typing import TYPE_CHECKING, Dict, List, Optional, cast +from typing import TYPE_CHECKING, cast from .._cache import DNSCache from .._dns import ( @@ -395,7 +395,7 @@ def _set_properties(self, properties: dict[str | bytes, str | bytes | None]) -> # as-is, without decoding them, otherwise calling # self.properties will lazy decode them, which is expensive. if TYPE_CHECKING: - self._properties = cast("Dict[bytes, Optional[bytes]]", properties) + self._properties = cast(dict[bytes, bytes | None], properties) else: self._properties = properties self.text = result @@ -462,7 +462,7 @@ def _set_ipv6_addresses_from_cache(self, zc: Zeroconf, now: float_) -> None: """Set IPv6 addresses from the cache.""" if TYPE_CHECKING: self._ipv6_addresses = cast( - "List[ZeroconfIPv6Address]", + list[ZeroconfIPv6Address], self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_AAAA), ) else: @@ -472,7 +472,7 @@ def _set_ipv4_addresses_from_cache(self, zc: Zeroconf, now: float_) -> None: """Set IPv4 addresses from the cache.""" if TYPE_CHECKING: self._ipv4_addresses = cast( - "List[ZeroconfIPv4Address]", + list[ZeroconfIPv4Address], self._get_ip_addresses_from_cache_lifo(zc, now, _TYPE_A), ) else: @@ -724,7 +724,7 @@ def _get_address_records_from_cache_by_type(self, zc: Zeroconf, _type: int_) -> cache = zc.cache if TYPE_CHECKING: records = cast( - "List[DNSAddress]", + list[DNSAddress], cache.get_all_by_details(self.server_key, _type, _CLASS_IN), ) else: diff --git a/src/zeroconf/_utils/asyncio.py b/src/zeroconf/_utils/asyncio.py index c92d99d5..86090601 100644 
--- a/src/zeroconf/_utils/asyncio.py +++ b/src/zeroconf/_utils/asyncio.py @@ -26,7 +26,8 @@ import concurrent.futures import contextlib import sys -from typing import Any, Awaitable, Coroutine +from collections.abc import Awaitable, Coroutine +from typing import Any from .._exceptions import EventLoopBlocked from ..const import _LOADED_SYSTEM_TIMEOUT diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index 3cc4336b..3321211f 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -28,7 +28,8 @@ import socket import struct import sys -from typing import Any, Sequence, Tuple, Union, cast +from collections.abc import Sequence +from typing import Any, Union, cast import ifaddr @@ -42,7 +43,7 @@ class InterfaceChoice(enum.Enum): All = 2 -InterfacesType = Union[Sequence[Union[str, int, Tuple[Tuple[str, int, int], int]]], InterfaceChoice] +InterfacesType = Union[Sequence[Union[str, int, tuple[tuple[str, int, int], int]]], InterfaceChoice] @enum.unique @@ -93,7 +94,7 @@ def ip6_to_address_and_index(adapters: list[Any], ip: str) -> tuple[tuple[str, i # IPv6 addresses are represented as tuples if isinstance(adapter_ip.ip, tuple) and ipaddress.ip_address(adapter_ip.ip[0]) == ipaddr: return ( - cast(Tuple[str, int, int], adapter_ip.ip), + cast(tuple[str, int, int], adapter_ip.ip), cast(int, adapter.index), ) @@ -106,7 +107,7 @@ def interface_index_to_ip6_address(adapters: list[Any], index: int) -> tuple[str for adapter_ip in adapter.ips: # IPv6 addresses are represented as tuples if isinstance(adapter_ip.ip, tuple): - return cast(Tuple[str, int, int], adapter_ip.ip) + return cast(tuple[str, int, int], adapter_ip.ip) raise RuntimeError(f"No adapter found for index {index}") @@ -340,7 +341,7 @@ def new_respond_socket( respond_socket = new_socket( ip_version=(IPVersion.V6Only if is_v6 else IPVersion.V4Only), apple_p2p=apple_p2p, - bind_addr=cast(Tuple[Tuple[str, int, int], int], interface)[0] if is_v6 else (cast(str, interface),), + 
bind_addr=cast(tuple[tuple[str, int, int], int], interface)[0] if is_v6 else (cast(str, interface),), ) if not respond_socket: return None diff --git a/src/zeroconf/asyncio.py b/src/zeroconf/asyncio.py index ce5a43eb..a0f4a99d 100644 --- a/src/zeroconf/asyncio.py +++ b/src/zeroconf/asyncio.py @@ -24,8 +24,9 @@ import asyncio import contextlib +from collections.abc import Awaitable from types import TracebackType # used in type hints -from typing import Awaitable, Callable +from typing import Callable from ._core import Zeroconf from ._dns import DNSQuestionType diff --git a/tests/conftest.py b/tests/conftest.py index 1f323785..531c810b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -23,9 +23,11 @@ def verify_threads_ended(): @pytest.fixture def run_isolated(): """Change the mDNS port to run the test in isolation.""" - with patch.object(query_handler, "_MDNS_PORT", 5454), patch.object( - _core, "_MDNS_PORT", 5454 - ), patch.object(const, "_MDNS_PORT", 5454): + with ( + patch.object(query_handler, "_MDNS_PORT", 5454), + patch.object(_core, "_MDNS_PORT", 5454), + patch.object(const, "_MDNS_PORT", 5454), + ): yield diff --git a/tests/test_init.py b/tests/test_init.py index a36ff8fd..5ccb9ef6 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -89,9 +89,10 @@ def test_large_packet_exception_log_handling(self): # instantiate a zeroconf instance zc = Zeroconf(interfaces=["127.0.0.1"]) - with patch("zeroconf._logger.log.warning") as mocked_log_warn, patch( - "zeroconf._logger.log.debug" - ) as mocked_log_debug: + with ( + patch("zeroconf._logger.log.warning") as mocked_log_warn, + patch("zeroconf._logger.log.debug") as mocked_log_debug, + ): # now that we have a long packet in our possession, let's verify the # exception handling. 
out = r.DNSOutgoing(const._FLAGS_QR_RESPONSE | const._FLAGS_AA) diff --git a/tests/test_listener.py b/tests/test_listener.py index a55fc143..4897eabe 100644 --- a/tests/test_listener.py +++ b/tests/test_listener.py @@ -59,8 +59,9 @@ def test_guard_against_oversized_packets(): try: # We are patching to generate an oversized packet - with patch.object(outgoing, "_MAX_MSG_ABSOLUTE", 100000), patch.object( - outgoing, "_MAX_MSG_TYPICAL", 100000 + with ( + patch.object(outgoing, "_MAX_MSG_ABSOLUTE", 100000), + patch.object(outgoing, "_MAX_MSG_TYPICAL", 100000), ): over_sized_packet = generated.packets()[0] assert len(over_sized_packet) > const._MAX_MSG_ABSOLUTE diff --git a/tests/test_logger.py b/tests/test_logger.py index aa5b5382..4e09aa3b 100644 --- a/tests/test_logger.py +++ b/tests/test_logger.py @@ -27,17 +27,19 @@ def test_log_warning_once(): """Test we only log with warning level once.""" QuietLogger._seen_logs = {} quiet_logger = QuietLogger() - with patch("zeroconf._logger.log.warning") as mock_log_warning, patch( - "zeroconf._logger.log.debug" - ) as mock_log_debug: + with ( + patch("zeroconf._logger.log.warning") as mock_log_warning, + patch("zeroconf._logger.log.debug") as mock_log_debug, + ): quiet_logger.log_warning_once("the warning") assert mock_log_warning.mock_calls assert not mock_log_debug.mock_calls - with patch("zeroconf._logger.log.warning") as mock_log_warning, patch( - "zeroconf._logger.log.debug" - ) as mock_log_debug: + with ( + patch("zeroconf._logger.log.warning") as mock_log_warning, + patch("zeroconf._logger.log.debug") as mock_log_debug, + ): quiet_logger.log_warning_once("the warning") assert not mock_log_warning.mock_calls @@ -48,17 +50,19 @@ def test_log_exception_warning(): """Test we only log with warning level once.""" QuietLogger._seen_logs = {} quiet_logger = QuietLogger() - with patch("zeroconf._logger.log.warning") as mock_log_warning, patch( - "zeroconf._logger.log.debug" - ) as mock_log_debug: + with ( + 
patch("zeroconf._logger.log.warning") as mock_log_warning, + patch("zeroconf._logger.log.debug") as mock_log_debug, + ): quiet_logger.log_exception_warning("the exception warning") assert mock_log_warning.mock_calls assert not mock_log_debug.mock_calls - with patch("zeroconf._logger.log.warning") as mock_log_warning, patch( - "zeroconf._logger.log.debug" - ) as mock_log_debug: + with ( + patch("zeroconf._logger.log.warning") as mock_log_warning, + patch("zeroconf._logger.log.debug") as mock_log_debug, + ): quiet_logger.log_exception_warning("the exception warning") assert not mock_log_warning.mock_calls @@ -85,17 +89,19 @@ def test_log_exception_once(): QuietLogger._seen_logs = {} quiet_logger = QuietLogger() exc = Exception() - with patch("zeroconf._logger.log.warning") as mock_log_warning, patch( - "zeroconf._logger.log.debug" - ) as mock_log_debug: + with ( + patch("zeroconf._logger.log.warning") as mock_log_warning, + patch("zeroconf._logger.log.debug") as mock_log_debug, + ): quiet_logger.log_exception_once(exc, "the exceptional exception warning") assert mock_log_warning.mock_calls assert not mock_log_debug.mock_calls - with patch("zeroconf._logger.log.warning") as mock_log_warning, patch( - "zeroconf._logger.log.debug" - ) as mock_log_debug: + with ( + patch("zeroconf._logger.log.warning") as mock_log_warning, + patch("zeroconf._logger.log.debug") as mock_log_debug, + ): quiet_logger.log_exception_once(exc, "the exceptional exception warning") assert not mock_log_warning.mock_calls diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 489a6460..f7e51c86 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -82,9 +82,11 @@ def test_ip6_addresses_to_indexes(): def test_normalize_interface_choice_errors(): """Test we generate exception on invalid input.""" - with patch("zeroconf._utils.net.get_all_addresses", return_value=[]), patch( - "zeroconf._utils.net.get_all_addresses_v6", return_value=[] - ), pytest.raises(RuntimeError): + 
with ( + patch("zeroconf._utils.net.get_all_addresses", return_value=[]), + patch("zeroconf._utils.net.get_all_addresses_v6", return_value=[]), + pytest.raises(RuntimeError), + ): netutils.normalize_interface_choice(r.InterfaceChoice.All) with pytest.raises(TypeError): @@ -128,8 +130,10 @@ def _log_error(*args): errors_logged.append(args) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - with pytest.raises(OSError), patch.object(netutils.log, "error", _log_error), patch( - "socket.socket.setsockopt", side_effect=OSError + with ( + pytest.raises(OSError), + patch.object(netutils.log, "error", _log_error), + patch("socket.socket.setsockopt", side_effect=OSError), ): netutils.disable_ipv6_only_or_raise(sock) From d54da2dca6b75b2d5cf37b0f52290bac3d2b4edf Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Mon, 10 Feb 2025 17:06:12 +0100 Subject: [PATCH 346/434] chore: ignore c files generated by cython (#1512) --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 0af9ce1e..430fbec9 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ docs/_build/ .vscode /dist/ /zeroconf.egg-info/ +/src/**/*.c From c9aa89911a79dc661df3d887875fde57b34132f7 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Mon, 10 Feb 2025 17:20:46 +0100 Subject: [PATCH 347/434] chore: set cython max line length to 110 (#1513) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c9a28a93..0bb177ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -198,5 +198,5 @@ build-backend = "poetry.core.masonry.api" ignore-words-list = ["additionals", "HASS"] [tool.cython-lint] -max-line-length = 88 +max-line-length = 110 ignore = ['E501'] # too many to fix right now From e429d6661cd8a486575e8d7f8ac02bfc006c3e7e Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Mon, 10 Feb 2025 19:00:41 +0100 Subject: [PATCH 348/434] chore: add typing, enable additional mypy checks (#1514) --- .pre-commit-config.yaml | 2 +- 
pyproject.toml | 10 ++++++++-- src/zeroconf/_dns.py | 18 +++++++++--------- src/zeroconf/_listener.py | 4 ++-- src/zeroconf/_utils/net.py | 8 ++++---- 5 files changed, 24 insertions(+), 18 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8ea453bc..afeeffbf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -57,7 +57,7 @@ repos: rev: v1.15.0 hooks: - id: mypy - additional_dependencies: [] + additional_dependencies: [ifaddr] - repo: https://github.com/MarcoGorelli/cython-lint rev: v0.16.6 hooks: diff --git a/pyproject.toml b/pyproject.toml index 0bb177ba..3bcd954a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -162,15 +162,21 @@ profile = "black" known_first_party = ["zeroconf", "tests"] [tool.mypy] +warn_unused_configs = true check_untyped_defs = true disallow_any_generics = false # turn this on when we drop 3.7/3.8 support disallow_incomplete_defs = true disallow_untyped_defs = true +warn_incomplete_stub = true mypy_path = "src/" -no_implicit_optional = true show_error_codes = true +warn_redundant_casts = false # Activate for cleanup. +warn_return_any = true warn_unreachable = true -warn_unused_ignores = false +warn_unused_ignores = false # Does not always work properly, activate for cleanup. +extra_checks = true +strict_equality = true +strict_bytes = true # Will be true by default with mypy v2 release. 
exclude = [ 'docs/*', 'bench/*', diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index bc0a3948..591eb018 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -79,7 +79,7 @@ def _fast_init_entry(self, name: str, type_: _int, class_: _int) -> None: self.class_ = class_ & _CLASS_MASK self.unique = (class_ & _CLASS_UNIQUE) != 0 - def _dns_entry_matches(self, other) -> bool: # type: ignore[no-untyped-def] + def _dns_entry_matches(self, other: DNSEntry) -> bool: return self.key == other.key and self.type == other.type and self.class_ == other.class_ def __eq__(self, other: Any) -> bool: @@ -135,7 +135,7 @@ def __eq__(self, other: Any) -> bool: @property def max_size(self) -> int: """Maximum size of the question in the packet.""" - return len(self.name.encode("utf-8")) + _LEN_BYTE + _LEN_SHORT + _LEN_SHORT # type # class + return len(self.name.encode("utf-8")) + _LEN_BYTE + _LEN_SHORT + _LEN_SHORT @property def unicast(self) -> bool: @@ -199,7 +199,7 @@ def suppressed_by(self, msg: DNSIncoming) -> bool: return True return False - def _suppressed_by_answer(self, other) -> bool: # type: ignore[no-untyped-def] + def _suppressed_by_answer(self, other: DNSRecord) -> bool: """Returns true if another record has same name, type and class, and if its TTL is at least half of this record's.""" return self == other and other.ttl > (self.ttl / 2) @@ -285,7 +285,7 @@ def __eq__(self, other: Any) -> bool: """Tests equality on address""" return isinstance(other, DNSAddress) and self._eq(other) - def _eq(self, other) -> bool: # type: ignore[no-untyped-def] + def _eq(self, other: DNSAddress) -> bool: return ( self.address == other.address and self.scope_id == other.scope_id @@ -344,7 +344,7 @@ def __eq__(self, other: Any) -> bool: """Tests equality on cpu and os.""" return isinstance(other, DNSHinfo) and self._eq(other) - def _eq(self, other) -> bool: # type: ignore[no-untyped-def] + def _eq(self, other: DNSHinfo) -> bool: """Tests equality on cpu and os.""" return 
self.cpu == other.cpu and self.os == other.os and self._dns_entry_matches(other) @@ -399,7 +399,7 @@ def __eq__(self, other: Any) -> bool: """Tests equality on alias.""" return isinstance(other, DNSPointer) and self._eq(other) - def _eq(self, other) -> bool: # type: ignore[no-untyped-def] + def _eq(self, other: DNSPointer) -> bool: """Tests equality on alias.""" return self.alias_key == other.alias_key and self._dns_entry_matches(other) @@ -447,7 +447,7 @@ def __eq__(self, other: Any) -> bool: """Tests equality on text.""" return isinstance(other, DNSText) and self._eq(other) - def _eq(self, other) -> bool: # type: ignore[no-untyped-def] + def _eq(self, other: DNSText) -> bool: """Tests equality on text.""" return self.text == other.text and self._dns_entry_matches(other) @@ -510,7 +510,7 @@ def __eq__(self, other: Any) -> bool: """Tests equality on priority, weight, port and server""" return isinstance(other, DNSService) and self._eq(other) - def _eq(self, other) -> bool: # type: ignore[no-untyped-def] + def _eq(self, other: DNSService) -> bool: """Tests equality on priority, weight, port and server.""" return ( self.priority == other.priority @@ -585,7 +585,7 @@ def __eq__(self, other: Any) -> bool: """Tests equality on next_name and rdtypes.""" return isinstance(other, DNSNsec) and self._eq(other) - def _eq(self, other) -> bool: # type: ignore[no-untyped-def] + def _eq(self, other: DNSNsec) -> bool: """Tests equality on next_name and rdtypes.""" return ( self.next_name == other.next_name diff --git a/src/zeroconf/_listener.py b/src/zeroconf/_listener.py index 406273e9..ed503169 100644 --- a/src/zeroconf/_listener.py +++ b/src/zeroconf/_listener.py @@ -131,14 +131,14 @@ def _process_datagram_at_time( if len(addrs) == 2: v6_flow_scope: tuple[()] | tuple[int, int] = () # https://github.com/python/mypy/issues/1178 - addr, port = addrs # type: ignore + addr, port = addrs addr_port = addrs if TYPE_CHECKING: addr_port = cast(tuple[str, int], addr_port) scope = None 
else: # https://github.com/python/mypy/issues/1178 - addr, port, flow, scope = addrs # type: ignore + addr, port, flow, scope = addrs if debug: # pragma: no branch log.debug("IPv6 scope_id %d associated to the receiving interface", scope) v6_flow_scope = (flow, scope) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index 3321211f..62033ad5 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -74,14 +74,14 @@ def _encode_address(address: str) -> bytes: def get_all_addresses() -> list[str]: - return list({addr.ip for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv4}) + return list({addr.ip for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv4}) # type: ignore[misc] def get_all_addresses_v6() -> list[tuple[tuple[str, int, int], int]]: # IPv6 multicast uses positive indexes for interfaces # TODO: What about multi-address interfaces? return list( - {(addr.ip, iface.index) for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv6} + {(addr.ip, iface.index) for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv6} # type: ignore[misc] ) @@ -127,9 +127,9 @@ def ip6_addresses_to_indexes( for iface in interfaces: if isinstance(iface, int): - result.append((interface_index_to_ip6_address(adapters, iface), iface)) + result.append((interface_index_to_ip6_address(adapters, iface), iface)) # type: ignore[arg-type] elif isinstance(iface, str) and ipaddress.ip_address(iface).version == 6: - result.append(ip6_to_address_and_index(adapters, iface)) + result.append(ip6_to_address_and_index(adapters, iface)) # type: ignore[arg-type] return result From 00cd7368f542420658156a5e94523c19bf5031a9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 12:00:47 -0600 Subject: [PATCH 349/434] chore(pre-commit.ci): pre-commit autoupdate (#1515) Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index afeeffbf..5c4754d8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.5 + rev: v0.9.6 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From f377d5cd08d724282c8487785163b466f3971344 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 11 Feb 2025 19:19:10 -0600 Subject: [PATCH 350/434] fix: make no buffer space available when adding multicast memberships forgiving (#1516) --- src/zeroconf/_utils/net.py | 14 ++++++++++++++ tests/utils/test_net.py | 23 ++++++++++++++++++++++- 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index 62033ad5..78f37641 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -302,6 +302,20 @@ def add_multicast_member( interface, ) return False + if _errno == errno.ENOBUFS: + # https://github.com/python-zeroconf/python-zeroconf/issues/1510 + if not is_v6 and sys.platform.startswith("linux"): + log.warning( + "No buffer space available when adding %s to multicast group, " + "try increasing `net.ipv4.igmp_max_memberships` to `1024` in sysctl.conf", + interface, + ) + else: + log.warning( + "No buffer space available when adding %s to multicast group.", + interface, + ) + return False if _errno == errno.EADDRNOTAVAIL: log.info( "Address not available when adding %s to multicast " diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index f7e51c86..a770e1ce 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -4,6 +4,7 @@ import errno import socket +import sys import unittest from unittest.mock import MagicMock, Mock, patch @@ -181,7 +182,7 @@ def 
test_set_mdns_port_socket_options_for_ip_version(): netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) -def test_add_multicast_member(): +def test_add_multicast_member(caplog: pytest.LogCaptureFixture) -> None: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) interface = "127.0.0.1" @@ -221,6 +222,26 @@ def test_add_multicast_member(): with patch("socket.socket.setsockopt"): assert netutils.add_multicast_member(sock, interface) is True + # Ran out of IGMP memberships is forgiving and logs about igmp_max_memberships on linux + caplog.clear() + with ( + patch.object(sys, "platform", "linux"), + patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOBUFS, "No buffer space available")), + ): + assert netutils.add_multicast_member(sock, interface) is False + assert "No buffer space available" in caplog.text + assert "net.ipv4.igmp_max_memberships" in caplog.text + + # Ran out of IGMP memberships is forgiving and logs + caplog.clear() + with ( + patch.object(sys, "platform", "darwin"), + patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOBUFS, "No buffer space available")), + ): + assert netutils.add_multicast_member(sock, interface) is False + assert "No buffer space available" in caplog.text + assert "net.ipv4.igmp_max_memberships" not in caplog.text + def test_bind_raises_skips_address(): """Test bind failing in new_socket returns None on EADDRNOTAVAIL.""" From a15a1e01e1485a19104ff9bfcdcb7e962396ebeb Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 12 Feb 2025 01:28:59 +0000 Subject: [PATCH 351/434] 0.143.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a3f6993..031d03da 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.143.1 (2025-02-12) + +### Bug Fixes + +- Make no buffer space 
available when adding multicast memberships forgiving + ([#1516](https://github.com/python-zeroconf/python-zeroconf/pull/1516), + [`f377d5c`](https://github.com/python-zeroconf/python-zeroconf/commit/f377d5cd08d724282c8487785163b466f3971344)) + + ## v0.143.0 (2025-01-31) ### Features diff --git a/pyproject.toml b/pyproject.toml index 3bcd954a..7d5ccea0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.143.0" +version = "0.143.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b85aee74..bc5d62ed 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.143.0" +__version__ = "0.143.1" __license__ = "LGPL" From 39887b80328d616e8e6f6ca9d08aecc06f7b0711 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 11 Feb 2025 19:32:34 -0600 Subject: [PATCH 352/434] feat: add armv7l wheel builds (#1517) --- .github/workflows/ci.yml | 54 ++++++++++++++++++++++++++-------------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 937166fe..357facaa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -161,7 +161,7 @@ jobs: needs: [release] if: needs.release.outputs.released == 'true' - name: Build wheels on ${{ matrix.os }} (${{ matrix.musl }}) + name: Build wheels on ${{ matrix.os }} (${{ matrix.musl }}) [${{ matrix.qemu }}] runs-on: ${{ matrix.os }} strategy: matrix: @@ -173,27 +173,47 @@ jobs: macos-13, macos-latest, ] - musl: ["", "musllinux"] - exclude: - - os: windows-2019 - musl: "musllinux" - - os: macos-13 - musl: "musllinux" - - os: macos-latest - musl: "musllinux" - + qemu: [''] + musl: [""] + include: + - os: ubuntu-latest + qemu: armv7l + musl: "" + - os: ubuntu-latest + qemu: armv7l + musl: musllinux + - os: ubuntu-latest + musl: musllinux + - os: ubuntu-24.04-arm + musl: musllinux steps: - - uses: actions/checkout@v4 + - name: Checkout + uses: actions/checkout@v4 with: fetch-depth: 0 ref: "master" - # Used to host cibuildwheel - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.11" - + python-version: "3.12" + - name: Set up QEMU + if: ${{ matrix.qemu }} + uses: docker/setup-qemu-action@v3 + with: + platforms: all + # This should be temporary + # xref https://github.com/docker/setup-qemu-action/issues/188 + # xref https://github.com/tonistiigi/binfmt/issues/215 + image: tonistiigi/binfmt:qemu-v8.1.5 + id: qemu + - name: Prepare emulation + run: | + if [[ -n "${{ matrix.qemu }}" ]]; then + # Build emulated architectures only if QEMU is set, + # use default "auto" otherwise + echo "CIBW_ARCHS_LINUX=${{ matrix.qemu }}" >> $GITHUB_ENV + fi - name: Install python-semantic-release run: pipx install python-semantic-release==7.34.6 @@ 
-208,20 +228,18 @@ jobs: ref: "${{ steps.release_tag.outputs.newest_release_tag }}" fetch-depth: 0 - - name: Build wheels ${{ matrix.musl }} + - name: Build wheels ${{ matrix.musl }} (${{ matrix.qemu }}) uses: pypa/cibuildwheel@v2.22.0 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc - CIBW_ARCHS_LINUX: ${{ matrix.os == 'ubuntu-24.04-arm' && 'aarch64' || 'auto' }} - CIBW_BUILD_VERBOSITY: 3 REQUIRE_CYTHON: 1 - uses: actions/upload-artifact@v4 with: path: ./wheelhouse/*.whl - name: wheels-${{ matrix.os }}-${{ matrix.musl }} + name: wheels-${{ matrix.os }}-${{ matrix.musl }}-${{ matrix.qemu }} upload_pypi: needs: [build_wheels] From bd271f30441b969a9db0bdf9cad64325b6e6b33e Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 12 Feb 2025 01:47:39 +0000 Subject: [PATCH 353/434] 0.144.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 8 ++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 031d03da..e9500c2b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,14 @@ # CHANGELOG +## v0.144.0 (2025-02-12) + +### Features + +- Add armv7l wheel builds ([#1517](https://github.com/python-zeroconf/python-zeroconf/pull/1517), + [`39887b8`](https://github.com/python-zeroconf/python-zeroconf/commit/39887b80328d616e8e6f6ca9d08aecc06f7b0711)) + + ## v0.143.1 (2025-02-12) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 7d5ccea0..66ce717b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.143.1" +version = "0.144.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William 
McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index bc5d62ed..9f7764d2 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.143.1" +__version__ = "0.144.0" __license__ = "LGPL" From e7adac9c59fc4d0c4822c6097a4daee3d68eb4de Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 11 Feb 2025 19:54:14 -0600 Subject: [PATCH 354/434] fix: wheel builds failing after adding armv7l builds (#1518) --- .github/workflows/ci.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 357facaa..f8eaa7ec 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -208,6 +208,7 @@ jobs: image: tonistiigi/binfmt:qemu-v8.1.5 id: qemu - name: Prepare emulation + if: ${{ matrix.qemu }} run: | if [[ -n "${{ matrix.qemu }}" ]]; then # Build emulated architectures only if QEMU is set, @@ -232,7 +233,7 @@ jobs: uses: pypa/cibuildwheel@v2.22.0 # to supply options, put them in 'env', like: env: - CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* *p39-*_aarch64 *p310-*_aarch64 pp*_aarch64 ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} + CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc REQUIRE_CYTHON: 1 From dba44e4cbc853c1fd99f7fad274afc1cba99363f Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 12 Feb 2025 02:04:11 +0000 Subject: [PATCH 355/434] 0.144.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
e9500c2b..6c8f248c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.144.1 (2025-02-12) + +### Bug Fixes + +- Wheel builds failing after adding armv7l builds + ([#1518](https://github.com/python-zeroconf/python-zeroconf/pull/1518), + [`e7adac9`](https://github.com/python-zeroconf/python-zeroconf/commit/e7adac9c59fc4d0c4822c6097a4daee3d68eb4de)) + + ## v0.144.0 (2025-02-12) ### Features diff --git a/pyproject.toml b/pyproject.toml index 66ce717b..8ec6a85b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.144.0" +version = "0.144.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 9f7764d2..6efc9a5c 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.144.0" +__version__ = "0.144.1" __license__ = "LGPL" From cf44289c33baee045ac84b40b218e4a92589a30f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 12 Feb 2025 22:24:56 -0600 Subject: [PATCH 356/434] chore: cleanup typing in net utils (#1521) --- src/zeroconf/_utils/net.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index 78f37641..fd6e0dff 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -85,29 +85,30 @@ def get_all_addresses_v6() -> list[tuple[tuple[str, int, int], int]]: ) -def ip6_to_address_and_index(adapters: list[Any], ip: str) -> tuple[tuple[str, int, int], int]: +def ip6_to_address_and_index(adapters: list[ifaddr.Adapter], ip: str) -> tuple[tuple[str, int, int], int]: if "%" in ip: ip = ip[: ip.index("%")] # Strip scope_id. 
ipaddr = ipaddress.ip_address(ip) for adapter in adapters: for adapter_ip in adapter.ips: # IPv6 addresses are represented as tuples - if isinstance(adapter_ip.ip, tuple) and ipaddress.ip_address(adapter_ip.ip[0]) == ipaddr: - return ( - cast(tuple[str, int, int], adapter_ip.ip), - cast(int, adapter.index), - ) + if ( + adapter.index is not None + and isinstance(adapter_ip.ip, tuple) + and ipaddress.ip_address(adapter_ip.ip[0]) == ipaddr + ): + return (adapter_ip.ip, adapter.index) raise RuntimeError(f"No adapter found for IP address {ip}") -def interface_index_to_ip6_address(adapters: list[Any], index: int) -> tuple[str, int, int]: +def interface_index_to_ip6_address(adapters: list[ifaddr.Adapter], index: int) -> tuple[str, int, int]: for adapter in adapters: if adapter.index == index: for adapter_ip in adapter.ips: # IPv6 addresses are represented as tuples if isinstance(adapter_ip.ip, tuple): - return cast(tuple[str, int, int], adapter_ip.ip) + return adapter_ip.ip raise RuntimeError(f"No adapter found for index {index}") @@ -414,8 +415,7 @@ def create_sockets( return listen_socket, respond_sockets -def get_errno(e: Exception) -> int: - assert isinstance(e, socket.error) +def get_errno(e: OSError) -> int: return cast(int, e.args[0]) From 0ca624da07b3f5ceb158c1afd51318c3ed017ab8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 13 Feb 2025 06:01:56 -0600 Subject: [PATCH 357/434] chore: enable some more ruff rules (#1522) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- build_ext.py | 1 - pyproject.toml | 77 ++++++++++++++++++++++++++++++ src/zeroconf/_history.py | 4 +- src/zeroconf/_protocol/incoming.py | 2 +- src/zeroconf/_protocol/outgoing.py | 2 +- src/zeroconf/_record_update.py | 2 +- src/zeroconf/_services/__init__.py | 6 +-- src/zeroconf/_services/info.py | 8 ++-- tests/test_handlers.py | 5 +- tests/test_history.py | 2 +- tests/test_services.py | 1 - 11 files changed, 91 insertions(+), 19 deletions(-) diff --git a/build_ext.py b/build_ext.py index 26b4eb96..e91f6350 100644 --- a/build_ext.py +++ b/build_ext.py @@ -54,4 +54,3 @@ def build(setup_kwargs: Any) -> None: except Exception: if os.environ.get("REQUIRE_CYTHON"): raise - pass diff --git a/pyproject.toml b/pyproject.toml index 8ec6a85b..6404e8d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,6 +93,24 @@ line-length = 110 ignore = [ "S101", # use of assert "S104", # S104 Possible binding to all interfaces + "PLR0912", # too many to fix right now + "TC001", # too many to fix right now + "TID252", # skip + "PLR0913", # too late to make changes here + "PLR0911", # would be breaking change + "TRY003", # too many to fix + "SLF001", # design choice + "TC003", # too many to fix + "PLR2004" , # too many to fix + "PGH004", # too many to fix + "PGH003", # too many to fix + "SIM110", # this is slower + "FURB136", # this is slower for Cython + "PYI034", # enable when we drop Py3.10 + "PYI032", # breaks Cython + "PYI041", # breaks Cython + "FURB188", # usually slower + "PERF401", # Cython: closures inside cpdef functions not yet supported ] select = [ "B", # flake8-bugbear @@ -104,8 +122,67 @@ select = [ "UP", # pyupgrade "I", # isort "RUF", # ruff specific + "FLY", # flynt + "FURB", # refurb + "G", # flake8-logging-format , + "PERF", # Perflint + "PGH", # 
pygrep-hooks + "PIE", # flake8-pie + "PL", # pylint + "PT", # flake8-pytest-style + "PTH", # flake8-pathlib + "PYI", # flake8-pyi + "RET", # flake8-return + "RSE", # flake8-raise , + "SIM", # flake8-simplify + "SLF", # flake8-self + "SLOT", # flake8-slots + "T100", # Trace found: {name} used + "T20", # flake8-print + "TC", # flake8-type-checking + "TID", # Tidy imports + "TRY", # tryceratops ] +[tool.ruff.lint.per-file-ignores] +"tests/**/*" = [ + "D100", + "D101", + "D102", + "D103", + "D104", + "S101", + "SLF001", + "PLR2004", # too many to fix right now + "PT011", # too many to fix right now + "PT006", # too many to fix right now + "PGH003", # too many to fix right now + "PT007", # too many to fix right now + "PT027", # too many to fix right now + "PLW0603" , # too many to fix right now + "PLR0915", # too many to fix right now + "FLY002", # too many to fix right now + "PT018", # too many to fix right now + "PLR0124", # too many to fix right now + "SIM202" , # too many to fix right now + "PT012" , # too many to fix right now + "TID252", # too many to fix right now + "PLR0913", # skip this one + "SIM102" , # too many to fix right now + "SIM108", # too many to fix right now + "TC003", # too many to fix right now + "TC002", # too many to fix right now + "T201", # too many to fix right now +] +"bench/**/*" = [ + "T201", # intended +] +"examples/**/*" = [ + "T201", # intended +] +"setup.py" = ["D100"] +"conftest.py" = ["D100"] +"docs/conf.py" = ["D100"] [tool.pylint.BASIC] class-const-naming-style = "any" diff --git a/src/zeroconf/_history.py b/src/zeroconf/_history.py index 5bae7be0..1b6f3fad 100644 --- a/src/zeroconf/_history.py +++ b/src/zeroconf/_history.py @@ -60,9 +60,7 @@ def suppresses(self, question: DNSQuestion, now: _float, known_answers: set[DNSR return False # The last question has more known answers than # we knew so we have to ask - if previous_known_answers - known_answers: - return False - return True + return not previous_known_answers - 
known_answers def async_expire(self, now: _float) -> None: """Expire the history of old questions.""" diff --git a/src/zeroconf/_protocol/incoming.py b/src/zeroconf/_protocol/incoming.py index 7f4a8eec..2d977b64 100644 --- a/src/zeroconf/_protocol/incoming.py +++ b/src/zeroconf/_protocol/incoming.py @@ -398,7 +398,7 @@ def _read_bitmap(self, end: _int) -> list[int]: bitmap_length = view[offset_plus_one] bitmap_end = offset_plus_two + bitmap_length for i, byte in enumerate(self.data[offset_plus_two:bitmap_end]): - for bit in range(0, 8): + for bit in range(8): if byte & (0x80 >> bit): rdtypes.append(bit + window * 256 + i * 8) self.offset += 2 + bitmap_length diff --git a/src/zeroconf/_protocol/outgoing.py b/src/zeroconf/_protocol/outgoing.py index 6837e39a..fd5e57a0 100644 --- a/src/zeroconf/_protocol/outgoing.py +++ b/src/zeroconf/_protocol/outgoing.py @@ -272,7 +272,7 @@ def write_name(self, name: str_) -> None: """ # split name into each label - if name.endswith("."): + if name and name[-1] == ".": name = name[:-1] index = self.names.get(name, 0) diff --git a/src/zeroconf/_record_update.py b/src/zeroconf/_record_update.py index 5f817511..497ee39d 100644 --- a/src/zeroconf/_record_update.py +++ b/src/zeroconf/_record_update.py @@ -43,6 +43,6 @@ def __getitem__(self, index: int) -> DNSRecord | None: """Get the new or old record.""" if index == 0: return self.new - elif index == 1: + if index == 1: return self.old raise IndexError(index) diff --git a/src/zeroconf/_services/__init__.py b/src/zeroconf/_services/__init__.py index 6936aed6..b244552f 100644 --- a/src/zeroconf/_services/__init__.py +++ b/src/zeroconf/_services/__init__.py @@ -38,13 +38,13 @@ class ServiceStateChange(enum.Enum): class ServiceListener: def add_service(self, zc: Zeroconf, type_: str, name: str) -> None: - raise NotImplementedError() + raise NotImplementedError def remove_service(self, zc: Zeroconf, type_: str, name: str) -> None: - raise NotImplementedError() + raise NotImplementedError def 
update_service(self, zc: Zeroconf, type_: str, name: str) -> None: - raise NotImplementedError() + raise NotImplementedError class Signal: diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index b22fc805..9cd8df16 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -378,13 +378,13 @@ def _set_properties(self, properties: dict[str | bytes, str | bytes | None]) -> result = b"" for key, value in properties.items(): if isinstance(key, str): - key = key.encode("utf-8") + key = key.encode("utf-8") # noqa: PLW2901 properties_contain_str = True record = key if value is not None: if not isinstance(value, bytes): - value = str(value).encode("utf-8") + value = str(value).encode("utf-8") # noqa: PLW2901 properties_contain_str = True record += b"=" + value list_.append(record) @@ -524,7 +524,7 @@ def _process_record_threadsafe(self, zc: Zeroconf, record: DNSRecord, now: float # since by default IPv4Address.__eq__ compares the # the addresses on version and int which more than # we need here since we know the version is 4. - elif ip_addr.zc_integer != ipv4_addresses[0].zc_integer: + if ip_addr.zc_integer != ipv4_addresses[0].zc_integer: ipv4_addresses.remove(ip_addr) ipv4_addresses.insert(0, ip_addr) @@ -540,7 +540,7 @@ def _process_record_threadsafe(self, zc: Zeroconf, record: DNSRecord, now: float # since by default IPv6Address.__eq__ compares the # the addresses on version and int which more than # we need here since we know the version is 6. 
- elif ip_addr.zc_integer != self._ipv6_addresses[0].zc_integer: + if ip_addr.zc_integer != self._ipv6_addresses[0].zc_integer: ipv6_addresses.remove(ip_addr) ipv6_addresses.insert(0, ip_addr) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index fd0e689c..ffa4ff88 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -67,10 +67,9 @@ def test_ttl(self): def get_ttl(record_type): if expected_ttl is not None: return expected_ttl - elif record_type in [const._TYPE_A, const._TYPE_SRV, const._TYPE_NSEC]: + if record_type in [const._TYPE_A, const._TYPE_SRV, const._TYPE_NSEC]: return const._DNS_HOST_TTL - else: - return const._DNS_OTHER_TTL + return const._DNS_OTHER_TTL def _process_outgoing_packet(out): """Sends an outgoing packet.""" diff --git a/tests/test_history.py b/tests/test_history.py index 4c9836ce..e9254168 100644 --- a/tests/test_history.py +++ b/tests/test_history.py @@ -3,7 +3,7 @@ from __future__ import annotations import zeroconf as r -import zeroconf.const as const +from zeroconf import const from zeroconf._history import QuestionHistory diff --git a/tests/test_services.py b/tests/test_services.py index e93174cc..7d7c3fc7 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -71,7 +71,6 @@ def update_service(self, zeroconf, type, name): class MySubListener(r.ServiceListener): def add_service(self, zeroconf, type, name): sub_service_added.set() - pass def remove_service(self, zeroconf, type, name): pass From 8fbbe419a069c3ee946a4185e14bd6de660aa67b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 14 Feb 2025 11:45:11 -0600 Subject: [PATCH 358/434] chore: split up armv7l wheels to speed up release (#1524) --- .github/workflows/ci.yml | 56 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 51 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f8eaa7ec..5c57e3b5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -173,19 +173,58 @@ jobs: macos-13, macos-latest, ] - qemu: [''] + qemu: [""] musl: [""] + pyver: [""] include: + - os: ubuntu-latest + musl: "musllinux" + - os: ubuntu-24.04-arm + musl: "musllinux" + # qemu is slow, make a single + # runner per Python version + - os: ubuntu-latest + qemu: armv7l + musl: "musllinux" + pyver: cp39 + - os: ubuntu-latest + qemu: armv7l + musl: "musllinux" + pyver: cp310 + - os: ubuntu-latest + qemu: armv7l + musl: "musllinux" + pyver: cp311 + - os: ubuntu-latest + qemu: armv7l + musl: "musllinux" + pyver: cp312 + - os: ubuntu-latest + qemu: armv7l + musl: "musllinux" + pyver: cp313 + # qemu is slow, make a single + # runner per Python version - os: ubuntu-latest qemu: armv7l musl: "" + pyver: cp39 - os: ubuntu-latest qemu: armv7l - musl: musllinux + musl: "" + pyver: cp310 - os: ubuntu-latest - musl: musllinux - - os: ubuntu-24.04-arm - musl: musllinux + qemu: armv7l + musl: "" + pyver: cp311 + - os: ubuntu-latest + qemu: armv7l + musl: "" + pyver: cp312 + - os: ubuntu-latest + qemu: armv7l + musl: "" + pyver: cp313 steps: - name: Checkout uses: actions/checkout@v4 @@ -215,6 +254,13 @@ jobs: # use default "auto" otherwise echo "CIBW_ARCHS_LINUX=${{ matrix.qemu }}" >> $GITHUB_ENV fi + - name: Limit to a specific Python version on slow QEMU + if: ${{ matrix.pyver }} + run: | + if [[ -n "${{ matrix.pyver }}" ]]; then + echo "CIBW_BUILD=${{ matrix.pyver }}*" >> $GITHUB_ENV + fi + - name: Install python-semantic-release run: pipx install python-semantic-release==7.34.6 From f5e55ffafedbcf8296afe4920b59d00f74ac2abb Mon Sep 17 
00:00:00 2001 From: "J. Nick Koston" Date: Fri, 14 Feb 2025 12:07:56 -0600 Subject: [PATCH 359/434] chore: improve wheel build runner names in the CI (#1525) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5c57e3b5..27977693 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -161,7 +161,7 @@ jobs: needs: [release] if: needs.release.outputs.released == 'true' - name: Build wheels on ${{ matrix.os }} (${{ matrix.musl }}) [${{ matrix.qemu }}] + name: Wheels for ${{ matrix.os }} (${{ matrix.musl == 'musllinux' && 'musllinux' || 'manylinux' }}) ${{ matrix.qemu }} ${{ matrix.pyver }} runs-on: ${{ matrix.os }} strategy: matrix: From 48dbb7190a4f5126e39dbcdb87e34380d4562cd0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 14 Feb 2025 12:21:31 -0600 Subject: [PATCH 360/434] fix: add a helpful hint for when EADDRINUSE happens during startup (#1526) --- src/zeroconf/_utils/net.py | 19 +++++++++++++++++++ tests/utils/test_net.py | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index fd6e0dff..c2312e01 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -262,6 +262,25 @@ def new_socket( bind_tup, ) return None + if ex.errno == errno.EADDRINUSE: + if sys.platform.startswith("darwin") or sys.platform.startswith("freebsd"): + log.error( + "Address in use when binding to %s; " + "On BSD based systems sharing the same port with another " + "stack may require processes to run with the same UID; " + "When using avahi, make sure disallow-other-stacks is set" + " to no in avahi-daemon.conf", + bind_tup, + ) + else: + log.error( + "Address in use when binding to %s; " + "When using avahi, make sure disallow-other-stacks is set" + " to no in avahi-daemon.conf", + bind_tup, + ) + # This is still a fatal error as its not going to work + # if 
we can't hear the traffic coming in. raise log.debug("Created socket %s", s) return s diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index a770e1ce..f763b655 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -260,6 +260,40 @@ def _mock_socket(*args, **kwargs): netutils.new_socket(("0.0.0.0", 0)) # type: ignore[arg-type] +def test_bind_raises_address_in_use(caplog: pytest.LogCaptureFixture) -> None: + """Test bind failing in new_socket returns None on EADDRINUSE.""" + + def _mock_socket(*args, **kwargs): + sock = MagicMock() + sock.bind = MagicMock(side_effect=OSError(errno.EADDRINUSE, f"Error: {errno.EADDRINUSE}")) + return sock + + with ( + pytest.raises(OSError), + patch.object(sys, "platform", "darwin"), + patch("socket.socket", _mock_socket), + ): + netutils.new_socket(("0.0.0.0", 0)) # type: ignore[arg-type] + assert ( + "On BSD based systems sharing the same port with " + "another stack may require processes to run with the same UID" + ) in caplog.text + assert ( + "When using avahi, make sure disallow-other-stacks is set to no in avahi-daemon.conf" in caplog.text + ) + + caplog.clear() + with pytest.raises(OSError), patch.object(sys, "platform", "linux"), patch("socket.socket", _mock_socket): + netutils.new_socket(("0.0.0.0", 0)) # type: ignore[arg-type] + assert ( + "On BSD based systems sharing the same port with " + "another stack may require processes to run with the same UID" + ) not in caplog.text + assert ( + "When using avahi, make sure disallow-other-stacks is set to no in avahi-daemon.conf" in caplog.text + ) + + def test_new_respond_socket_new_socket_returns_none(): """Test new_respond_socket returns None if new_socket returns None.""" with patch.object(netutils, "new_socket", return_value=None): From 450f5a568a0321fd09a7eb0e3f7bb251ad35c4a0 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 14 Feb 2025 18:33:24 +0000 Subject: [PATCH 361/434] 0.144.2 Automatically generated by python-semantic-release --- 
CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6c8f248c..f986d810 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.144.2 (2025-02-14) + +### Bug Fixes + +- Add a helpful hint for when EADDRINUSE happens during startup + ([#1526](https://github.com/python-zeroconf/python-zeroconf/pull/1526), + [`48dbb71`](https://github.com/python-zeroconf/python-zeroconf/commit/48dbb7190a4f5126e39dbcdb87e34380d4562cd0)) + + ## v0.144.1 (2025-02-12) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 6404e8d3..c87ef7c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.144.1" +version = "0.144.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 6efc9a5c..7ec3c120 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.144.1" +__version__ = "0.144.2" __license__ = "LGPL" From 43136fa418d4d7826415e1d0f7761b198347ced7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 14 Feb 2025 13:12:51 -0600 Subject: [PATCH 362/434] fix: non unique name during wheel upload (#1527) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 27977693..578fe76a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -286,7 +286,7 @@ jobs: - uses: actions/upload-artifact@v4 with: path: ./wheelhouse/*.whl - name: wheels-${{ matrix.os }}-${{ matrix.musl }}-${{ matrix.qemu }} + name: wheels-${{ matrix.os }}-${{ matrix.musl }}-${{ matrix.qemu }}-${{ matrix.pyver }} upload_pypi: needs: [build_wheels] From 53cc868b77e98e9b3f938a1ad905b3be20fce9ac Mon Sep 17 00:00:00 2001 From: semantic-release Date: Fri, 14 Feb 2025 19:26:13 +0000 Subject: [PATCH 363/434] 0.144.3 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f986d810..19b36d46 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.144.3 (2025-02-14) + +### Bug Fixes + +- Non unique name during wheel upload + ([#1527](https://github.com/python-zeroconf/python-zeroconf/pull/1527), + [`43136fa`](https://github.com/python-zeroconf/python-zeroconf/commit/43136fa418d4d7826415e1d0f7761b198347ced7)) + + ## v0.144.2 (2025-02-14) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index c87ef7c9..4015b88f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.144.2" +version = "0.144.3" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 7ec3c120..59052702 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.144.2" +__version__ = "0.144.3" __license__ = "LGPL" From 8c913e11ea59d59d4905defa53aa1e963695c47c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 14 Feb 2025 14:32:11 -0800 Subject: [PATCH 364/434] chore: enable ASYNC ruff rules (#1528) --- examples/async_apple_scanner.py | 3 +-- examples/async_browser.py | 3 +-- examples/async_registration.py | 3 +-- pyproject.toml | 1 + 4 files changed, 4 insertions(+), 6 deletions(-) diff --git a/examples/async_apple_scanner.py b/examples/async_apple_scanner.py index 19691662..00744b5c 100755 --- a/examples/async_apple_scanner.py +++ b/examples/async_apple_scanner.py @@ -96,8 +96,7 @@ async def async_run(self) -> None: ALL_SERVICES, **kwargs, # type: ignore[arg-type] ) - while True: - await asyncio.sleep(1) + await asyncio.Event().wait() async def async_close(self) -> None: assert self.aiozc is not None diff --git a/examples/async_browser.py b/examples/async_browser.py index d86cfc5e..58193f70 100755 --- a/examples/async_browser.py +++ b/examples/async_browser.py @@ -74,8 +74,7 @@ async def async_run(self) -> None: self.aiobrowser = AsyncServiceBrowser( self.aiozc.zeroconf, services, handlers=[async_on_service_state_change] ) - while True: - await asyncio.sleep(1) + await asyncio.Event().wait() async def async_close(self) -> None: assert self.aiozc is not None diff --git a/examples/async_registration.py b/examples/async_registration.py index d01b15e1..5c774cad 100755 --- a/examples/async_registration.py +++ b/examples/async_registration.py @@ -24,8 +24,7 @@ async def register_services(self, infos: list[AsyncServiceInfo]) -> None: background_tasks = await asyncio.gather(*tasks) await 
asyncio.gather(*background_tasks) print("Finished registration, press Ctrl-C to exit...") - while True: - await asyncio.sleep(1) + await asyncio.Event().wait() async def unregister_services(self, infos: list[AsyncServiceInfo]) -> None: assert self.aiozc is not None diff --git a/pyproject.toml b/pyproject.toml index 4015b88f..eb4ead92 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,6 +113,7 @@ ignore = [ "PERF401", # Cython: closures inside cpdef functions not yet supported ] select = [ + "ASYNC", # async rules "B", # flake8-bugbear "C4", # flake8-comprehensions "S", # flake8-bandit From 1c7f3548b6cbddf73dbb9d69cd8987c8ad32c705 Mon Sep 17 00:00:00 2001 From: Rotzbua Date: Sat, 15 Feb 2025 23:47:51 +0100 Subject: [PATCH 365/434] feat(docs): enable link to source code (#1529) --- docs/conf.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index c3bce671..11a0f2d4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -33,9 +33,14 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration extensions = [ + "sphinx.ext.todo", # Allow todo comments. + "sphinx.ext.viewcode", # Link to source code. "sphinx.ext.autodoc", "zeroconfautodocfix", # Must be after "sphinx.ext.autodoc" "sphinx.ext.intersphinx", + "sphinx.ext.coverage", # Enable the overage report. + "sphinx.ext.duration", # Show build duration at the end. + "sphinx_rtd_theme", # Required for theme. 
] templates_path = ["_templates"] @@ -53,6 +58,11 @@ "**": ("localtoc.html", "relations.html", "sourcelink.html", "searchbox.html"), } +# -- Options for RTD theme --------------------------------------------------- +# https://sphinx-rtd-theme.readthedocs.io/en/stable/configuring.html + +# html_theme_options = {} + # -- Options for HTML help output -------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-help-output From d4506fd4dc391fb366d1f7dfb5ec77933221a2eb Mon Sep 17 00:00:00 2001 From: semantic-release Date: Sat, 15 Feb 2025 22:57:36 +0000 Subject: [PATCH 366/434] 0.145.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 19b36d46..243772b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.145.0 (2025-02-15) + +### Features + +- **docs**: Enable link to source code + ([#1529](https://github.com/python-zeroconf/python-zeroconf/pull/1529), + [`1c7f354`](https://github.com/python-zeroconf/python-zeroconf/commit/1c7f3548b6cbddf73dbb9d69cd8987c8ad32c705)) + + ## v0.144.3 (2025-02-14) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index eb4ead92..78b100d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.144.3" +version = "0.145.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 59052702..2cccd05d 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.144.3" +__version__ = "0.145.0" __license__ = "LGPL" From aab566f1d9a5d6e2c73ba459daed85a4ed81d721 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Feb 2025 10:52:54 -0600 Subject: [PATCH 367/434] chore(deps-dev): bump cython from 3.0.11 to 3.0.12 (#1531) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 132 ++++++++++++++++++++++++++-------------------------- 1 file changed, 65 insertions(+), 67 deletions(-) diff --git a/poetry.lock b/poetry.lock index 258b28f2..9caaac17 100644 --- a/poetry.lock +++ b/poetry.lock @@ -306,77 +306,75 @@ toml = ["tomli"] [[package]] name = "cython" -version = "3.0.11" +version = "3.0.12" description = "The Cython compiler for writing C extensions in the Python language." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" files = [ - {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, - {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, - {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89a82937ce4037f092e9848a7bbcc65bc8e9fc9aef2bb74f5c15e7d21a73080"}, - {file = "Cython-3.0.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea2e7e2d3bc0d8630dafe6c4a5a89485598ff8a61885b74f8ed882597efd5"}, - {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cee29846471ce60226b18e931d8c1c66a158db94853e3e79bc2da9bd22345008"}, - {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eeb6860b0f4bfa402de8929833fe5370fa34069c7ebacb2d543cb017f21fb891"}, - {file = "Cython-3.0.11-cp310-cp310-win32.whl", hash = "sha256:3699391125ab344d8d25438074d1097d9ba0fb674d0320599316cfe7cf5f002a"}, - {file = "Cython-3.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:d02f4ebe15aac7cdacce1a628e556c1983f26d140fd2e0ac5e0a090e605a2d38"}, - {file = "Cython-3.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75ba1c70b6deeaffbac123856b8d35f253da13552207aa969078611c197377e4"}, - {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af91497dc098718e634d6ec8f91b182aea6bb3690f333fc9a7777bc70abe8810"}, - {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3999fb52d3328a6a5e8c63122b0a8bd110dfcdb98dda585a3def1426b991cba7"}, - {file = "Cython-3.0.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d566a4e09b8979be8ab9f843bac0dd216c81f5e5f45661a9b25cd162ed80508c"}, - {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:46aec30f217bdf096175a1a639203d44ac73a36fe7fa3dd06bd012e8f39eca0f"}, - {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd1fe25af330f4e003421636746a546474e4ccd8f239f55d2898d80983d20ed"}, - {file = "Cython-3.0.11-cp311-cp311-win32.whl", hash = "sha256:221de0b48bf387f209003508e602ce839a80463522fc6f583ad3c8d5c890d2c1"}, - {file = "Cython-3.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:3ff8ac1f0ecd4f505db4ab051e58e4531f5d098b6ac03b91c3b902e8d10c67b3"}, - {file = "Cython-3.0.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:11996c40c32abf843ba652a6d53cb15944c88d91f91fc4e6f0028f5df8a8f8a1"}, - {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63f2c892e9f9c1698ecfee78205541623eb31cd3a1b682668be7ac12de94aa8e"}, - {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b14c24f1dc4c4c9d997cca8d1b7fb01187a218aab932328247dcf5694a10102"}, - {file = "Cython-3.0.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8eed5c015685106db15dd103fd040948ddca9197b1dd02222711815ea782a27"}, - {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780f89c95b8aec1e403005b3bf2f0a2afa060b3eba168c86830f079339adad89"}, - {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a690f2ff460682ea985e8d38ec541be97e0977fa0544aadc21efc116ff8d7579"}, - {file = "Cython-3.0.11-cp312-cp312-win32.whl", hash = "sha256:2252b5aa57621848e310fe7fa6f7dce5f73aa452884a183d201a8bcebfa05a00"}, - {file = "Cython-3.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:da394654c6da15c1d37f0b7ec5afd325c69a15ceafee2afba14b67a5df8a82c8"}, - {file = "Cython-3.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:4341d6a64d47112884e0bcf31e6c075268220ee4cd02223047182d4dda94d637"}, - {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351955559b37e6c98b48aecb178894c311be9d731b297782f2b78d111f0c9015"}, - {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c02361af9bfa10ff1ccf967fc75159e56b1c8093caf565739ed77a559c1f29f"}, - {file = "Cython-3.0.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6823aef13669a32caf18bbb036de56065c485d9f558551a9b55061acf9c4c27f"}, - {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fb68cef33684f8cc97987bee6ae919eee7e18ee6a3ad7ed9516b8386ef95ae6"}, - {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790263b74432cb997740d73665f4d8d00b9cd1cecbdd981d93591ddf993d4f12"}, - {file = "Cython-3.0.11-cp313-cp313-win32.whl", hash = "sha256:e6dd395d1a704e34a9fac00b25f0036dce6654c6b898be6f872ac2bb4f2eda48"}, - {file = "Cython-3.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:52186101d51497519e99b60d955fd5cb3bf747c67f00d742e70ab913f1e42d31"}, - {file = "Cython-3.0.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c69d5cad51388522b98a99b4be1b77316de85b0c0523fa865e0ea58bbb622e0a"}, - {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8acdc87e9009110adbceb7569765eb0980129055cc954c62f99fe9f094c9505e"}, - {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd47865f4c0a224da73acf83d113f93488d17624e2457dce1753acdfb1cc40c"}, - {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:301bde949b4f312a1c70e214b0c3bc51a3f955d466010d2f68eb042df36447b0"}, - {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = 
"sha256:f3953d2f504176f929862e5579cfc421860c33e9707f585d70d24e1096accdf7"}, - {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:3f2b062f6df67e8a56c75e500ca330cf62c85ac26dd7fd006f07ef0f83aebfa3"}, - {file = "Cython-3.0.11-cp36-cp36m-win32.whl", hash = "sha256:c3d68751668c66c7a140b6023dba5d5d507f72063407bb609d3a5b0f3b8dfbe4"}, - {file = "Cython-3.0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:bcd29945fafd12484cf37b1d84f12f0e7a33ba3eac5836531c6bd5283a6b3a0c"}, - {file = "Cython-3.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e9a8d92978b15a0c7ca7f98447c6c578dc8923a0941d9d172d0b077cb69c576"}, - {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421017466e9260aca86823974e26e158e6358622f27c0f4da9c682f3b6d2e624"}, - {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80a7232938d523c1a12f6b1794ab5efb1ae77ad3fde79de4bb558d8ab261619"}, - {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfa550d9ae39e827a6e7198076df763571cb53397084974a6948af558355e028"}, - {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:aedceb6090a60854b31bf9571dc55f642a3fa5b91f11b62bcef167c52cac93d8"}, - {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:473d35681d9f93ce380e6a7c8feb2d65fc6333bd7117fbc62989e404e241dbb0"}, - {file = "Cython-3.0.11-cp37-cp37m-win32.whl", hash = "sha256:3379c6521e25aa6cd7703bb7d635eaca75c0f9c7f1b0fdd6dd15a03bfac5f68d"}, - {file = "Cython-3.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:14701edb3107a5d9305a82d9d646c4f28bfecbba74b26cc1ee2f4be08f602057"}, - {file = "Cython-3.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598699165cfa7c6d69513ee1bffc9e1fdd63b00b624409174c388538aa217975"}, - {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a0583076c4152b417a3a8a5d81ec02f58c09b67d3f22d5857e64c8734ceada8c"}, - {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52205347e916dd65d2400b977df4c697390c3aae0e96275a438cc4ae85dadc08"}, - {file = "Cython-3.0.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989899a85f0d9a57cebb508bd1f194cb52f0e3f7e22ac259f33d148d6422375c"}, - {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53b6072a89049a991d07f42060f65398448365c59c9cb515c5925b9bdc9d71f8"}, - {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f988f7f8164a6079c705c39e2d75dbe9967e3dacafe041420d9af7b9ee424162"}, - {file = "Cython-3.0.11-cp38-cp38-win32.whl", hash = "sha256:a1f4cbc70f6b7f0c939522118820e708e0d490edca42d852fa8004ec16780be2"}, - {file = "Cython-3.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:187685e25e037320cae513b8cc4bf9dbc4465c037051aede509cbbf207524de2"}, - {file = "Cython-3.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0fc6fdd6fa493be7bdda22355689d5446ac944cd71286f6f44a14b0d67ee3ff5"}, - {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b1d1f6f94cc5d42a4591f6d60d616786b9cd15576b112bc92a23131fcf38020"}, - {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ab2b92a3e6ed552adbe9350fd2ef3aa0cc7853cf91569f9dbed0c0699bbeab"}, - {file = "Cython-3.0.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:104d6f2f2c827ccc5e9e42c80ef6773a6aa94752fe6bc5b24a4eab4306fb7f07"}, - {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:13062ce556a1e98d2821f7a0253b50569fdc98c36efd6653a65b21e3f8bbbf5f"}, - {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:525d09b3405534763fa73bd78c8e51ac8264036ce4c16d37dfd1555a7da6d3a7"}, - {file = 
"Cython-3.0.11-cp39-cp39-win32.whl", hash = "sha256:b8c7e514075696ca0f60c337f9e416e61d7ccbc1aa879a56c39181ed90ec3059"}, - {file = "Cython-3.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:8948802e1f5677a673ea5d22a1e7e273ca5f83e7a452786ca286eebf97cee67c"}, - {file = "Cython-3.0.11-py2.py3-none-any.whl", hash = "sha256:0e25f6425ad4a700d7f77cd468da9161e63658837d1bc34861a9861a4ef6346d"}, - {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, + {file = "Cython-3.0.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba67eee9413b66dd9fbacd33f0bc2e028a2a120991d77b5fd4b19d0b1e4039b9"}, + {file = "Cython-3.0.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee2717e5b5f7d966d0c6e27d2efe3698c357aa4d61bb3201997c7a4f9fe485a"}, + {file = "Cython-3.0.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cffc3464f641c8d0dda942c7c53015291beea11ec4d32421bed2f13b386b819"}, + {file = "Cython-3.0.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d3a8f81980ffbd74e52f9186d8f1654e347d0c44bfea6b5997028977f481a179"}, + {file = "Cython-3.0.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8d32856716c369d01f2385ad9177cdd1a11079ac89ea0932dc4882de1aa19174"}, + {file = "Cython-3.0.12-cp310-cp310-win32.whl", hash = "sha256:712c3f31adec140dc60d064a7f84741f50e2c25a8edd7ae746d5eb4d3ef7072a"}, + {file = "Cython-3.0.12-cp310-cp310-win_amd64.whl", hash = "sha256:d6945694c5b9170cfbd5f2c0d00ef7487a2de7aba83713a64ee4ebce7fad9e05"}, + {file = "Cython-3.0.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feb86122a823937cc06e4c029d80ff69f082ebb0b959ab52a5af6cdd271c5dc3"}, + {file = "Cython-3.0.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfdbea486e702c328338314adb8e80f5f9741f06a0ae83aaec7463bc166d12e8"}, + {file = "Cython-3.0.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:563de1728c8e48869d2380a1b76bbc1b1b1d01aba948480d68c1d05e52d20c92"}, + {file = "Cython-3.0.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398d4576c1e1f6316282aa0b4a55139254fbed965cba7813e6d9900d3092b128"}, + {file = "Cython-3.0.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1e5eadef80143026944ea8f9904715a008f5108d1d644a89f63094cc37351e73"}, + {file = "Cython-3.0.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5a93cbda00a5451175b97dea5a9440a3fcee9e54b4cba7a7dbcba9a764b22aec"}, + {file = "Cython-3.0.12-cp311-cp311-win32.whl", hash = "sha256:3109e1d44425a2639e9a677b66cd7711721a5b606b65867cb2d8ef7a97e2237b"}, + {file = "Cython-3.0.12-cp311-cp311-win_amd64.whl", hash = "sha256:d4b70fc339adba1e2111b074ee6119fe9fd6072c957d8597bce9a0dd1c3c6784"}, + {file = "Cython-3.0.12-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fe030d4a00afb2844f5f70896b7f2a1a0d7da09bf3aa3d884cbe5f73fff5d310"}, + {file = "Cython-3.0.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7fec4f052b8fe173fe70eae75091389955b9a23d5cec3d576d21c5913b49d47"}, + {file = "Cython-3.0.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0faa5e39e5c8cdf6f9c3b1c3f24972826e45911e7f5b99cf99453fca5432f45e"}, + {file = "Cython-3.0.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d53de996ed340e9ab0fc85a88aaa8932f2591a2746e1ab1c06e262bd4ec4be7"}, + {file = "Cython-3.0.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ea3a0e19ab77266c738aa110684a753a04da4e709472cadeff487133354d6ab8"}, + {file = "Cython-3.0.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c151082884be468f2f405645858a857298ac7f7592729e5b54788b5c572717ba"}, + {file = "Cython-3.0.12-cp312-cp312-win32.whl", hash = "sha256:3083465749911ac3b2ce001b6bf17f404ac9dd35d8b08469d19dc7e717f5877a"}, + {file = 
"Cython-3.0.12-cp312-cp312-win_amd64.whl", hash = "sha256:c0b91c7ebace030dd558ea28730de8c580680b50768e5af66db2904a3716c3e3"}, + {file = "Cython-3.0.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4ee6f1ea1bead8e6cbc4e64571505b5d8dbdb3b58e679d31f3a84160cebf1a1a"}, + {file = "Cython-3.0.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57aefa6d3341109e46ec1a13e3a763aaa2cbeb14e82af2485b318194be1d9170"}, + {file = "Cython-3.0.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:879ae9023958d63c0675015369384642d0afb9c9d1f3473df9186c42f7a9d265"}, + {file = "Cython-3.0.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36fcd584dae547de6f095500a380f4a0cce72b7a7e409e9ff03cb9beed6ac7a1"}, + {file = "Cython-3.0.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62b79dcc0de49efe9e84b9d0e2ae0a6fc9b14691a65565da727aa2e2e63c6a28"}, + {file = "Cython-3.0.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4aa255781b093a8401109d8f2104bbb2e52de7639d5896aefafddc85c30e0894"}, + {file = "Cython-3.0.12-cp313-cp313-win32.whl", hash = "sha256:77d48f2d4bab9fe1236eb753d18f03e8b2619af5b6f05d51df0532a92dfb38ab"}, + {file = "Cython-3.0.12-cp313-cp313-win_amd64.whl", hash = "sha256:86c304b20bd57c727c7357e90d5ba1a2b6f1c45492de2373814d7745ef2e63b4"}, + {file = "Cython-3.0.12-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ff5c0b6a65b08117d0534941d404833d516dac422eee88c6b4fd55feb409a5ed"}, + {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:680f1d6ed4436ae94805db264d6155ed076d2835d84f20dcb31a7a3ad7f8668c"}, + {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc24609613fa06d0d896309f7164ba168f7e8d71c1e490ed2a08d23351c3f41"}, + {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c1879c073e2b34924ce9b7ca64c212705dcc416af4337c45f371242b2e5f6d32"}, + {file = "Cython-3.0.12-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:bfb75123dd4ff767baa37d7036da0de2dfb6781ff256eef69b11b88b9a0691d1"}, + {file = "Cython-3.0.12-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:f39640f8df0400cde6882e23c734f15bb8196de0a008ae5dc6c8d1ec5957d7c8"}, + {file = "Cython-3.0.12-cp36-cp36m-win32.whl", hash = "sha256:8c9efe9a0895abee3cadfdad4130b30f7b5e57f6e6a51ef2a44f9fc66a913880"}, + {file = "Cython-3.0.12-cp36-cp36m-win_amd64.whl", hash = "sha256:63d840f2975e44d74512f8f34f1f7cb8121c9428e26a3f6116ff273deb5e60a2"}, + {file = "Cython-3.0.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:75c5acd40b97cff16fadcf6901a91586cbca5dcdba81f738efaf1f4c6bc8dccb"}, + {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e62564457851db1c40399bd95a5346b9bb99e17a819bf583b362f418d8f3457a"}, + {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ccd1228cc203b1f1b8a3d403f5a20ad1c40e5879b3fbf5851ce09d948982f2c"}, + {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25529ee948f44d9a165ff960c49d4903267c20b5edf2df79b45924802e4cca6e"}, + {file = "Cython-3.0.12-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:90cf599372c5a22120609f7d3a963f17814799335d56dd0dcf8fe615980a8ae1"}, + {file = "Cython-3.0.12-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9f8c48748a9c94ea5d59c26ab49ad0fad514d36f894985879cf3c3ca0e600bf4"}, + {file = "Cython-3.0.12-cp37-cp37m-win32.whl", hash = "sha256:3e4fa855d98bc7bd6a2049e0c7dc0dcf595e2e7f571a26e808f3efd84d2db374"}, + {file = "Cython-3.0.12-cp37-cp37m-win_amd64.whl", hash = "sha256:120681093772bf3600caddb296a65b352a0d3556e962b9b147efcfb8e8c9801b"}, + {file = "Cython-3.0.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:731d719423e041242c9303c80cae4327467299b90ffe62d4cc407e11e9ea3160"}, + {file = "Cython-3.0.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3238a29f37999e27494d120983eca90d14896b2887a0bd858a381204549137a"}, + {file = "Cython-3.0.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b588c0a089a9f4dd316d2f9275230bad4a7271e5af04e1dc41d2707c816be44b"}, + {file = "Cython-3.0.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab9f5198af74eb16502cc143cdde9ca1cbbf66ea2912e67440dd18a36e3b5fa"}, + {file = "Cython-3.0.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8ee841c0e114efa1e849c281ac9b8df8aa189af10b4a103b1c5fd71cbb799679"}, + {file = "Cython-3.0.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:43c48b5789398b228ea97499f5b864843ba9b1ab837562a9227c6f58d16ede8b"}, + {file = "Cython-3.0.12-cp38-cp38-win32.whl", hash = "sha256:5e5f17c48a4f41557fbcc7ee660ccfebe4536a34c557f553b6893c1b3c83df2d"}, + {file = "Cython-3.0.12-cp38-cp38-win_amd64.whl", hash = "sha256:309c081057930bb79dc9ea3061a1af5086c679c968206e9c9c2ec90ab7cb471a"}, + {file = "Cython-3.0.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54115fcc126840926ff3b53cfd2152eae17b3522ae7f74888f8a41413bd32f25"}, + {file = "Cython-3.0.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:629db614b9c364596d7c975fa3fb3978e8c5349524353dbe11429896a783fc1e"}, + {file = "Cython-3.0.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af081838b0f9e12a83ec4c3809a00a64c817f489f7c512b0e3ecaf5f90a2a816"}, + {file = "Cython-3.0.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:34ce459808f7d8d5d4007bc5486fe50532529096b43957af6cbffcb4d9cc5c8d"}, + {file = "Cython-3.0.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6c6cd6a75c8393e6805d17f7126b96a894f310a1a9ea91c47d141fb9341bfa8"}, + {file = 
"Cython-3.0.12-cp39-cp39-win32.whl", hash = "sha256:a4032e48d4734d2df68235d21920c715c451ac9de15fa14c71b378e8986b83be"}, + {file = "Cython-3.0.12-cp39-cp39-win_amd64.whl", hash = "sha256:dcdc3e5d4ce0e7a4af6903ed580833015641e968d18d528d8371e2435a34132c"}, + {file = "Cython-3.0.12-py2.py3-none-any.whl", hash = "sha256:0038c9bae46c459669390e53a1ec115f8096b2e4647ae007ff1bf4e6dee92806"}, + {file = "cython-3.0.12.tar.gz", hash = "sha256:b988bb297ce76c671e28c97d017b95411010f7c77fa6623dd0bb47eed1aee1bc"}, ] [[package]] From 777c379ea9208ed662fdfd2f79f94e3bb138378b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Feb 2025 10:53:14 -0600 Subject: [PATCH 368/434] chore(pre-commit.ci): pre-commit autoupdate (#1532) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5c4754d8..0c966743 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: hooks: - id: codespell - repo: https://github.com/PyCQA/flake8 - rev: 7.1.1 + rev: 7.1.2 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy From d4e6f25754c15417b8bd9839dc8636b2cff717c8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 17 Feb 2025 22:33:00 -0600 Subject: [PATCH 369/434] fix: hold a strong reference to the AsyncEngine setup task (#1533) --- src/zeroconf/_engine.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_engine.py b/src/zeroconf/_engine.py index 8c800a33..8a371e1e 100644 --- a/src/zeroconf/_engine.py +++ b/src/zeroconf/_engine.py @@ -50,6 +50,7 @@ class AsyncEngine: "_cleanup_timer", "_listen_socket", "_respond_sockets", + "_setup_task", "loop", "protocols", "readers", @@ -73,6 +74,7 @@ def __init__( self._listen_socket = listen_socket self._respond_sockets = respond_sockets self._cleanup_timer: asyncio.TimerHandle | None = None + self._setup_task: asyncio.Task[None] | None = None def setup( self, @@ -82,14 +84,15 @@ def setup( """Set up the instance.""" self.loop = loop self.running_future = loop.create_future() - self.loop.create_task(self._async_setup(loop_thread_ready)) + self._setup_task = self.loop.create_task(self._async_setup(loop_thread_ready)) async def _async_setup(self, loop_thread_ready: threading.Event | None) -> None: """Set up the instance.""" self._async_schedule_next_cache_cleanup() await self._async_create_endpoints() assert self.running_future is not None - self.running_future.set_result(True) + if not self.running_future.done(): + self.running_future.set_result(True) if loop_thread_ready: loop_thread_ready.set() @@ -135,6 +138,8 @@ def _async_schedule_next_cache_cleanup(self) -> None: async def _async_close(self) -> None: """Cancel and wait for the cleanup task to finish.""" + assert self._setup_task is not None + await self._setup_task self._async_shutdown() await asyncio.sleep(0) # flush out any call soons assert self._cleanup_timer is not None From 91a58dd67d55835b1d74e5cd31ff7b0323805c63 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Tue, 18 Feb 2025 04:42:38 +0000 Subject: [PATCH 370/434] 0.145.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ 
pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 243772b0..e770a88f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.145.1 (2025-02-18) + +### Bug Fixes + +- Hold a strong reference to the AsyncEngine setup task + ([#1533](https://github.com/python-zeroconf/python-zeroconf/pull/1533), + [`d4e6f25`](https://github.com/python-zeroconf/python-zeroconf/commit/d4e6f25754c15417b8bd9839dc8636b2cff717c8)) + + ## v0.145.0 (2025-02-15) ### Features diff --git a/pyproject.toml b/pyproject.toml index 78b100d2..e6aa0efe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.145.0" +version = "0.145.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 2cccd05d..d2235d5c 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.145.0" +__version__ = "0.145.1" __license__ = "LGPL" From 6c02b1fe6e1a6b86194e7e90e0039c058ff4f8e0 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 24 Feb 2025 22:21:23 +0000 Subject: [PATCH 371/434] chore(pre-commit.ci): pre-commit autoupdate (#1534) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0c966743..f0d1bec6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - 
rev: v4.2.1 + rev: v4.2.2 hooks: - id: commitizen stages: [commit-msg] @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.6 + rev: v0.9.7 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From a06df79660093e7a59bc88c125c459aa7ec5df1e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 28 Feb 2025 23:21:18 -0600 Subject: [PATCH 372/434] chore(ci): bump python-semantic-release/python-semantic-release from 9.17.0 to 9.21.0 in the github-actions group (#1535) chore(ci): bump python-semantic-release/python-semantic-release Bumps the github-actions group with 1 update: [python-semantic-release/python-semantic-release](https://github.com/python-semantic-release/python-semantic-release). Updates `python-semantic-release/python-semantic-release` from 9.17.0 to 9.21.0 - [Release notes](https://github.com/python-semantic-release/python-semantic-release/releases) - [Changelog](https://github.com/python-semantic-release/python-semantic-release/blob/master/CHANGELOG.rst) - [Commits](https://github.com/python-semantic-release/python-semantic-release/compare/v9.17.0...v9.21.0) --- updated-dependencies: - dependency-name: python-semantic-release/python-semantic-release dependency-type: direct:production update-type: version-update:semver-minor dependency-group: github-actions ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 578fe76a..5a1a1720 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -134,14 +134,14 @@ jobs: # Do a dry run of PSR - name: Test release - uses: python-semantic-release/python-semantic-release@v9.17.0 + uses: python-semantic-release/python-semantic-release@v9.21.0 if: github.ref_name != 'master' with: root_options: --noop # On main branch: actual PSR + upload to PyPI & GitHub - name: Release - uses: python-semantic-release/python-semantic-release@v9.17.0 + uses: python-semantic-release/python-semantic-release@v9.21.0 id: release if: github.ref_name == 'master' with: From c2ac47e2df6d8ee92d51fc7a72d1d18f88e0132f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Mar 2025 22:21:43 -0700 Subject: [PATCH 373/434] chore(deps-dev): bump pytest from 8.3.4 to 8.3.5 (#1536) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9caaac17..6b66bcd1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -647,13 +647,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = 
"sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] From 558c0607fd1bf4a2ecc9bb5d84bcd8824c7fc922 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 2 Mar 2025 22:22:02 -0700 Subject: [PATCH 374/434] chore(deps-dev): bump setuptools from 75.8.0 to 75.8.2 (#1537) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6b66bcd1..e0d48822 100644 --- a/poetry.lock +++ b/poetry.lock @@ -791,13 +791,13 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "75.8.0" +version = "75.8.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" files = [ - {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, - {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, + {file = "setuptools-75.8.2-py3-none-any.whl", hash = "sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f"}, + {file = "setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2"}, ] [package.extras] From 25454648221ab659b48b58f7cfb2f6199fadea1c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 4 Mar 2025 14:00:53 -1000 Subject: [PATCH 375/434] chore(pre-commit.ci): pre-commit autoupdate (#1538) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/commitizen-tools/commitizen: v4.2.2 → v4.4.1](https://github.com/commitizen-tools/commitizen/compare/v4.2.2...v4.4.1) - [github.com/astral-sh/ruff-pre-commit: v0.9.7 → v0.9.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.9.7...v0.9.9) Co-authored-by: 
pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f0d1bec6..265f703e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.2.2 + rev: v4.4.1 hooks: - id: commitizen stages: [commit-msg] @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.7 + rev: v0.9.9 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 7d0768f7622e3af9e7cdb7335bb2ddbbe493b4bd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 4 Mar 2025 14:01:17 -1000 Subject: [PATCH 376/434] chore: update process to get relase tag from PSR in release workflow (#1539) fixes #1201 --- .github/workflows/ci.yml | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5a1a1720..457b4e1d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -125,6 +125,7 @@ jobs: contents: write outputs: released: ${{ steps.release.outputs.released }} + newest_release_tag: ${{ steps.release.outputs.tag }} steps: - uses: actions/checkout@v4 @@ -261,22 +262,13 @@ jobs: echo "CIBW_BUILD=${{ matrix.pyver }}*" >> $GITHUB_ENV fi - - name: Install python-semantic-release - run: pipx install python-semantic-release==7.34.6 - - - name: Get Release Tag - id: release_tag - shell: bash - run: | - echo "::set-output name=newest_release_tag::$(semantic-release print-version --current)" - - uses: actions/checkout@v4 with: - ref: "${{ steps.release_tag.outputs.newest_release_tag }}" + ref: ${{ needs.release.outputs.newest_release_tag }} fetch-depth: 0 - name: Build wheels ${{ matrix.musl }} (${{ matrix.qemu }}) - uses: pypa/cibuildwheel@v2.22.0 + uses: pypa/cibuildwheel@v2.23.0 # to 
supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} From dea233c1e0e80584263090727ce07648755964af Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 4 Mar 2025 15:36:34 -1000 Subject: [PATCH 377/434] feat: reduce size of wheels (#1540) feat: reduce size of binaries --- build_ext.py | 57 ++++++++++++++++++++++++++++++++++------------------ 1 file changed, 37 insertions(+), 20 deletions(-) diff --git a/build_ext.py b/build_ext.py index e91f6350..7faa607f 100644 --- a/build_ext.py +++ b/build_ext.py @@ -5,8 +5,44 @@ from distutils.command.build_ext import build_ext from typing import Any +try: + from setuptools import Extension +except ImportError: + from distutils.core import Extension + _LOGGER = logging.getLogger(__name__) +TO_CYTHONIZE = [ + "src/zeroconf/_dns.py", + "src/zeroconf/_cache.py", + "src/zeroconf/_history.py", + "src/zeroconf/_record_update.py", + "src/zeroconf/_listener.py", + "src/zeroconf/_protocol/incoming.py", + "src/zeroconf/_protocol/outgoing.py", + "src/zeroconf/_handlers/answers.py", + "src/zeroconf/_handlers/record_manager.py", + "src/zeroconf/_handlers/multicast_outgoing_queue.py", + "src/zeroconf/_handlers/query_handler.py", + "src/zeroconf/_services/__init__.py", + "src/zeroconf/_services/browser.py", + "src/zeroconf/_services/info.py", + "src/zeroconf/_services/registry.py", + "src/zeroconf/_updates.py", + "src/zeroconf/_utils/ipaddress.py", + "src/zeroconf/_utils/time.py", +] + +EXTENSIONS = [ + Extension( + ext.removeprefix("src/").removesuffix(".py").replace("/", "."), + [ext], + language="c", + extra_compile_args=["-O3", "-g0"], + ) + for ext in TO_CYTHONIZE +] + class BuildExt(build_ext): def build_extensions(self) -> None: @@ -25,26 +61,7 @@ def build(setup_kwargs: Any) -> None: setup_kwargs.update( { "ext_modules": cythonize( - [ - "src/zeroconf/_dns.py", - "src/zeroconf/_cache.py", - 
"src/zeroconf/_history.py", - "src/zeroconf/_record_update.py", - "src/zeroconf/_listener.py", - "src/zeroconf/_protocol/incoming.py", - "src/zeroconf/_protocol/outgoing.py", - "src/zeroconf/_handlers/answers.py", - "src/zeroconf/_handlers/record_manager.py", - "src/zeroconf/_handlers/multicast_outgoing_queue.py", - "src/zeroconf/_handlers/query_handler.py", - "src/zeroconf/_services/__init__.py", - "src/zeroconf/_services/browser.py", - "src/zeroconf/_services/info.py", - "src/zeroconf/_services/registry.py", - "src/zeroconf/_updates.py", - "src/zeroconf/_utils/ipaddress.py", - "src/zeroconf/_utils/time.py", - ], + EXTENSIONS, compiler_directives={"language_level": "3"}, # Python 3 ), "cmdclass": {"build_ext": BuildExt}, From c9ef9ee527767d3e2fae0dd0d7df1b5ed156ea26 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 5 Mar 2025 01:44:48 +0000 Subject: [PATCH 378/434] 0.146.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 10 ++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e770a88f..580dffb0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,16 @@ # CHANGELOG +## v0.146.0 (2025-03-05) + +### Features + +- Reduce size of wheels ([#1540](https://github.com/python-zeroconf/python-zeroconf/pull/1540), + [`dea233c`](https://github.com/python-zeroconf/python-zeroconf/commit/dea233c1e0e80584263090727ce07648755964af)) + +feat: reduce size of binaries + + ## v0.145.1 (2025-02-18) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index e6aa0efe..2b94783e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.145.1" +version = "0.146.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index d2235d5c..68e7213d 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.145.1" +__version__ = "0.146.0" __license__ = "LGPL" From fa65cc8791a6f4c53bc29088cb60b83f420b1ae6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 4 Mar 2025 17:06:33 -1000 Subject: [PATCH 379/434] fix: use trusted publishing for uploading wheels (#1541) --- .github/workflows/ci.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 457b4e1d..2fb9b06f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -284,19 +284,19 @@ jobs: needs: [build_wheels] runs-on: ubuntu-latest environment: release + permissions: + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - uses: actions/download-artifact@v4 with: # unpacks default artifact into dist/ # if `name: artifact` is omitted, the action will create extra parent dir - pattern: wheels-* path: dist + pattern: wheels-* merge-multiple: true - - uses: pypa/gh-action-pypi-publish@v1.12.4 - with: - user: __token__ - password: ${{ secrets.PYPI_TOKEN }} + - uses: + pypa/gh-action-pypi-publish@v1.12.4 # To test: repository_url: https://test.pypi.org/legacy/ From ea6905b1a0e6122e1baa0cbb39db1cb91ec0f310 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 5 Mar 2025 03:16:07 +0000 Subject: [PATCH 380/434] 0.146.1 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 580dffb0..f28b0022 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.146.1 (2025-03-05) + 
+### Bug Fixes + +- Use trusted publishing for uploading wheels + ([#1541](https://github.com/python-zeroconf/python-zeroconf/pull/1541), + [`fa65cc8`](https://github.com/python-zeroconf/python-zeroconf/commit/fa65cc8791a6f4c53bc29088cb60b83f420b1ae6)) + + ## v0.146.0 (2025-03-05) ### Features diff --git a/pyproject.toml b/pyproject.toml index 2b94783e..4e056cd7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.146.0" +version = "0.146.1" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 68e7213d..b915b8d7 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.146.0" +__version__ = "0.146.1" __license__ = "LGPL" From 080462ed7bfd49311010a3c06d600e77bcc5fb8d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 9 Mar 2025 19:08:36 -1000 Subject: [PATCH 381/434] chore(deps-dev): bump setuptools from 75.8.2 to 76.0.0 (#1543) --- poetry.lock | 83 ++++++++++++++++++++++++++++++++++++++++---------- pyproject.toml | 2 +- 2 files changed, 68 insertions(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index e0d48822..75863563 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -6,6 +6,7 @@ version = "0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -17,6 +18,7 @@ version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, @@ -31,6 +33,7 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -42,6 +45,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -121,6 +125,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -222,6 +227,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev", "docs"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -233,6 +240,7 @@ version = "7.6.10" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, @@ -302,7 +310,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cython" @@ -310,6 +318,7 @@ version = "3.0.12" description = "The Cython compiler for writing C extensions in the Python language." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["dev"] files = [ {file = "Cython-3.0.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba67eee9413b66dd9fbacd33f0bc2e028a2a120991d77b5fd4b19d0b1e4039b9"}, {file = "Cython-3.0.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee2717e5b5f7d966d0c6e27d2efe3698c357aa4d61bb3201997c7a4f9fe485a"}, @@ -383,6 +392,7 @@ version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -394,6 +404,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -408,6 +420,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -422,6 +435,7 @@ version = "0.2.0" description = "Cross-platform network interface and IP address enumeration library" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ifaddr-0.2.0-py3-none-any.whl", hash = 
"sha256:085e0305cfe6f16ab12d72e2024030f5d52674afad6911bb1eee207177b8a748"}, {file = "ifaddr-0.2.0.tar.gz", hash = "sha256:cc0cbfcaabf765d44595825fb96a99bb12c79716b73b44330ea38ee2b0c4aed4"}, @@ -433,6 +447,7 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["docs"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -444,6 +459,8 @@ version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] +markers = "python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, @@ -453,12 +470,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -467,6 +484,7 @@ version = "2.0.0" description = "brain-dead simple config-ini 
parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -478,6 +496,7 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, @@ -495,6 +514,7 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -519,6 +539,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -589,6 +610,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -600,6 +622,7 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -611,6 +634,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -626,6 +650,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -637,6 +662,7 @@ version = "2.19.1" 
description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, @@ -651,6 +677,7 @@ version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, @@ -673,6 +700,7 @@ version = "0.25.3" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, @@ -691,6 +719,7 @@ version = "3.2.0" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5165774424c7ab8db7e7acdb539763a0e5657996effefdf0664d7fd95158d34"}, {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bd55f92d772592c04a55209950c50880413ae46876e66bd349ef157075ca26c"}, @@ -723,6 +752,7 @@ version = "6.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, @@ -741,6 +771,7 @@ version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, @@ -755,6 +786,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -776,6 +808,7 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -791,23 +824,24 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "75.8.2" +version = "76.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "setuptools-75.8.2-py3-none-any.whl", hash = 
"sha256:558e47c15f1811c1fa7adbd0096669bf76c1d3f433f58324df69f3f5ecac4e8f"}, - {file = "setuptools-75.8.2.tar.gz", hash = "sha256:4880473a969e5f23f2a2be3646b2dfd84af9028716d398e46192f84bc36900d2"}, + {file = "setuptools-76.0.0-py3-none-any.whl", hash = "sha256:199466a166ff664970d0ee145839f5582cb9bca7a0a3a2e795b6a9cb2308e9c6"}, + {file = "setuptools-76.0.0.tar.gz", hash = "sha256:43b4ee60e10b0d0ee98ad11918e114c70701bc6051662a9a675a0496c1a158f4"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", 
"pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "snowballstemmer" @@ -815,6 +849,7 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
optional = false python-versions = "*" +groups = ["docs"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -826,6 +861,7 @@ version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -862,6 +898,7 @@ version = "3.0.2" description = "Read the Docs theme for Sphinx" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13"}, {file = "sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85"}, @@ -881,6 +918,7 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -897,6 +935,7 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = 
"sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -913,6 +952,7 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -929,6 +969,7 @@ version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" optional = false python-versions = ">=2.7" +groups = ["docs"] files = [ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, @@ -943,6 +984,7 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" +groups = ["docs"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -957,6 +999,7 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -973,6 +1016,7 @@ version = 
"2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -989,6 +1033,7 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" +groups = ["dev", "docs"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -1023,6 +1068,7 @@ files = [ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +markers = {dev = "python_full_version <= \"3.11.0a6\"", docs = "python_version < \"3.11\""} [[package]] name = "typing-extensions" @@ -1030,6 +1076,8 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -1041,13 +1089,14 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1058,20 +1107,22 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" +groups = ["dev", "docs"] +markers = "python_version < \"3.10\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.9" -content-hash = "ea903296f015035c594eb8cce08d4dedc716074e33644033938dfdb5f047d72e" 
+content-hash = "f866b539caf6f0140faba8aa19f4e1fae2013a48fc3346747f104dfe62ef290b" diff --git a/pyproject.toml b/pyproject.toml index 4e056cd7..9d38dc55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest = ">=7.2,<9.0" pytest-cov = ">=4,<7" pytest-asyncio = ">=0.20.3,<0.26.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<76.0.0" +setuptools = ">=65.6.3,<77.0.0" pytest-timeout = "^2.1.0" pytest-codspeed = "^3.1.0" From 89e3cbd4ad7ceab07925bfeb8814d3d1163d810f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 10 Mar 2025 06:47:31 -1000 Subject: [PATCH 382/434] chore(pre-commit.ci): pre-commit autoupdate (#1544) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 265f703e..633a2c35 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.9 + rev: v0.9.10 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 019c641dc22c1fb30f7764525ab9777eaa98b388 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 13 Mar 2025 12:40:28 -1000 Subject: [PATCH 383/434] chore: upgrade to ruff 0.1.0 (#1547) --- .pre-commit-config.yaml | 2 +- pyproject.toml | 9 +--- src/zeroconf/_services/browser.py | 2 +- tests/__init__.py | 1 - tests/benchmarks/test_cache.py | 1 - tests/benchmarks/test_incoming.py | 1 - tests/benchmarks/test_outgoing.py | 1 - tests/benchmarks/test_send.py | 3 +- tests/benchmarks/test_txt_properties.py | 1 - tests/conftest.py | 5 +-- tests/services/test_browser.py | 19 ++++---- tests/services/test_info.py | 33 +++++++------- tests/test_asyncio.py | 59 ++++++++++++------------- tests/test_cache.py | 7 ++- tests/test_circular_imports.py | 2 +- tests/test_core.py | 7 ++- tests/test_dns.py | 1 - tests/test_engine.py | 5 +-- tests/test_handlers.py | 33 +++++++------- tests/test_protocol.py | 1 - tests/test_services.py | 1 - tests/test_updates.py | 1 - tests/utils/test_asyncio.py | 9 ++-- tests/utils/test_name.py | 1 - tests/utils/test_net.py | 1 - 25 files changed, 89 insertions(+), 117 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 633a2c35..a38eaca6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.10 + rev: v0.1.0 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/pyproject.toml b/pyproject.toml index 9d38dc55..9c92f362 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -94,22 +94,18 @@ ignore = [ "S101", # use of assert "S104", # S104 Possible binding to all interfaces "PLR0912", # too many to fix right now - "TC001", # too many to fix right now "TID252", # skip "PLR0913", # too late to make changes here "PLR0911", # would be breaking change "TRY003", # too many to fix "SLF001", # design choice - "TC003", # too many to fix "PLR2004" , # too many to fix "PGH004", # too many to fix "PGH003", # too many to fix "SIM110", # this is slower - "FURB136", # 
this is slower for Cython "PYI034", # enable when we drop Py3.10 "PYI032", # breaks Cython "PYI041", # breaks Cython - "FURB188", # usually slower "PERF401", # Cython: closures inside cpdef functions not yet supported ] select = [ @@ -124,7 +120,6 @@ select = [ "I", # isort "RUF", # ruff specific "FLY", # flynt - "FURB", # refurb "G", # flake8-logging-format , "PERF", # Perflint "PGH", # pygrep-hooks @@ -140,7 +135,6 @@ select = [ "SLOT", # flake8-slots "T100", # Trace found: {name} used "T20", # flake8-print - "TC", # flake8-type-checking "TID", # Tidy imports "TRY", # tryceratops ] @@ -171,9 +165,8 @@ select = [ "PLR0913", # skip this one "SIM102" , # too many to fix right now "SIM108", # too many to fix right now - "TC003", # too many to fix right now - "TC002", # too many to fix right now "T201", # too many to fix right now + "PT004", # nice to have ] "bench/**/*" = [ "T201", # intended diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index ab8c050d..6bf3f0f4 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -278,7 +278,7 @@ def generate_service_query( if not qu_question and question_history.suppresses(question, now_millis, known_answers): log.debug("Asking %s was suppressed by the question history", question) continue - if TYPE_CHECKING: + if TYPE_CHECKING: # noqa: SIM108 pointer_known_answers = cast(set[DNSPointer], known_answers) else: pointer_known_answers = known_answers diff --git a/tests/__init__.py b/tests/__init__.py index a70cca60..3df09819 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -29,7 +29,6 @@ from unittest import mock import ifaddr - from zeroconf import DNSIncoming, DNSQuestion, DNSRecord, Zeroconf from zeroconf._history import QuestionHistory diff --git a/tests/benchmarks/test_cache.py b/tests/benchmarks/test_cache.py index 7813f679..e32abda0 100644 --- a/tests/benchmarks/test_cache.py +++ b/tests/benchmarks/test_cache.py @@ -1,7 +1,6 @@ from __future__ 
import annotations from pytest_codspeed import BenchmarkFixture - from zeroconf import DNSCache, DNSPointer, current_time_millis from zeroconf.const import _CLASS_IN, _TYPE_PTR diff --git a/tests/benchmarks/test_incoming.py b/tests/benchmarks/test_incoming.py index 6d31e51e..672e5c78 100644 --- a/tests/benchmarks/test_incoming.py +++ b/tests/benchmarks/test_incoming.py @@ -5,7 +5,6 @@ import socket from pytest_codspeed import BenchmarkFixture - from zeroconf import ( DNSAddress, DNSIncoming, diff --git a/tests/benchmarks/test_outgoing.py b/tests/benchmarks/test_outgoing.py index a8db4d6f..cc2f3f42 100644 --- a/tests/benchmarks/test_outgoing.py +++ b/tests/benchmarks/test_outgoing.py @@ -3,7 +3,6 @@ from __future__ import annotations from pytest_codspeed import BenchmarkFixture - from zeroconf._protocol.outgoing import State from .helpers import generate_packets diff --git a/tests/benchmarks/test_send.py b/tests/benchmarks/test_send.py index 596662a2..d931b48b 100644 --- a/tests/benchmarks/test_send.py +++ b/tests/benchmarks/test_send.py @@ -4,13 +4,12 @@ import pytest from pytest_codspeed import BenchmarkFixture - from zeroconf.asyncio import AsyncZeroconf from .helpers import generate_packets -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_sending_packets(benchmark: BenchmarkFixture) -> None: """Benchmark sending packets.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) diff --git a/tests/benchmarks/test_txt_properties.py b/tests/benchmarks/test_txt_properties.py index 72afa0b6..b7b0e767 100644 --- a/tests/benchmarks/test_txt_properties.py +++ b/tests/benchmarks/test_txt_properties.py @@ -1,7 +1,6 @@ from __future__ import annotations from pytest_codspeed import BenchmarkFixture - from zeroconf import ServiceInfo info = ServiceInfo( diff --git a/tests/conftest.py b/tests/conftest.py index 531c810b..3d891ec4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,7 +6,6 @@ from unittest.mock import patch import pytest - from zeroconf import 
_core, const from zeroconf._handlers import query_handler @@ -20,7 +19,7 @@ def verify_threads_ended(): assert not threads -@pytest.fixture +@pytest.fixture() def run_isolated(): """Change the mDNS port to run the test in isolation.""" with ( @@ -31,7 +30,7 @@ def run_isolated(): yield -@pytest.fixture +@pytest.fixture() def disable_duplicate_packet_suppression(): """Disable duplicate packet suppress. diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index 986df64e..f5237365 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -14,7 +14,6 @@ from unittest.mock import patch import pytest - import zeroconf as r import zeroconf._services.browser as _services_browser from zeroconf import ( @@ -556,7 +555,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): zeroconf_browser.close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_asking_default_is_asking_qm_questions_after_the_first_qu(): """Verify the service browser's first questions are QU and refresh queries are QM.""" service_added = asyncio.Event() @@ -658,7 +657,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_ttl_refresh_cancelled_rescue_query(): """Verify seeing a name again cancels the rescue query.""" service_added = asyncio.Event() @@ -768,7 +767,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_asking_qm_questions(): """Verify explicitly asking QM questions.""" type_ = "_quservice._tcp.local." 
@@ -807,7 +806,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_asking_qu_questions(): """Verify the service browser can ask QU questions.""" type_ = "_quservice._tcp.local." @@ -1139,7 +1138,7 @@ def test_group_ptr_queries_with_known_answers(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_generate_service_query_suppress_duplicate_questions(): """Generate a service query for sending with zeroconf.send.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1192,7 +1191,7 @@ async def test_generate_service_query_suppress_duplicate_questions(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_query_scheduler(): delay = const._BROWSER_TIME types_ = {"_hap._tcp.local.", "_http._tcp.local."} @@ -1285,7 +1284,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_query_scheduler_rescue_records(): delay = const._BROWSER_TIME types_ = {"_hap._tcp.local.", "_http._tcp.local."} @@ -1580,7 +1579,7 @@ def test_scheduled_ptr_query_dunder_methods(): assert query75 >= other # type: ignore[operator] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_close_zeroconf_without_browser_before_start_up_queries(): """Test that we stop sending startup queries if zeroconf is closed out from under the browser.""" service_added = asyncio.Event() @@ -1648,7 +1647,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await browser.async_cancel() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_close_zeroconf_without_browser_after_start_up_queries(): """Test that we stop sending rescue queries if zeroconf is closed out from under the browser.""" service_added = 
asyncio.Event() diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 3d4c5302..8b912bea 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -14,7 +14,6 @@ from unittest.mock import patch import pytest - import zeroconf as r from zeroconf import DNSAddress, RecordUpdate, const from zeroconf._services import info @@ -828,7 +827,7 @@ def test_scoped_addresses_from_cache(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_multiple_a_addresses_newest_address_first(): """Test that info.addresses returns the newest seen address first.""" type_ = "_http._tcp.local." @@ -848,7 +847,7 @@ async def test_multiple_a_addresses_newest_address_first(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_invalid_a_addresses(caplog): type_ = "_http._tcp.local." registration_name = f"multiarec.{type_}" @@ -1057,7 +1056,7 @@ def test_request_timeout(): assert (end_time - start_time) < 3000 + 1000 -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_we_try_four_times_with_random_delay(): """Verify we try four times even with the random delay.""" type_ = "_typethatisnothere._tcp.local." @@ -1080,7 +1079,7 @@ def async_send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): assert request_count == 4 -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_release_wait_when_new_recorded_added(): """Test that async_request returns as soon as new matching records are added to the cache.""" type_ = "_http._tcp.local." @@ -1145,7 +1144,7 @@ async def test_release_wait_when_new_recorded_added(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_port_changes_are_seen(): """Test that port changes are seen by async_request.""" type_ = "_http._tcp.local." 
@@ -1228,7 +1227,7 @@ async def test_port_changes_are_seen(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_port_changes_are_seen_with_directed_request(): """Test that port changes are seen by async_request with a directed request.""" type_ = "_http._tcp.local." @@ -1311,7 +1310,7 @@ async def test_port_changes_are_seen_with_directed_request(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_ipv4_changes_are_seen(): """Test that ipv4 changes are seen by async_request.""" type_ = "_http._tcp.local." @@ -1399,7 +1398,7 @@ async def test_ipv4_changes_are_seen(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_ipv6_changes_are_seen(): """Test that ipv6 changes are seen by async_request.""" type_ = "_http._tcp.local." @@ -1494,7 +1493,7 @@ async def test_ipv6_changes_are_seen(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_bad_ip_addresses_ignored_in_cache(): """Test that bad ip address in the cache are ignored async_request.""" type_ = "_http._tcp.local." @@ -1548,7 +1547,7 @@ async def test_bad_ip_addresses_ignored_in_cache(): assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x01"] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_service_name_change_as_seen_has_ip_in_cache(): """Test that service name changes are seen by async_request when the ip is in the cache.""" type_ = "_http._tcp.local." @@ -1630,7 +1629,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_service_name_change_as_seen_ip_not_in_cache(): """Test that service name changes are seen by async_request when the ip is not in the cache.""" type_ = "_http._tcp.local." 
@@ -1712,7 +1711,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() @patch.object(info, "_LISTENER_TIME", 10000000) async def test_release_wait_when_new_recorded_added_concurrency(): """Test that concurrent async_request returns as soon as new matching records are added to the cache.""" @@ -1784,7 +1783,7 @@ async def test_release_wait_when_new_recorded_added_concurrency(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_service_info_nsec_records(): """Test we can generate nsec records from ServiceInfo.""" type_ = "_http._tcp.local." @@ -1799,7 +1798,7 @@ async def test_service_info_nsec_records(): assert nsec_record.rdtypes == [const._TYPE_A, const._TYPE_AAAA] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_address_resolver(): """Test that the address resolver works.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1823,7 +1822,7 @@ async def test_address_resolver(): assert resolver.addresses == [b"\x7f\x00\x00\x01"] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_address_resolver_ipv4(): """Test that the IPv4 address resolver works.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1847,7 +1846,7 @@ async def test_address_resolver_ipv4(): assert resolver.addresses == [b"\x7f\x00\x00\x01"] -@pytest.mark.asyncio +@pytest.mark.asyncio() @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") async def test_address_resolver_ipv6(): diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 40ecf816..e3102507 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -11,7 +11,6 @@ from unittest.mock import ANY, call, patch import pytest - import zeroconf._services.browser as _services_browser from zeroconf import ( DNSAddress, @@ -79,14 +78,14 @@ def verify_threads_ended(): assert not threads -@pytest.mark.asyncio 
+@pytest.mark.asyncio() async def test_async_basic_usage() -> None: """Test we can create and close the instance.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_close_twice() -> None: """Test we can close twice.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -94,7 +93,7 @@ async def test_async_close_twice() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_with_sync_passed_in() -> None: """Test we can create and close the instance when passing in a sync Zeroconf.""" zc = Zeroconf(interfaces=["127.0.0.1"]) @@ -103,7 +102,7 @@ async def test_async_with_sync_passed_in() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_with_sync_passed_in_closed_in_async() -> None: """Test caller closes the sync version in async.""" zc = Zeroconf(interfaces=["127.0.0.1"]) @@ -113,7 +112,7 @@ async def test_async_with_sync_passed_in_closed_in_async() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_sync_within_event_loop_executor() -> None: """Test sync version still works from an executor within an event loop.""" @@ -125,7 +124,7 @@ def sync_code(): await asyncio.get_event_loop().run_in_executor(None, sync_code) -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_registration() -> None: """Test registering services broadcasts the registration by default.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -192,7 +191,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_registration_with_server_missing() -> None: """Test registering a service with the server not specified. 
@@ -259,7 +258,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_registration_same_server_different_ports() -> None: """Test registering services with the same server with different srv records.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -326,7 +325,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_registration_same_server_same_ports() -> None: """Test registering services with the same server with the exact same srv record.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -393,7 +392,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_registration_name_conflict() -> None: """Test registering services throws on name conflict.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -441,7 +440,7 @@ async def test_async_service_registration_name_conflict() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_registration_name_does_not_match_type() -> None: """Test registering services throws when the name does not match the type.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -467,7 +466,7 @@ async def test_async_service_registration_name_does_not_match_type() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_registration_name_strict_check() -> None: """Test registering services throws when the name does not comply.""" zc = Zeroconf(interfaces=["127.0.0.1"]) @@ -502,7 +501,7 @@ async def test_async_service_registration_name_strict_check() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_tasks() -> None: """Test awaiting broadcast tasks""" @@ -568,7 +567,7 @@ def 
update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_wait_unblocks_on_update() -> None: """Test async_wait will unblock on update.""" @@ -604,7 +603,7 @@ async def test_async_wait_unblocks_on_update() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_service_info_async_request() -> None: """Test registering services broadcasts and query with AsyncServceInfo.async_request.""" if not has_working_ipv6() or os.environ.get("SKIP_IPV6"): @@ -713,7 +712,7 @@ async def test_service_info_async_request() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_service_browser() -> None: """Test AsyncServiceBrowser.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -773,7 +772,7 @@ def update_service(self, aiozc: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_context_manager() -> None: """Test using an async context manager.""" type_ = "_test10-sr-type._tcp.local." @@ -797,7 +796,7 @@ async def test_async_context_manager() -> None: assert aiosinfo is not None -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_service_browser_cancel_async_context_manager(): """Test we can cancel an AsyncServiceBrowser with it being used as an async context manager.""" @@ -823,7 +822,7 @@ class MyServiceListener(ServiceListener): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_unregister_all_services() -> None: """Test unregistering all services.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -882,7 +881,7 @@ async def test_async_unregister_all_services() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_zeroconf_service_types(): type_ = "_test-srvc-type._tcp.local." 
name = "xxxyyy" @@ -916,7 +915,7 @@ async def test_async_zeroconf_service_types(): await zeroconf_registrar.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_guard_against_running_serviceinfo_request_event_loop() -> None: """Test that running ServiceInfo.request from the event loop throws.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -927,7 +926,7 @@ async def test_guard_against_running_serviceinfo_request_event_loop() -> None: await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_service_browser_instantiation_generates_add_events_from_cache(): """Test that the ServiceBrowser will generate Add events with the existing cache when starting.""" @@ -976,7 +975,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_integration(): service_added = asyncio.Event() service_removed = asyncio.Event() @@ -1124,7 +1123,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_info_asking_default_is_asking_qm_questions_after_the_first_qu(): """Verify the service info first question is QU and subsequent ones are QM questions.""" type_ = "_quservice._tcp.local." 
@@ -1178,7 +1177,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_service_browser_ignores_unrelated_updates(): """Test that the ServiceBrowser ignores unrelated updates.""" @@ -1275,7 +1274,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_request_timeout(): """Test that the timeout does not throw an exception and finishes close to the actual timeout.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1289,7 +1288,7 @@ async def test_async_request_timeout(): assert (end_time - start_time) < 3000 + 1000 -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_request_non_running_instance(): """Test that the async_request throws when zeroconf is not running.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1298,7 +1297,7 @@ async def test_async_request_non_running_instance(): await aiozc.async_get_service_info("_notfound.local.", "notthere._notfound.local.") -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_legacy_unicast_response(run_isolated): """Verify legacy unicast responses include questions and correct id.""" type_ = "_mservice._tcp.local." 
@@ -1339,7 +1338,7 @@ async def test_legacy_unicast_response(run_isolated): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_update_with_uppercase_names(run_isolated): """Test an ip update from a shelly which uses uppercase names.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) diff --git a/tests/test_cache.py b/tests/test_cache.py index 9d55435d..5bd6a869 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -7,7 +7,6 @@ from heapq import heapify, heappop import pytest - import zeroconf as r from zeroconf import const @@ -364,7 +363,7 @@ def test_async_get_unique_returns_newest_record(): assert record is record2 -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_cache_heap_cleanup() -> None: """Test that the heap gets cleaned up when there are many old expirations.""" cache = r.DNSCache() @@ -416,7 +415,7 @@ async def test_cache_heap_cleanup() -> None: assert not cache.async_entries_with_name(name), cache._expire_heap -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_cache_heap_multi_name_cleanup() -> None: """Test cleanup with multiple names.""" cache = r.DNSCache() @@ -452,7 +451,7 @@ async def test_cache_heap_multi_name_cleanup() -> None: assert not cache.async_entries_with_name(name), cache._expire_heap -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_cache_heap_pops_order() -> None: """Test cache heap is popped in order.""" cache = r.DNSCache() diff --git a/tests/test_circular_imports.py b/tests/test_circular_imports.py index 74ed1f12..79d58ae1 100644 --- a/tests/test_circular_imports.py +++ b/tests/test_circular_imports.py @@ -8,7 +8,7 @@ import pytest -@pytest.mark.asyncio +@pytest.mark.asyncio() @pytest.mark.timeout(30) # cloud can take > 9s @pytest.mark.parametrize( "module", diff --git a/tests/test_core.py b/tests/test_core.py index fcfdf424..1dfb9806 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -15,7 +15,6 @@ from unittest.mock import AsyncMock, Mock, patch 
import pytest - import zeroconf as r from zeroconf import NotRunningException, Zeroconf, const, current_time_millis from zeroconf._listener import AsyncListener, _WrappedTransport @@ -665,7 +664,7 @@ def test_tc_bit_defers_last_response_missing(): zc.close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_open_close_twice_from_async() -> None: """Test we can close twice from a coroutine when using Zeroconf. @@ -685,7 +684,7 @@ async def test_open_close_twice_from_async() -> None: await asyncio.sleep(0) -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_multiple_sync_instances_stared_from_async_close(): """Test we can shutdown multiple sync instances from async.""" @@ -741,7 +740,7 @@ def _background_register(): bgthread.join() -@pytest.mark.asyncio +@pytest.mark.asyncio() @patch("zeroconf._core._STARTUP_TIMEOUT", 0) @patch("zeroconf._core.AsyncEngine._async_setup", new_callable=AsyncMock) async def test_event_loop_blocked(mock_start): diff --git a/tests/test_dns.py b/tests/test_dns.py index 246c8dcf..5928338c 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -8,7 +8,6 @@ import unittest.mock import pytest - import zeroconf as r from zeroconf import DNSHinfo, DNSText, ServiceInfo, const, current_time_millis from zeroconf._dns import DNSRRSet diff --git a/tests/test_engine.py b/tests/test_engine.py index b7a94c86..5f244804 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -8,7 +8,6 @@ from unittest.mock import patch import pytest - import zeroconf as r from zeroconf import _engine, const from zeroconf.asyncio import AsyncZeroconf @@ -30,7 +29,7 @@ def teardown_module(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_reaper(): with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -65,7 +64,7 @@ async def test_reaper(): assert record_with_1s_ttl not in 
entries -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_reaper_aborts_when_done(): """Ensure cache cleanup stops when zeroconf is done.""" with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): diff --git a/tests/test_handlers.py b/tests/test_handlers.py index ffa4ff88..58f8ecb1 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -13,7 +13,6 @@ from unittest.mock import patch import pytest - import zeroconf as r from zeroconf import ServiceInfo, Zeroconf, const, current_time_millis from zeroconf._handlers.multicast_outgoing_queue import ( @@ -493,7 +492,7 @@ def test_unicast_response(): zc.close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_probe_answered_immediately(): """Verify probes are responded to immediately.""" # instantiate a zeroconf instance @@ -544,7 +543,7 @@ async def test_probe_answered_immediately(): zc.close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_probe_answered_immediately_with_uppercase_name(): """Verify probes are responded to immediately with an uppercase name.""" # instantiate a zeroconf instance @@ -1092,7 +1091,7 @@ def test_enumeration_query_with_no_registered_services(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_qu_response_only_sends_additionals_if_sends_answer(): """Test that a QU response does not send additionals unless it sends the answer as well.""" # instantiate a zeroconf instance @@ -1258,7 +1257,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_cache_flush_bit(): """Test that the cache flush bit sets the TTL to one for matching records.""" # instantiate a zeroconf instance @@ -1361,7 +1360,7 @@ async def test_cache_flush_bit(): # This test uses asyncio because it 
needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_record_update_manager_add_listener_callsback_existing_records(): """Test that the RecordUpdateManager will callback existing records.""" @@ -1415,7 +1414,7 @@ def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordU await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_questions_query_handler_populates_the_question_history_from_qm_questions(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf @@ -1461,7 +1460,7 @@ async def test_questions_query_handler_populates_the_question_history_from_qm_qu await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_questions_query_handler_does_not_put_qu_questions_in_history(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf @@ -1504,7 +1503,7 @@ async def test_questions_query_handler_does_not_put_qu_questions_in_history(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_guard_against_low_ptr_ttl(): """Ensure we enforce a min for PTR record ttls to avoid excessive refresh queries from ServiceBrowsers. @@ -1555,7 +1554,7 @@ async def test_guard_against_low_ptr_ttl(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_duplicate_goodbye_answers_in_packet(): """Ensure we do not throw an exception when there are duplicate goodbye records in a packet.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1587,7 +1586,7 @@ async def test_duplicate_goodbye_answers_in_packet(): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_response_aggregation_timings(run_isolated): """Verify multicast responses are aggregated.""" type_ = "_mservice._tcp.local." 
@@ -1709,7 +1708,7 @@ async def test_response_aggregation_timings(run_isolated): await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_response_aggregation_timings_multiple(run_isolated, disable_duplicate_packet_suppression): """Verify multicast responses that are aggregated do not take longer than 620ms to send. @@ -1791,7 +1790,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli assert info2.dns_pointer() in incoming.answers() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_response_aggregation_random_delay(): """Verify the random delay for outgoing multicast will coalesce into a single group @@ -1899,7 +1898,7 @@ async def test_response_aggregation_random_delay(): assert info5.dns_pointer() in outgoing_queue.queue[1].answers -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_future_answers_are_removed_on_send(): """Verify any future answers scheduled to be sent are removed when we send.""" type_ = "_mservice._tcp.local." 
@@ -1963,7 +1962,7 @@ async def test_future_answers_are_removed_on_send(): assert info2.dns_pointer() in outgoing_queue.queue[0].answers -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_add_listener_warns_when_not_using_record_update_listener(caplog): """Log when a listener is added that is not using RecordUpdateListener as a base class.""" @@ -1988,7 +1987,7 @@ def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordU await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_updates_iteration_safe(): """Ensure we can safely iterate over the async_updates.""" @@ -2032,7 +2031,7 @@ def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordU await aiozc.async_close() -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_updates_complete_iteration_safe(): """Ensure we can safely iterate over the async_updates_complete.""" diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 08d7e600..78fed0e0 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -11,7 +11,6 @@ from typing import cast import pytest - import zeroconf as r from zeroconf import DNSHinfo, DNSIncoming, DNSText, const, current_time_millis diff --git a/tests/test_services.py b/tests/test_services.py index 7d7c3fc7..d192c652 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -11,7 +11,6 @@ from typing import Any import pytest - import zeroconf as r from zeroconf import Zeroconf from zeroconf._services.info import ServiceInfo diff --git a/tests/test_updates.py b/tests/test_updates.py index a057486c..376082e7 100644 --- a/tests/test_updates.py +++ b/tests/test_updates.py @@ -7,7 +7,6 @@ import time import pytest - import zeroconf as r from zeroconf import Zeroconf, const from zeroconf._record_update import RecordUpdate diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index 7989a82c..4d2ee0ec 100644 --- a/tests/utils/test_asyncio.py +++ 
b/tests/utils/test_asyncio.py @@ -10,14 +10,13 @@ from unittest.mock import patch import pytest - from zeroconf import EventLoopBlocked from zeroconf._engine import _CLOSE_TIMEOUT from zeroconf._utils import asyncio as aioutils from zeroconf.const import _LOADED_SYSTEM_TIMEOUT -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_async_get_all_tasks() -> None: """Test we can get all tasks in the event loop. @@ -33,7 +32,7 @@ async def test_async_get_all_tasks() -> None: await aioutils._async_get_all_tasks(loop) -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_get_running_loop_from_async() -> None: """Test we can get the event loop.""" assert isinstance(aioutils.get_running_loop(), asyncio.AbstractEventLoop) @@ -44,7 +43,7 @@ def test_get_running_loop_no_loop() -> None: assert aioutils.get_running_loop() is None -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_wait_future_or_timeout_times_out() -> None: """Test wait_future_or_timeout will timeout.""" loop = asyncio.get_running_loop() @@ -118,7 +117,7 @@ def test_cumulative_timeouts_less_than_close_plus_buffer(): ) < 1 + _CLOSE_TIMEOUT + _LOADED_SYSTEM_TIMEOUT -@pytest.mark.asyncio +@pytest.mark.asyncio() async def test_run_coro_with_timeout() -> None: """Test running a coroutine with a timeout raises EventLoopBlocked.""" loop = asyncio.get_event_loop() diff --git a/tests/utils/test_name.py b/tests/utils/test_name.py index 1feb7713..3b70c7d4 100644 --- a/tests/utils/test_name.py +++ b/tests/utils/test_name.py @@ -5,7 +5,6 @@ import socket import pytest - from zeroconf import BadTypeInNameException from zeroconf._services.info import ServiceInfo, instance_name_from_service_info from zeroconf._utils import name as nameutils diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index f763b655..17ff6196 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -10,7 +10,6 @@ import ifaddr import pytest - import zeroconf as r from zeroconf._utils import net as netutils 
From 806e3678c0a6552f9b2f43d38eb673d509006d51 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 13 Mar 2025 13:11:53 -1000 Subject: [PATCH 384/434] chore: update deps (#1548) dependabot is still having issues so lets do this manually for now - Updating certifi (2024.12.14 -> 2025.1.31) - Updating jinja2 (3.1.5 -> 3.1.6) - Updating babel (2.16.0 -> 2.17.0) - Updating coverage (7.6.10 -> 7.6.12) - Updating pytest (8.3.4 -> 8.3.5) - Updating cython (3.0.11 -> 3.0.12) - Updating setuptools (75.8.0 -> 76.0.0) --- poetry.lock | 177 ++++++++++++++++++++++++++-------------------------- 1 file changed, 89 insertions(+), 88 deletions(-) diff --git a/poetry.lock b/poetry.lock index 75863563..8c4713e8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "alabaster" @@ -14,29 +14,29 @@ files = [ [[package]] name = "babel" -version = "2.16.0" +version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["docs"] files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "certifi" -version = "2024.12.14" +version = "2025.1.31" description = 
"Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" groups = ["docs"] files = [ - {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, - {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -236,81 +236,82 @@ files = [ [[package]] name = "coverage" -version = "7.6.10" +version = "7.6.12" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, - {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, - {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, - {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, - {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, - {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, - {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, - {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, - {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, - {file = 
"coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, - {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, - {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, - {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, - {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, - {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, - {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, - {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, - {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, - {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, - {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = 
"coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "cython" @@ -455,27 +456,27 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.5.0" +version = "8.6.1" description = "Read metadata from Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev", "docs"] markers = "python_version < \"3.10\"" files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = 
"importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] [package.dependencies] zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -492,14 +493,14 @@ files = [ [[package]] name = "jinja2" -version = "3.1.5" +version = "3.1.6" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" groups = ["docs"] files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -835,13 +836,13 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] -core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", 
"jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "snowballstemmer" @@ -1096,7 +1097,7 @@ files = [ ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1115,11 +1116,11 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", 
"pytest-ignore-flaky"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] From 5915e5b417be7443e98e869f4fc9ba1ae68414d8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 11:01:55 -1000 Subject: [PATCH 385/434] chore(pre-commit.ci): pre-commit autoupdate (#1549) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(pre-commit.ci): pre-commit autoupdate updates: - [github.com/astral-sh/ruff-pre-commit: v0.1.0 → v0.11.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.1.0...v0.11.0) * chore(pre-commit.ci): auto fixes * chore: fix violations --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston --- .pre-commit-config.yaml | 2 +- build_ext.py | 2 +- src/zeroconf/_services/browser.py | 2 +- tests/__init__.py | 1 + tests/benchmarks/test_cache.py | 1 + tests/benchmarks/test_incoming.py | 1 + tests/benchmarks/test_outgoing.py | 1 + tests/benchmarks/test_send.py | 3 +- tests/benchmarks/test_txt_properties.py | 1 + tests/conftest.py | 5 ++- tests/services/test_browser.py | 21 ++++----- tests/services/test_info.py | 33 +++++++------- tests/test_asyncio.py | 59 +++++++++++++------------ tests/test_cache.py | 7 +-- tests/test_circular_imports.py | 2 +- tests/test_core.py | 7 +-- tests/test_dns.py | 1 + tests/test_engine.py | 5 ++- tests/test_handlers.py | 33 +++++++------- tests/test_protocol.py | 1 + tests/test_services.py | 1 + tests/test_updates.py | 3 +- tests/utils/test_asyncio.py | 9 ++-- tests/utils/test_name.py | 1 + tests/utils/test_net.py | 1 + 25 files changed, 112 insertions(+), 91 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a38eaca6..5d03fcde 100644 --- a/.pre-commit-config.yaml +++ 
b/.pre-commit-config.yaml @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.0 + rev: v0.11.0 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/build_ext.py b/build_ext.py index 7faa607f..ff088f83 100644 --- a/build_ext.py +++ b/build_ext.py @@ -53,7 +53,7 @@ def build_extensions(self) -> None: def build(setup_kwargs: Any) -> None: - if os.environ.get("SKIP_CYTHON", False): + if os.environ.get("SKIP_CYTHON"): return try: from Cython.Build import cythonize diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 6bf3f0f4..ab8c050d 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -278,7 +278,7 @@ def generate_service_query( if not qu_question and question_history.suppresses(question, now_millis, known_answers): log.debug("Asking %s was suppressed by the question history", question) continue - if TYPE_CHECKING: # noqa: SIM108 + if TYPE_CHECKING: pointer_known_answers = cast(set[DNSPointer], known_answers) else: pointer_known_answers = known_answers diff --git a/tests/__init__.py b/tests/__init__.py index 3df09819..a70cca60 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -29,6 +29,7 @@ from unittest import mock import ifaddr + from zeroconf import DNSIncoming, DNSQuestion, DNSRecord, Zeroconf from zeroconf._history import QuestionHistory diff --git a/tests/benchmarks/test_cache.py b/tests/benchmarks/test_cache.py index e32abda0..7813f679 100644 --- a/tests/benchmarks/test_cache.py +++ b/tests/benchmarks/test_cache.py @@ -1,6 +1,7 @@ from __future__ import annotations from pytest_codspeed import BenchmarkFixture + from zeroconf import DNSCache, DNSPointer, current_time_millis from zeroconf.const import _CLASS_IN, _TYPE_PTR diff --git a/tests/benchmarks/test_incoming.py b/tests/benchmarks/test_incoming.py index 672e5c78..6d31e51e 100644 --- a/tests/benchmarks/test_incoming.py +++ 
b/tests/benchmarks/test_incoming.py @@ -5,6 +5,7 @@ import socket from pytest_codspeed import BenchmarkFixture + from zeroconf import ( DNSAddress, DNSIncoming, diff --git a/tests/benchmarks/test_outgoing.py b/tests/benchmarks/test_outgoing.py index cc2f3f42..a8db4d6f 100644 --- a/tests/benchmarks/test_outgoing.py +++ b/tests/benchmarks/test_outgoing.py @@ -3,6 +3,7 @@ from __future__ import annotations from pytest_codspeed import BenchmarkFixture + from zeroconf._protocol.outgoing import State from .helpers import generate_packets diff --git a/tests/benchmarks/test_send.py b/tests/benchmarks/test_send.py index d931b48b..596662a2 100644 --- a/tests/benchmarks/test_send.py +++ b/tests/benchmarks/test_send.py @@ -4,12 +4,13 @@ import pytest from pytest_codspeed import BenchmarkFixture + from zeroconf.asyncio import AsyncZeroconf from .helpers import generate_packets -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_sending_packets(benchmark: BenchmarkFixture) -> None: """Benchmark sending packets.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) diff --git a/tests/benchmarks/test_txt_properties.py b/tests/benchmarks/test_txt_properties.py index b7b0e767..72afa0b6 100644 --- a/tests/benchmarks/test_txt_properties.py +++ b/tests/benchmarks/test_txt_properties.py @@ -1,6 +1,7 @@ from __future__ import annotations from pytest_codspeed import BenchmarkFixture + from zeroconf import ServiceInfo info = ServiceInfo( diff --git a/tests/conftest.py b/tests/conftest.py index 3d891ec4..531c810b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,6 +6,7 @@ from unittest.mock import patch import pytest + from zeroconf import _core, const from zeroconf._handlers import query_handler @@ -19,7 +20,7 @@ def verify_threads_ended(): assert not threads -@pytest.fixture() +@pytest.fixture def run_isolated(): """Change the mDNS port to run the test in isolation.""" with ( @@ -30,7 +31,7 @@ def run_isolated(): yield -@pytest.fixture() +@pytest.fixture def 
disable_duplicate_packet_suppression(): """Disable duplicate packet suppress. diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index f5237365..d57568f4 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -14,6 +14,7 @@ from unittest.mock import patch import pytest + import zeroconf as r import zeroconf._services.browser as _services_browser from zeroconf import ( @@ -555,7 +556,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): zeroconf_browser.close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_asking_default_is_asking_qm_questions_after_the_first_qu(): """Verify the service browser's first questions are QU and refresh queries are QM.""" service_added = asyncio.Event() @@ -657,7 +658,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_ttl_refresh_cancelled_rescue_query(): """Verify seeing a name again cancels the rescue query.""" service_added = asyncio.Event() @@ -767,7 +768,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_asking_qm_questions(): """Verify explicitly asking QM questions.""" type_ = "_quservice._tcp.local." @@ -806,7 +807,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_asking_qu_questions(): """Verify the service browser can ask QU questions.""" type_ = "_quservice._tcp.local." 
@@ -898,7 +899,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): browser.cancel() - assert len(updates) + assert updates assert len([isinstance(update, r.DNSPointer) and update.name == type_ for update in updates]) >= 1 zc.remove_listener(listener) @@ -1138,7 +1139,7 @@ def test_group_ptr_queries_with_known_answers(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_generate_service_query_suppress_duplicate_questions(): """Generate a service query for sending with zeroconf.send.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1191,7 +1192,7 @@ async def test_generate_service_query_suppress_duplicate_questions(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_query_scheduler(): delay = const._BROWSER_TIME types_ = {"_hap._tcp.local.", "_http._tcp.local."} @@ -1284,7 +1285,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_query_scheduler_rescue_records(): delay = const._BROWSER_TIME types_ = {"_hap._tcp.local.", "_http._tcp.local."} @@ -1579,7 +1580,7 @@ def test_scheduled_ptr_query_dunder_methods(): assert query75 >= other # type: ignore[operator] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_close_zeroconf_without_browser_before_start_up_queries(): """Test that we stop sending startup queries if zeroconf is closed out from under the browser.""" service_added = asyncio.Event() @@ -1647,7 +1648,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await browser.async_cancel() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_close_zeroconf_without_browser_after_start_up_queries(): """Test that we stop sending rescue queries if zeroconf is closed out from under the browser.""" service_added = asyncio.Event() diff 
--git a/tests/services/test_info.py b/tests/services/test_info.py index 8b912bea..3d4c5302 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -14,6 +14,7 @@ from unittest.mock import patch import pytest + import zeroconf as r from zeroconf import DNSAddress, RecordUpdate, const from zeroconf._services import info @@ -827,7 +828,7 @@ def test_scoped_addresses_from_cache(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_multiple_a_addresses_newest_address_first(): """Test that info.addresses returns the newest seen address first.""" type_ = "_http._tcp.local." @@ -847,7 +848,7 @@ async def test_multiple_a_addresses_newest_address_first(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_invalid_a_addresses(caplog): type_ = "_http._tcp.local." registration_name = f"multiarec.{type_}" @@ -1056,7 +1057,7 @@ def test_request_timeout(): assert (end_time - start_time) < 3000 + 1000 -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_we_try_four_times_with_random_delay(): """Verify we try four times even with the random delay.""" type_ = "_typethatisnothere._tcp.local." @@ -1079,7 +1080,7 @@ def async_send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): assert request_count == 4 -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_release_wait_when_new_recorded_added(): """Test that async_request returns as soon as new matching records are added to the cache.""" type_ = "_http._tcp.local." @@ -1144,7 +1145,7 @@ async def test_release_wait_when_new_recorded_added(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_port_changes_are_seen(): """Test that port changes are seen by async_request.""" type_ = "_http._tcp.local." 
@@ -1227,7 +1228,7 @@ async def test_port_changes_are_seen(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_port_changes_are_seen_with_directed_request(): """Test that port changes are seen by async_request with a directed request.""" type_ = "_http._tcp.local." @@ -1310,7 +1311,7 @@ async def test_port_changes_are_seen_with_directed_request(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_ipv4_changes_are_seen(): """Test that ipv4 changes are seen by async_request.""" type_ = "_http._tcp.local." @@ -1398,7 +1399,7 @@ async def test_ipv4_changes_are_seen(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_ipv6_changes_are_seen(): """Test that ipv6 changes are seen by async_request.""" type_ = "_http._tcp.local." @@ -1493,7 +1494,7 @@ async def test_ipv6_changes_are_seen(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_bad_ip_addresses_ignored_in_cache(): """Test that bad ip address in the cache are ignored async_request.""" type_ = "_http._tcp.local." @@ -1547,7 +1548,7 @@ async def test_bad_ip_addresses_ignored_in_cache(): assert info.addresses_by_version(IPVersion.V4Only) == [b"\x7f\x00\x00\x01"] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_service_name_change_as_seen_has_ip_in_cache(): """Test that service name changes are seen by async_request when the ip is in the cache.""" type_ = "_http._tcp.local." @@ -1629,7 +1630,7 @@ async def test_service_name_change_as_seen_has_ip_in_cache(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_service_name_change_as_seen_ip_not_in_cache(): """Test that service name changes are seen by async_request when the ip is not in the cache.""" type_ = "_http._tcp.local." 
@@ -1711,7 +1712,7 @@ async def test_service_name_change_as_seen_ip_not_in_cache(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio @patch.object(info, "_LISTENER_TIME", 10000000) async def test_release_wait_when_new_recorded_added_concurrency(): """Test that concurrent async_request returns as soon as new matching records are added to the cache.""" @@ -1783,7 +1784,7 @@ async def test_release_wait_when_new_recorded_added_concurrency(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_service_info_nsec_records(): """Test we can generate nsec records from ServiceInfo.""" type_ = "_http._tcp.local." @@ -1798,7 +1799,7 @@ async def test_service_info_nsec_records(): assert nsec_record.rdtypes == [const._TYPE_A, const._TYPE_AAAA] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_address_resolver(): """Test that the address resolver works.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1822,7 +1823,7 @@ async def test_address_resolver(): assert resolver.addresses == [b"\x7f\x00\x00\x01"] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_address_resolver_ipv4(): """Test that the IPv4 address resolver works.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1846,7 +1847,7 @@ async def test_address_resolver_ipv4(): assert resolver.addresses == [b"\x7f\x00\x00\x01"] -@pytest.mark.asyncio() +@pytest.mark.asyncio @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") async def test_address_resolver_ipv6(): diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index e3102507..40ecf816 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -11,6 +11,7 @@ from unittest.mock import ANY, call, patch import pytest + import zeroconf._services.browser as _services_browser from zeroconf import ( DNSAddress, @@ -78,14 +79,14 @@ def verify_threads_ended(): assert not threads -@pytest.mark.asyncio() 
+@pytest.mark.asyncio async def test_async_basic_usage() -> None: """Test we can create and close the instance.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_close_twice() -> None: """Test we can close twice.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -93,7 +94,7 @@ async def test_async_close_twice() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_with_sync_passed_in() -> None: """Test we can create and close the instance when passing in a sync Zeroconf.""" zc = Zeroconf(interfaces=["127.0.0.1"]) @@ -102,7 +103,7 @@ async def test_async_with_sync_passed_in() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_with_sync_passed_in_closed_in_async() -> None: """Test caller closes the sync version in async.""" zc = Zeroconf(interfaces=["127.0.0.1"]) @@ -112,7 +113,7 @@ async def test_async_with_sync_passed_in_closed_in_async() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_sync_within_event_loop_executor() -> None: """Test sync version still works from an executor within an event loop.""" @@ -124,7 +125,7 @@ def sync_code(): await asyncio.get_event_loop().run_in_executor(None, sync_code) -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_registration() -> None: """Test registering services broadcasts the registration by default.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -191,7 +192,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_registration_with_server_missing() -> None: """Test registering a service with the server not specified. 
@@ -258,7 +259,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_registration_same_server_different_ports() -> None: """Test registering services with the same server with different srv records.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -325,7 +326,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_registration_same_server_same_ports() -> None: """Test registering services with the same server with the exact same srv record.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -392,7 +393,7 @@ def update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_registration_name_conflict() -> None: """Test registering services throws on name conflict.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -440,7 +441,7 @@ async def test_async_service_registration_name_conflict() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_registration_name_does_not_match_type() -> None: """Test registering services throws when the name does not match the type.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -466,7 +467,7 @@ async def test_async_service_registration_name_does_not_match_type() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_registration_name_strict_check() -> None: """Test registering services throws when the name does not comply.""" zc = Zeroconf(interfaces=["127.0.0.1"]) @@ -501,7 +502,7 @@ async def test_async_service_registration_name_strict_check() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_tasks() -> None: """Test awaiting broadcast tasks""" @@ -567,7 +568,7 @@ def 
update_service(self, zeroconf: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_wait_unblocks_on_update() -> None: """Test async_wait will unblock on update.""" @@ -603,7 +604,7 @@ async def test_async_wait_unblocks_on_update() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_service_info_async_request() -> None: """Test registering services broadcasts and query with AsyncServceInfo.async_request.""" if not has_working_ipv6() or os.environ.get("SKIP_IPV6"): @@ -712,7 +713,7 @@ async def test_service_info_async_request() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_service_browser() -> None: """Test AsyncServiceBrowser.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -772,7 +773,7 @@ def update_service(self, aiozc: Zeroconf, type: str, name: str) -> None: ] -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_context_manager() -> None: """Test using an async context manager.""" type_ = "_test10-sr-type._tcp.local." @@ -796,7 +797,7 @@ async def test_async_context_manager() -> None: assert aiosinfo is not None -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_service_browser_cancel_async_context_manager(): """Test we can cancel an AsyncServiceBrowser with it being used as an async context manager.""" @@ -822,7 +823,7 @@ class MyServiceListener(ServiceListener): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_unregister_all_services() -> None: """Test unregistering all services.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -881,7 +882,7 @@ async def test_async_unregister_all_services() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_zeroconf_service_types(): type_ = "_test-srvc-type._tcp.local." 
name = "xxxyyy" @@ -915,7 +916,7 @@ async def test_async_zeroconf_service_types(): await zeroconf_registrar.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_guard_against_running_serviceinfo_request_event_loop() -> None: """Test that running ServiceInfo.request from the event loop throws.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -926,7 +927,7 @@ async def test_guard_against_running_serviceinfo_request_event_loop() -> None: await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_service_browser_instantiation_generates_add_events_from_cache(): """Test that the ServiceBrowser will generate Add events with the existing cache when starting.""" @@ -975,7 +976,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_integration(): service_added = asyncio.Event() service_removed = asyncio.Event() @@ -1123,7 +1124,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT, v6_flow_scope=()): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_info_asking_default_is_asking_qm_questions_after_the_first_qu(): """Verify the service info first question is QU and subsequent ones are QM questions.""" type_ = "_quservice._tcp.local." 
@@ -1177,7 +1178,7 @@ def send(out, addr=const._MDNS_ADDR, port=const._MDNS_PORT): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_service_browser_ignores_unrelated_updates(): """Test that the ServiceBrowser ignores unrelated updates.""" @@ -1274,7 +1275,7 @@ def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-de await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_request_timeout(): """Test that the timeout does not throw an exception and finishes close to the actual timeout.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1288,7 +1289,7 @@ async def test_async_request_timeout(): assert (end_time - start_time) < 3000 + 1000 -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_request_non_running_instance(): """Test that the async_request throws when zeroconf is not running.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1297,7 +1298,7 @@ async def test_async_request_non_running_instance(): await aiozc.async_get_service_info("_notfound.local.", "notthere._notfound.local.") -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_legacy_unicast_response(run_isolated): """Verify legacy unicast responses include questions and correct id.""" type_ = "_mservice._tcp.local." 
@@ -1338,7 +1339,7 @@ async def test_legacy_unicast_response(run_isolated): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_update_with_uppercase_names(run_isolated): """Test an ip update from a shelly which uses uppercase names.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) diff --git a/tests/test_cache.py b/tests/test_cache.py index 5bd6a869..9d55435d 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -7,6 +7,7 @@ from heapq import heapify, heappop import pytest + import zeroconf as r from zeroconf import const @@ -363,7 +364,7 @@ def test_async_get_unique_returns_newest_record(): assert record is record2 -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_cache_heap_cleanup() -> None: """Test that the heap gets cleaned up when there are many old expirations.""" cache = r.DNSCache() @@ -415,7 +416,7 @@ async def test_cache_heap_cleanup() -> None: assert not cache.async_entries_with_name(name), cache._expire_heap -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_cache_heap_multi_name_cleanup() -> None: """Test cleanup with multiple names.""" cache = r.DNSCache() @@ -451,7 +452,7 @@ async def test_cache_heap_multi_name_cleanup() -> None: assert not cache.async_entries_with_name(name), cache._expire_heap -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_cache_heap_pops_order() -> None: """Test cache heap is popped in order.""" cache = r.DNSCache() diff --git a/tests/test_circular_imports.py b/tests/test_circular_imports.py index 79d58ae1..74ed1f12 100644 --- a/tests/test_circular_imports.py +++ b/tests/test_circular_imports.py @@ -8,7 +8,7 @@ import pytest -@pytest.mark.asyncio() +@pytest.mark.asyncio @pytest.mark.timeout(30) # cloud can take > 9s @pytest.mark.parametrize( "module", diff --git a/tests/test_core.py b/tests/test_core.py index 1dfb9806..fcfdf424 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -15,6 +15,7 @@ from unittest.mock import AsyncMock, Mock, patch 
import pytest + import zeroconf as r from zeroconf import NotRunningException, Zeroconf, const, current_time_millis from zeroconf._listener import AsyncListener, _WrappedTransport @@ -664,7 +665,7 @@ def test_tc_bit_defers_last_response_missing(): zc.close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_open_close_twice_from_async() -> None: """Test we can close twice from a coroutine when using Zeroconf. @@ -684,7 +685,7 @@ async def test_open_close_twice_from_async() -> None: await asyncio.sleep(0) -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_multiple_sync_instances_stared_from_async_close(): """Test we can shutdown multiple sync instances from async.""" @@ -740,7 +741,7 @@ def _background_register(): bgthread.join() -@pytest.mark.asyncio() +@pytest.mark.asyncio @patch("zeroconf._core._STARTUP_TIMEOUT", 0) @patch("zeroconf._core.AsyncEngine._async_setup", new_callable=AsyncMock) async def test_event_loop_blocked(mock_start): diff --git a/tests/test_dns.py b/tests/test_dns.py index 5928338c..246c8dcf 100644 --- a/tests/test_dns.py +++ b/tests/test_dns.py @@ -8,6 +8,7 @@ import unittest.mock import pytest + import zeroconf as r from zeroconf import DNSHinfo, DNSText, ServiceInfo, const, current_time_millis from zeroconf._dns import DNSRRSet diff --git a/tests/test_engine.py b/tests/test_engine.py index 5f244804..b7a94c86 100644 --- a/tests/test_engine.py +++ b/tests/test_engine.py @@ -8,6 +8,7 @@ from unittest.mock import patch import pytest + import zeroconf as r from zeroconf import _engine, const from zeroconf.asyncio import AsyncZeroconf @@ -29,7 +30,7 @@ def teardown_module(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_reaper(): with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -64,7 +65,7 @@ async def test_reaper(): assert record_with_1s_ttl not in 
entries -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_reaper_aborts_when_done(): """Ensure cache cleanup stops when zeroconf is done.""" with patch.object(_engine, "_CACHE_CLEANUP_INTERVAL", 0.01): diff --git a/tests/test_handlers.py b/tests/test_handlers.py index 58f8ecb1..ffa4ff88 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -13,6 +13,7 @@ from unittest.mock import patch import pytest + import zeroconf as r from zeroconf import ServiceInfo, Zeroconf, const, current_time_millis from zeroconf._handlers.multicast_outgoing_queue import ( @@ -492,7 +493,7 @@ def test_unicast_response(): zc.close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_probe_answered_immediately(): """Verify probes are responded to immediately.""" # instantiate a zeroconf instance @@ -543,7 +544,7 @@ async def test_probe_answered_immediately(): zc.close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_probe_answered_immediately_with_uppercase_name(): """Verify probes are responded to immediately with an uppercase name.""" # instantiate a zeroconf instance @@ -1091,7 +1092,7 @@ def test_enumeration_query_with_no_registered_services(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_qu_response_only_sends_additionals_if_sends_answer(): """Test that a QU response does not send additionals unless it sends the answer as well.""" # instantiate a zeroconf instance @@ -1257,7 +1258,7 @@ async def test_qu_response_only_sends_additionals_if_sends_answer(): # This test uses asyncio because it needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_cache_flush_bit(): """Test that the cache flush bit sets the TTL to one for matching records.""" # instantiate a zeroconf instance @@ -1360,7 +1361,7 @@ async def test_cache_flush_bit(): # This test uses asyncio because it 
needs to access the cache directly # which is not threadsafe -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_record_update_manager_add_listener_callsback_existing_records(): """Test that the RecordUpdateManager will callback existing records.""" @@ -1414,7 +1415,7 @@ def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordU await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_questions_query_handler_populates_the_question_history_from_qm_questions(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf @@ -1460,7 +1461,7 @@ async def test_questions_query_handler_populates_the_question_history_from_qm_qu await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_questions_query_handler_does_not_put_qu_questions_in_history(): aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) zc = aiozc.zeroconf @@ -1503,7 +1504,7 @@ async def test_questions_query_handler_does_not_put_qu_questions_in_history(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_guard_against_low_ptr_ttl(): """Ensure we enforce a min for PTR record ttls to avoid excessive refresh queries from ServiceBrowsers. @@ -1554,7 +1555,7 @@ async def test_guard_against_low_ptr_ttl(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_duplicate_goodbye_answers_in_packet(): """Ensure we do not throw an exception when there are duplicate goodbye records in a packet.""" aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) @@ -1586,7 +1587,7 @@ async def test_duplicate_goodbye_answers_in_packet(): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_response_aggregation_timings(run_isolated): """Verify multicast responses are aggregated.""" type_ = "_mservice._tcp.local." 
@@ -1708,7 +1709,7 @@ async def test_response_aggregation_timings(run_isolated): await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_response_aggregation_timings_multiple(run_isolated, disable_duplicate_packet_suppression): """Verify multicast responses that are aggregated do not take longer than 620ms to send. @@ -1790,7 +1791,7 @@ async def test_response_aggregation_timings_multiple(run_isolated, disable_dupli assert info2.dns_pointer() in incoming.answers() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_response_aggregation_random_delay(): """Verify the random delay for outgoing multicast will coalesce into a single group @@ -1898,7 +1899,7 @@ async def test_response_aggregation_random_delay(): assert info5.dns_pointer() in outgoing_queue.queue[1].answers -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_future_answers_are_removed_on_send(): """Verify any future answers scheduled to be sent are removed when we send.""" type_ = "_mservice._tcp.local." 
@@ -1962,7 +1963,7 @@ async def test_future_answers_are_removed_on_send(): assert info2.dns_pointer() in outgoing_queue.queue[0].answers -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_add_listener_warns_when_not_using_record_update_listener(caplog): """Log when a listener is added that is not using RecordUpdateListener as a base class.""" @@ -1987,7 +1988,7 @@ def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordU await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_updates_iteration_safe(): """Ensure we can safely iterate over the async_updates.""" @@ -2031,7 +2032,7 @@ def async_update_records(self, zc: Zeroconf, now: float, records: list[r.RecordU await aiozc.async_close() -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_updates_complete_iteration_safe(): """Ensure we can safely iterate over the async_updates_complete.""" diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 78fed0e0..08d7e600 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -11,6 +11,7 @@ from typing import cast import pytest + import zeroconf as r from zeroconf import DNSHinfo, DNSIncoming, DNSText, const, current_time_millis diff --git a/tests/test_services.py b/tests/test_services.py index d192c652..7d7c3fc7 100644 --- a/tests/test_services.py +++ b/tests/test_services.py @@ -11,6 +11,7 @@ from typing import Any import pytest + import zeroconf as r from zeroconf import Zeroconf from zeroconf._services.info import ServiceInfo diff --git a/tests/test_updates.py b/tests/test_updates.py index 376082e7..ec1296f7 100644 --- a/tests/test_updates.py +++ b/tests/test_updates.py @@ -7,6 +7,7 @@ import time import pytest + import zeroconf as r from zeroconf import Zeroconf, const from zeroconf._record_update import RecordUpdate @@ -80,7 +81,7 @@ def on_service_state_change(zeroconf, service_type, state_change, name): browser.cancel() - assert len(updates) + assert updates 
assert len([isinstance(update, r.DNSPointer) and update.name == type_ for update in updates]) >= 1 zc.remove_listener(listener) diff --git a/tests/utils/test_asyncio.py b/tests/utils/test_asyncio.py index 4d2ee0ec..7989a82c 100644 --- a/tests/utils/test_asyncio.py +++ b/tests/utils/test_asyncio.py @@ -10,13 +10,14 @@ from unittest.mock import patch import pytest + from zeroconf import EventLoopBlocked from zeroconf._engine import _CLOSE_TIMEOUT from zeroconf._utils import asyncio as aioutils from zeroconf.const import _LOADED_SYSTEM_TIMEOUT -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_async_get_all_tasks() -> None: """Test we can get all tasks in the event loop. @@ -32,7 +33,7 @@ async def test_async_get_all_tasks() -> None: await aioutils._async_get_all_tasks(loop) -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_get_running_loop_from_async() -> None: """Test we can get the event loop.""" assert isinstance(aioutils.get_running_loop(), asyncio.AbstractEventLoop) @@ -43,7 +44,7 @@ def test_get_running_loop_no_loop() -> None: assert aioutils.get_running_loop() is None -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_wait_future_or_timeout_times_out() -> None: """Test wait_future_or_timeout will timeout.""" loop = asyncio.get_running_loop() @@ -117,7 +118,7 @@ def test_cumulative_timeouts_less_than_close_plus_buffer(): ) < 1 + _CLOSE_TIMEOUT + _LOADED_SYSTEM_TIMEOUT -@pytest.mark.asyncio() +@pytest.mark.asyncio async def test_run_coro_with_timeout() -> None: """Test running a coroutine with a timeout raises EventLoopBlocked.""" loop = asyncio.get_event_loop() diff --git a/tests/utils/test_name.py b/tests/utils/test_name.py index 3b70c7d4..1feb7713 100644 --- a/tests/utils/test_name.py +++ b/tests/utils/test_name.py @@ -5,6 +5,7 @@ import socket import pytest + from zeroconf import BadTypeInNameException from zeroconf._services.info import ServiceInfo, instance_name_from_service_info from zeroconf._utils import name as nameutils 
diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 17ff6196..f763b655 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -10,6 +10,7 @@ import ifaddr import pytest + import zeroconf as r from zeroconf._utils import net as netutils From 33bf0e4ef2e3468b7c9df1a53709ea0d9e35f32c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 23 Mar 2025 19:13:09 -1000 Subject: [PATCH 386/434] chore(deps-dev): bump setuptools from 76.0.0 to 77.0.3 (#1550) --- poetry.lock | 32 ++++++++++++++++---------------- pyproject.toml | 2 +- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8c4713e8..ec600ab6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "alabaster" @@ -25,7 +25,7 @@ files = [ ] [package.extras] -dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "certifi" @@ -311,7 +311,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cython" @@ -471,12 +471,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker 
(>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -825,24 +825,24 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "76.0.0" +version = "77.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-76.0.0-py3-none-any.whl", hash = "sha256:199466a166ff664970d0ee145839f5582cb9bca7a0a3a2e795b6a9cb2308e9c6"}, - {file = "setuptools-76.0.0.tar.gz", hash = "sha256:43b4ee60e10b0d0ee98ad11918e114c70701bc6051662a9a675a0496c1a158f4"}, + {file = "setuptools-77.0.3-py3-none-any.whl", hash = "sha256:67122e78221da5cf550ddd04cf8742c8fe12094483749a792d56cd669d6cf58c"}, + {file = "setuptools-77.0.3.tar.gz", hash = "sha256:583b361c8da8de57403743e756609670de6fb2345920e36dc5c2d914c319c945"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < 
\"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "snowballstemmer" @@ -1097,7 +1097,7 @@ files = [ ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks 
(>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -1116,14 +1116,14 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "f866b539caf6f0140faba8aa19f4e1fae2013a48fc3346747f104dfe62ef290b" +content-hash = "6185b531e93844e1dbd399c197c9376fc7d2efa2cbff6bdb7585484dc6dbfb86" diff --git a/pyproject.toml b/pyproject.toml index 9c92f362..198605f9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest = ">=7.2,<9.0" pytest-cov = ">=4,<7" pytest-asyncio = ">=0.20.3,<0.26.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<77.0.0" +setuptools = ">=65.6.3,<78.0.0" pytest-timeout = "^2.1.0" pytest-codspeed = "^3.1.0" From f05b0127774ac69db5a6a7ba02ecdf57e46b4f9b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 24 Mar 2025 10:02:29 -1000 Subject: [PATCH 387/434] chore(pre-commit.ci): pre-commit autoupdate (#1551) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.11.0 → v0.11.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.11.0...v0.11.2) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- 
.pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5d03fcde..cf19bfa2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.0 + rev: v0.11.2 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 741b6ef3639334cb558b16ce568b33bf308e6688 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 19:10:38 -1000 Subject: [PATCH 388/434] chore(deps-dev): bump setuptools from 77.0.3 to 78.1.0 (#1552) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index ec600ab6..b16f7e87 100644 --- a/poetry.lock +++ b/poetry.lock @@ -825,14 +825,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "77.0.3" +version = "78.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-77.0.3-py3-none-any.whl", hash = "sha256:67122e78221da5cf550ddd04cf8742c8fe12094483749a792d56cd669d6cf58c"}, - {file = "setuptools-77.0.3.tar.gz", hash = "sha256:583b361c8da8de57403743e756609670de6fb2345920e36dc5c2d914c319c945"}, + {file = "setuptools-78.1.0-py3-none-any.whl", hash = "sha256:3e386e96793c8702ae83d17b853fb93d3e09ef82ec62722e61da5cd22376dcd8"}, + {file = "setuptools-78.1.0.tar.gz", hash = "sha256:18fd474d4a82a5f83dac888df697af65afa82dec7323d09c3e37d1f14288da54"}, ] [package.extras] @@ -1126,4 +1126,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "6185b531e93844e1dbd399c197c9376fc7d2efa2cbff6bdb7585484dc6dbfb86" +content-hash = "94e87573380ca1c563c3af5fbd6363399a4c333c9f697c1b2191835714d1ffaa" diff 
--git a/pyproject.toml b/pyproject.toml index 198605f9..608b849b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest = ">=7.2,<9.0" pytest-cov = ">=4,<7" pytest-asyncio = ">=0.20.3,<0.26.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<78.0.0" +setuptools = ">=65.6.3,<79.0.0" pytest-timeout = "^2.1.0" pytest-codspeed = "^3.1.0" From 0fe79d7a53789719225509bce8c124950aed6237 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 30 Mar 2025 19:21:52 -1000 Subject: [PATCH 389/434] chore(deps-dev): bump pytest-asyncio from 0.25.3 to 0.26.0 (#1553) Bumps [pytest-asyncio](https://github.com/pytest-dev/pytest-asyncio) from 0.25.3 to 0.26.0. - [Release notes](https://github.com/pytest-dev/pytest-asyncio/releases) - [Commits](https://github.com/pytest-dev/pytest-asyncio/compare/v0.25.3...v0.26.0) --- updated-dependencies: - dependency-name: pytest-asyncio dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 9 +++++---- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index b16f7e87..845974d6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -697,18 +697,19 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments [[package]] name = "pytest-asyncio" -version = "0.25.3" +version = "0.26.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, - {file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, + {file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"}, + {file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"}, ] [package.dependencies] pytest = ">=8.2,<9" +typing-extensions = {version = ">=4.12", markers = "python_version < \"3.10\""} [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] @@ -1126,4 +1127,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "94e87573380ca1c563c3af5fbd6363399a4c333c9f697c1b2191835714d1ffaa" +content-hash = "e3c96e694e9c149b96323081d51675d7a9d5ad8243f4338ff149e643a65417cb" diff --git a/pyproject.toml b/pyproject.toml index 608b849b..569fe977 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] pytest = ">=7.2,<9.0" pytest-cov = ">=4,<7" -pytest-asyncio = ">=0.20.3,<0.26.0" +pytest-asyncio = ">=0.20.3,<0.27.0" cython = "^3.0.5" setuptools = ">=65.6.3,<79.0.0" pytest-timeout = "^2.1.0" From 
34043735e13ba254cb5e31e03b6d672447ba6e57 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 30 Mar 2025 19:29:47 -1000 Subject: [PATCH 390/434] chore: pin GitHub actions to SHAs to mitigate supply chain attacks (#1554) * chore: pin GitHub actions to SHAs to mitigate supply chain attacks * chore: pin GitHub actions to SHAs to mitigate supply chain attacks --- .github/workflows/ci.yml | 52 +++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2fb9b06f..b61e5e45 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,11 +14,11 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 with: python-version: "3.12" - - uses: pre-commit/action@v3.0.1 + - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 # Make sure commit messages follow the conventional commits convention: # https://www.conventionalcommits.org @@ -26,10 +26,10 @@ jobs: name: Lint Commit Messages runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 - - uses: wagoid/commitlint-github-action@v6 + - uses: wagoid/commitlint-github-action@b948419dd99f3fd78a6548d48f94e3df7f6bf3ed # v6 test: strategy: @@ -65,11 +65,11 @@ jobs: python-version: "pypy-3.10" runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Install poetry run: pipx install poetry - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 with: python-version: ${{ matrix.python-version }} cache: "poetry" @@ -87,25 +87,25 @@ jobs: - 
name: Test with Pytest run: poetry run pytest --durations=20 --timeout=60 -v --cov=zeroconf --cov-branch --cov-report xml --cov-report html --cov-report term-missing tests - name: Upload coverage to Codecov - uses: codecov/codecov-action@v5 + uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5 with: token: ${{ secrets.CODECOV_TOKEN }} benchmark: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Setup Python 3.13 - uses: actions/setup-python@v5 + uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 with: python-version: 3.13 - - uses: snok/install-poetry@v1.4.1 + - uses: snok/install-poetry@76e04a911780d5b312d89783f7b1cd627778900a # v1.4.1 - name: Install Dependencies run: | REQUIRE_CYTHON=1 poetry install --only=main,dev shell: bash - name: Run benchmarks - uses: CodSpeedHQ/action@v3 + uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3 with: token: ${{ secrets.CODSPEED_TOKEN }} run: poetry run pytest --no-cov -vvvvv --codspeed tests/benchmarks @@ -128,32 +128,32 @@ jobs: newest_release_tag: ${{ steps.release.outputs.tag }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 ref: ${{ github.head_ref || github.ref_name }} # Do a dry run of PSR - name: Test release - uses: python-semantic-release/python-semantic-release@v9.21.0 + uses: python-semantic-release/python-semantic-release@26bb37cfab71a5a372e3db0f48a6eac57519a4a6 # v9.21.0 if: github.ref_name != 'master' with: root_options: --noop # On main branch: actual PSR + upload to PyPI & GitHub - name: Release - uses: python-semantic-release/python-semantic-release@v9.21.0 + uses: python-semantic-release/python-semantic-release@26bb37cfab71a5a372e3db0f48a6eac57519a4a6 # v9.21.0 id: release if: github.ref_name == 'master' with: github_token: ${{ secrets.GITHUB_TOKEN }} - name: Publish 
package distributions to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1 if: steps.release.outputs.released == 'true' - name: Publish package distributions to GitHub Releases - uses: python-semantic-release/upload-to-gh-release@main + uses: python-semantic-release/upload-to-gh-release@0a92b5d7ebfc15a84f9801ebd1bf706343d43711 # main if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} @@ -228,18 +228,18 @@ jobs: pyver: cp313 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 ref: "master" # Used to host cibuildwheel - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 with: python-version: "3.12" - name: Set up QEMU if: ${{ matrix.qemu }} - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3 with: platforms: all # This should be temporary @@ -262,20 +262,20 @@ jobs: echo "CIBW_BUILD=${{ matrix.pyver }}*" >> $GITHUB_ENV fi - - uses: actions/checkout@v4 + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: ref: ${{ needs.release.outputs.newest_release_tag }} fetch-depth: 0 - name: Build wheels ${{ matrix.musl }} (${{ matrix.qemu }}) - uses: pypa/cibuildwheel@v2.23.0 + uses: pypa/cibuildwheel@6cccd09a31908ffd175b012fb8bf4e1dbda3bc6c # v2.23.0 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_BEFORE_ALL_LINUX: apt install -y gcc || yum install -y gcc || apk add gcc REQUIRE_CYTHON: 1 - - uses: actions/upload-artifact@v4 + - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 with: path: ./wheelhouse/*.whl name: wheels-${{ matrix.os }}-${{ 
matrix.musl }}-${{ matrix.qemu }}-${{ matrix.pyver }} @@ -288,7 +288,7 @@ jobs: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - - uses: actions/download-artifact@v4 + - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4 with: # unpacks default artifact into dist/ # if `name: artifact` is omitted, the action will create extra parent dir @@ -297,6 +297,4 @@ jobs: merge-multiple: true - uses: - pypa/gh-action-pypi-publish@v1.12.4 - - # To test: repository_url: https://test.pypi.org/legacy/ + pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 From 54eb3830dc794d78b8419153f8233713e1dff840 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 19:19:24 -1000 Subject: [PATCH 391/434] chore(ci): bump pypa/cibuildwheel from 2.23.0 to 2.23.2 in the github-actions group (#1556) --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b61e5e45..ffe20f82 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -268,7 +268,7 @@ jobs: fetch-depth: 0 - name: Build wheels ${{ matrix.musl }} (${{ matrix.qemu }}) - uses: pypa/cibuildwheel@6cccd09a31908ffd175b012fb8bf4e1dbda3bc6c # v2.23.0 + uses: pypa/cibuildwheel@d04cacbc9866d432033b1d09142936e6a0e2121a # v2.23.2 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} From b757ddf98d7d04c366281a4281a449c5c2cb897d Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Tue, 1 Apr 2025 21:11:16 +0200 Subject: [PATCH 392/434] fix: create listener socket with specific IP version (#1557) * fix: create listener socket with specific IP version Create listener sockets when using unicast with specific IP version as well, just like in `new_respond_socket()`. 
* chore(tests): add unit test for socket creation with unicast addressing --- src/zeroconf/_utils/net.py | 5 +++-- tests/utils/test_net.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index c2312e01..b4f3ef77 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -421,11 +421,12 @@ def create_sockets( else: respond_socket = None else: + is_v6 = isinstance(i, tuple) respond_socket = new_socket( port=0, - ip_version=ip_version, + ip_version=IPVersion.V6Only if is_v6 else IPVersion.V4Only, apple_p2p=apple_p2p, - bind_addr=i[0] if isinstance(i, tuple) else (i,), + bind_addr=cast(tuple[tuple[str, int, int], int], i)[0] if is_v6 else (cast(str, i),), ) if respond_socket is not None: diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index f763b655..ad8648de 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -298,3 +298,35 @@ def test_new_respond_socket_new_socket_returns_none(): """Test new_respond_socket returns None if new_socket returns None.""" with patch.object(netutils, "new_socket", return_value=None): assert netutils.new_respond_socket(("0.0.0.0", 0)) is None # type: ignore[arg-type] + + +def test_create_sockets(): + """Test create_sockets with unicast and IPv4.""" + + with ( + patch("zeroconf._utils.net.new_socket") as mock_new_socket, + patch( + "zeroconf._utils.net.ifaddr.get_adapters", + return_value=_generate_mock_adapters(), + ), + ): + mock_socket = Mock(spec=socket.socket) + mock_new_socket.return_value = mock_socket + + listen_socket, respond_sockets = r.create_sockets( + interfaces=r.InterfaceChoice.All, unicast=True, ip_version=r.IPVersion.All + ) + + assert listen_socket is None + mock_new_socket.assert_any_call( + port=0, + ip_version=r.IPVersion.V6Only, + apple_p2p=False, + bind_addr=("2001:db8::", 1, 1), + ) + mock_new_socket.assert_any_call( + port=0, + ip_version=r.IPVersion.V4Only, + 
apple_p2p=False, + bind_addr=("192.168.1.5",), + ) From 94620b084addfff6d7b73dd5d7ed69c1a213415e Mon Sep 17 00:00:00 2001 From: semantic-release Date: Tue, 1 Apr 2025 19:20:01 +0000 Subject: [PATCH 393/434] 0.146.2 Automatically generated by python-semantic-release --- CHANGELOG.md | 16 ++++++++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f28b0022..0ffa0f63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,22 @@ # CHANGELOG +## v0.146.2 (2025-04-01) + +### Bug Fixes + +- Create listener socket with specific IP version + ([#1557](https://github.com/python-zeroconf/python-zeroconf/pull/1557), + [`b757ddf`](https://github.com/python-zeroconf/python-zeroconf/commit/b757ddf98d7d04c366281a4281a449c5c2cb897d)) + +* fix: create listener socket with specific IP version + +Create listener sockets when using unicast with specific IP version as well, just like in + `new_respond_socket()`. + +* chore(tests): add unit test for socket creation with unicast addressing + + ## v0.146.1 (2025-03-05) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 569fe977..b2850113 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.146.1" +version = "0.146.2" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index b915b8d7..01496e22 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.146.1" +__version__ = "0.146.2" __license__ = "LGPL" From bd643a227bc4d6a949d558850ad1431bc2940d74 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Wed, 2 Apr 2025 19:41:16 +0200 Subject: [PATCH 394/434] fix: correctly override question type flag for requests (#1558) * fix: correctly override question type flag for requests Currently even when setting the explicit question type flag, the implementation ignores it for subsequent queries. This commit ensures that all queries respect the explicit question type flag. * chore(tests): add test for explicit question type flag Add unit test to validate that the explicit question type flag is set correctly in outgoing requests. 
--- src/zeroconf/_services/info.py | 2 +- tests/services/test_info.py | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index 9cd8df16..fff9e125 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -859,7 +859,7 @@ async def async_request( if last <= now: return False if next_ <= now: - this_question_type = question_type or QU_QUESTION if first_request else QM_QUESTION + this_question_type = question_type or (QU_QUESTION if first_request else QM_QUESTION) out = self._generate_request_query(zc, now, this_question_type) first_request = False if out.questions: diff --git a/tests/services/test_info.py b/tests/services/test_info.py index 3d4c5302..660b56d2 100644 --- a/tests/services/test_info.py +++ b/tests/services/test_info.py @@ -17,6 +17,7 @@ import zeroconf as r from zeroconf import DNSAddress, RecordUpdate, const +from zeroconf._protocol.outgoing import DNSOutgoing from zeroconf._services import info from zeroconf._services.info import ServiceInfo from zeroconf._utils.net import IPVersion @@ -1871,3 +1872,23 @@ async def test_address_resolver_ipv6(): aiozc.zeroconf.async_send(outgoing) assert await resolve_task assert resolver.ip_addresses_by_version(IPVersion.All) == [ip_address("fe80::52e:c2f2:bc5f:e9c6")] + + +@pytest.mark.asyncio +async def test_unicast_flag_if_requested() -> None: + """Verify we try four times even with the random delay.""" + type_ = "_typethatisnothere._tcp.local." 
+ aiozc = AsyncZeroconf(interfaces=["127.0.0.1"]) + + def async_send(out: DNSOutgoing, addr: str | None = None, port: int = const._MDNS_PORT) -> None: + """Sends an outgoing packet.""" + for question in out.questions: + assert question.unicast + + # patch the zeroconf send + with patch.object(aiozc.zeroconf, "async_send", async_send): + await aiozc.async_get_service_info( + f"willnotbefound.{type_}", type_, question_type=r.DNSQuestionType.QU + ) + + await aiozc.async_close() From 16c257c0ca2772a024c6e9920df2375436bfc73c Mon Sep 17 00:00:00 2001 From: semantic-release Date: Wed, 2 Apr 2025 17:51:12 +0000 Subject: [PATCH 395/434] 0.146.3 Automatically generated by python-semantic-release --- CHANGELOG.md | 19 +++++++++++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ffa0f63..ccb6bdd7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,25 @@ # CHANGELOG +## v0.146.3 (2025-04-02) + +### Bug Fixes + +- Correctly override question type flag for requests + ([#1558](https://github.com/python-zeroconf/python-zeroconf/pull/1558), + [`bd643a2`](https://github.com/python-zeroconf/python-zeroconf/commit/bd643a227bc4d6a949d558850ad1431bc2940d74)) + +* fix: correctly override question type flag for requests + +Currently even when setting the explicit question type flag, the implementation ignores it for + subsequent queries. This commit ensures that all queries respect the explicit question type flag. + +* chore(tests): add test for explicit question type flag + +Add unit test to validate that the explicit question type flag is set correctly in outgoing + requests. 
+ + ## v0.146.2 (2025-04-01) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index b2850113..7e21a38f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.146.2" +version = "0.146.3" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 01496e22..c266c318 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.146.2" +__version__ = "0.146.3" __license__ = "LGPL" From b044d2af9c3d357a49c010380f49471e92684f7e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 2 Apr 2025 09:33:10 -1000 Subject: [PATCH 396/434] chore(pre-commit.ci): pre-commit autoupdate (#1555) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(pre-commit.ci): pre-commit autoupdate updates: - [github.com/PyCQA/flake8: 7.1.2 → 7.2.0](https://github.com/PyCQA/flake8/compare/7.1.2...7.2.0) * chore: remove useless nonlocal statements --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: J. 
Nick Koston --- .pre-commit-config.yaml | 2 +- tests/services/test_browser.py | 13 ------------- tests/test_asyncio.py | 9 --------- tests/test_updates.py | 1 - tests/utils/test_net.py | 1 - 5 files changed, 1 insertion(+), 25 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cf19bfa2..985d54b6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: hooks: - id: codespell - repo: https://github.com/PyCQA/flake8 - rev: 7.1.2 + rev: 7.2.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy diff --git a/tests/services/test_browser.py b/tests/services/test_browser.py index d57568f4..e9135bb6 100644 --- a/tests/services/test_browser.py +++ b/tests/services/test_browser.py @@ -866,7 +866,6 @@ class LegacyRecordUpdateListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" def update_record(self, zc: Zeroconf, now: float, record: r.DNSRecord) -> None: - nonlocal updates updates.append(record) listener = LegacyRecordUpdateListener() @@ -923,7 +922,6 @@ def test_service_browser_is_aware_of_port_changes(): # dummy service callback def on_service_state_change(zeroconf, service_type, state_change, name): """Dummy callback.""" - nonlocal callbacks if name == registration_name: callbacks.append((service_type, state_change, name)) @@ -985,17 +983,14 @@ def test_service_browser_listeners_update_service(): class MyServiceListener(r.ServiceListener): def add_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("add", type_, name)) def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("remove", type_, name)) def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("update", type_, 
name)) @@ -1050,12 +1045,10 @@ def test_service_browser_listeners_no_update_service(): class MyServiceListener(r.ServiceListener): def add_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("add", type_, name)) def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("remove", type_, name)) @@ -1374,17 +1367,14 @@ def test_service_browser_matching(): class MyServiceListener(r.ServiceListener): def add_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("add", type_, name)) def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("remove", type_, name)) def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("update", type_, name)) @@ -1465,17 +1455,14 @@ def test_service_browser_expire_callbacks(): class MyServiceListener(r.ServiceListener): def add_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("add", type_, name)) def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("remove", type_, name)) def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("update", type_, name)) diff --git a/tests/test_asyncio.py b/tests/test_asyncio.py index 40ecf816..b6e124aa 100644 --- a/tests/test_asyncio.py +++ b/tests/test_asyncio.py @@ -940,17 +940,14 @@ async def test_service_browser_instantiation_generates_add_events_from_cache(): class 
MyServiceListener(ServiceListener): def add_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("add", type_, name)) def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("remove", type_, name)) def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("update", type_, name)) @@ -1191,17 +1188,14 @@ async def test_service_browser_ignores_unrelated_updates(): class MyServiceListener(ServiceListener): def add_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("add", type_, name)) def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("remove", type_, name)) def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks if name == registration_name: callbacks.append(("update", type_, name)) @@ -1349,15 +1343,12 @@ async def test_update_with_uppercase_names(run_isolated): class MyServiceListener(ServiceListener): def add_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks callbacks.append(("add", type_, name)) def remove_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks callbacks.append(("remove", type_, name)) def update_service(self, zc, type_, name) -> None: # type: ignore[no-untyped-def] - nonlocal callbacks callbacks.append(("update", type_, name)) listener = MyServiceListener() diff --git a/tests/test_updates.py b/tests/test_updates.py index ec1296f7..d8b16083 100644 --- a/tests/test_updates.py +++ b/tests/test_updates.py @@ -48,7 +48,6 @@ class 
LegacyRecordUpdateListener(r.RecordUpdateListener): """A RecordUpdateListener that does not implement update_records.""" def update_record(self, zc: Zeroconf, now: float, record: r.DNSRecord) -> None: - nonlocal updates updates.append(record) listener = LegacyRecordUpdateListener() diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index ad8648de..6bdafb37 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -127,7 +127,6 @@ def test_disable_ipv6_only_or_raise(): errors_logged = [] def _log_error(*args): - nonlocal errors_logged errors_logged.append(args) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) From f89a90e610094b721ec536f9b0ddee41592838fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 6 Apr 2025 18:43:34 -1000 Subject: [PATCH 397/434] chore(deps-dev): bump pytest-cov from 6.0.0 to 6.1.1 (#1560) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 845974d6..367374ed 100644 --- a/poetry.lock +++ b/poetry.lock @@ -750,14 +750,14 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] name = "pytest-cov" -version = "6.0.0" +version = "6.1.1" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, - {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, ] [package.dependencies] From 389a8a2724d3f6d328fee0bef38d7addc29d19c4 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 7 Apr 2025 08:47:19 -1000 Subject: [PATCH 398/434] chore(pre-commit.ci): pre-commit autoupdate (#1561) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/commitizen-tools/commitizen: v4.4.1 → v4.5.0](https://github.com/commitizen-tools/commitizen/compare/v4.4.1...v4.5.0) - [github.com/astral-sh/ruff-pre-commit: v0.11.2 → v0.11.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.11.2...v0.11.4) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 985d54b6..1faee010 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.4.1 + rev: v4.5.0 hooks: - id: commitizen stages: [commit-msg] @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.2 + rev: v0.11.4 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 83594887521507cf77bfc0a397becabaaab287c2 Mon Sep 17 00:00:00 2001 From: Stefan Agner 
Date: Mon, 14 Apr 2025 11:33:33 +0200 Subject: [PATCH 399/434] fix: avoid loading adapter list twice (#1564) --- src/zeroconf/_utils/net.py | 40 +++++++++++++++++++++++++++++--------- tests/utils/test_net.py | 40 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 69 insertions(+), 11 deletions(-) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index b4f3ef77..e687ab60 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -28,7 +28,8 @@ import socket import struct import sys -from collections.abc import Sequence +import warnings +from collections.abc import Iterable, Sequence from typing import Any, Union, cast import ifaddr @@ -73,19 +74,39 @@ def _encode_address(address: str) -> bytes: return socket.inet_pton(address_family, address) -def get_all_addresses() -> list[str]: - return list({addr.ip for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv4}) # type: ignore[misc] +def get_all_addresses_ipv4(adapters: Iterable[ifaddr.Adapter]) -> list[str]: + return list({addr.ip for iface in adapters for addr in iface.ips if addr.is_IPv4}) # type: ignore[misc] -def get_all_addresses_v6() -> list[tuple[tuple[str, int, int], int]]: +def get_all_addresses_ipv6(adapters: Iterable[ifaddr.Adapter]) -> list[tuple[tuple[str, int, int], int]]: # IPv6 multicast uses positive indexes for interfaces # TODO: What about multi-address interfaces? return list( - {(addr.ip, iface.index) for iface in ifaddr.get_adapters() for addr in iface.ips if addr.is_IPv6} # type: ignore[misc] + {(addr.ip, iface.index) for iface in adapters for addr in iface.ips if addr.is_IPv6} # type: ignore[misc] + ) + + +def get_all_addresses() -> list[str]: + warnings.warn( + "get_all_addresses is deprecated, and will be removed in a future version. 
Use ifaddr" + "directly instead to get a list of adapters.", + DeprecationWarning, + stacklevel=2, + ) + return get_all_addresses_ipv4(ifaddr.get_adapters()) + + +def get_all_addresses_v6() -> list[tuple[tuple[str, int, int], int]]: + warnings.warn( + "get_all_addresses_v6 is deprecated, and will be removed in a future version. Use ifaddr" + "directly instead to get a list of adapters.", + DeprecationWarning, + stacklevel=2, ) + return get_all_addresses_ipv6(ifaddr.get_adapters()) -def ip6_to_address_and_index(adapters: list[ifaddr.Adapter], ip: str) -> tuple[tuple[str, int, int], int]: +def ip6_to_address_and_index(adapters: Iterable[ifaddr.Adapter], ip: str) -> tuple[tuple[str, int, int], int]: if "%" in ip: ip = ip[: ip.index("%")] # Strip scope_id. ipaddr = ipaddress.ip_address(ip) @@ -102,7 +123,7 @@ def ip6_to_address_and_index(adapters: list[ifaddr.Adapter], ip: str) -> tuple[t raise RuntimeError(f"No adapter found for IP address {ip}") -def interface_index_to_ip6_address(adapters: list[ifaddr.Adapter], index: int) -> tuple[str, int, int]: +def interface_index_to_ip6_address(adapters: Iterable[ifaddr.Adapter], index: int) -> tuple[str, int, int]: for adapter in adapters: if adapter.index == index: for adapter_ip in adapter.ips: @@ -152,10 +173,11 @@ def normalize_interface_choice( if ip_version != IPVersion.V6Only: result.append("0.0.0.0") elif choice is InterfaceChoice.All: + adapters = ifaddr.get_adapters() if ip_version != IPVersion.V4Only: - result.extend(get_all_addresses_v6()) + result.extend(get_all_addresses_ipv6(adapters)) if ip_version != IPVersion.V6Only: - result.extend(get_all_addresses()) + result.extend(get_all_addresses_ipv4(adapters)) if not result: raise RuntimeError( f"No interfaces to listen on, check that any interfaces have IP version {ip_version}" diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 6bdafb37..eff2befd 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -6,12 +6,14 @@ import socket 
import sys import unittest +import warnings from unittest.mock import MagicMock, Mock, patch import ifaddr import pytest import zeroconf as r +from zeroconf import get_all_addresses, get_all_addresses_v6 from zeroconf._utils import net as netutils @@ -35,6 +37,40 @@ def _generate_mock_adapters(): return [mock_eth0, mock_lo0, mock_eth1, mock_vtun0] +def test_get_all_addresses() -> None: + """Test public get_all_addresses API.""" + with ( + patch( + "zeroconf._utils.net.ifaddr.get_adapters", + return_value=_generate_mock_adapters(), + ), + warnings.catch_warnings(record=True) as warned, + ): + addresses = get_all_addresses() + assert isinstance(addresses, list) + assert len(addresses) == 3 + assert len(warned) == 1 + first_warning = warned[0] + assert "get_all_addresses is deprecated" in str(first_warning.message) + + +def test_get_all_addresses_v6() -> None: + """Test public get_all_addresses_v6 API.""" + with ( + patch( + "zeroconf._utils.net.ifaddr.get_adapters", + return_value=_generate_mock_adapters(), + ), + warnings.catch_warnings(record=True) as warned, + ): + addresses = get_all_addresses_v6() + assert isinstance(addresses, list) + assert len(addresses) == 1 + assert len(warned) == 1 + first_warning = warned[0] + assert "get_all_addresses_v6 is deprecated" in str(first_warning.message) + + def test_ip6_to_address_and_index(): """Test we can extract from mocked adapters.""" adapters = _generate_mock_adapters() @@ -84,8 +120,8 @@ def test_ip6_addresses_to_indexes(): def test_normalize_interface_choice_errors(): """Test we generate exception on invalid input.""" with ( - patch("zeroconf._utils.net.get_all_addresses", return_value=[]), - patch("zeroconf._utils.net.get_all_addresses_v6", return_value=[]), + patch("zeroconf._utils.net.get_all_addresses_ipv4", return_value=[]), + patch("zeroconf._utils.net.get_all_addresses_ipv6", return_value=[]), pytest.raises(RuntimeError), ): netutils.normalize_interface_choice(r.InterfaceChoice.All) From 
79016f12055272e700d0f1aca38e9bcd2f89aa3e Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 14 Apr 2025 09:45:11 +0000 Subject: [PATCH 400/434] 0.146.4 Automatically generated by python-semantic-release --- CHANGELOG.md | 9 +++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ccb6bdd7..3c56284d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,15 @@ # CHANGELOG +## v0.146.4 (2025-04-14) + +### Bug Fixes + +- Avoid loading adapter list twice + ([#1564](https://github.com/python-zeroconf/python-zeroconf/pull/1564), + [`8359488`](https://github.com/python-zeroconf/python-zeroconf/commit/83594887521507cf77bfc0a397becabaaab287c2)) + + ## v0.146.3 (2025-04-02) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index 7e21a38f..e4de325f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.146.3" +version = "0.146.4" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index c266c318..89b622c2 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.146.3" +__version__ = "0.146.4" __license__ = "LGPL" From 77a6717e0f2185ff8da090b6442404bb3c8a9919 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Mon, 14 Apr 2025 11:54:26 +0200 Subject: [PATCH 401/434] chore(test): fix resource warnings in test_net.py (#1565) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- tests/utils/test_net.py | 175 ++++++++++++++++++++++------------------ 1 file changed, 96 insertions(+), 79 deletions(-) diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index eff2befd..2ed0c6f2 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -165,8 +165,8 @@ def test_disable_ipv6_only_or_raise(): def _log_error(*args): errors_logged.append(args) - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) with ( + socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock, pytest.raises(OSError), patch.object(netutils.log, "error", _log_error), patch("socket.socket.setsockopt", side_effect=OSError), @@ -182,19 +182,21 @@ def _log_error(*args): @pytest.mark.skipif(not hasattr(socket, "SO_REUSEPORT"), reason="System does not have SO_REUSEPORT") def test_set_so_reuseport_if_available_is_present(): """Test that setting socket.SO_REUSEPORT only OSError errno.ENOPROTOOPT is trapped.""" - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError): - netutils.set_so_reuseport_if_available(sock) + with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: + with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError): + netutils.set_so_reuseport_if_available(sock) - 
with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): - netutils.set_so_reuseport_if_available(sock) + with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): + netutils.set_so_reuseport_if_available(sock) @pytest.mark.skipif(hasattr(socket, "SO_REUSEPORT"), reason="System has SO_REUSEPORT") def test_set_so_reuseport_if_available_not_present(): """Test that we do not try to set SO_REUSEPORT if it is not present.""" - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - with patch("socket.socket.setsockopt", side_effect=OSError): + with ( + socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock, + patch("socket.socket.setsockopt", side_effect=OSError), + ): netutils.set_so_reuseport_if_available(sock) @@ -202,80 +204,95 @@ def test_set_mdns_port_socket_options_for_ip_version(): """Test OSError with errno with EINVAL and bind address ''. from setsockopt IP_MULTICAST_TTL does not raise.""" - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - - # Should raise on EPERM always - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) - - # Should raise on EINVAL always when bind address is not '' - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ("127.0.0.1",), r.IPVersion.V4Only) - - # Should not raise on EINVAL when bind address is '' - with patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) + with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: + # Should raise on EPERM always + with ( + pytest.raises(OSError), + patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)), + ): + 
netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) + + # Should raise on EINVAL always when bind address is not '' + with ( + pytest.raises(OSError), + patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)), + ): + netutils.set_mdns_port_socket_options_for_ip_version(sock, ("127.0.0.1",), r.IPVersion.V4Only) + + # Should not raise on EINVAL when bind address is '' + with patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): + netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) def test_add_multicast_member(caplog: pytest.LogCaptureFixture) -> None: - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - interface = "127.0.0.1" - - # EPERM should always raise - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)): - netutils.add_multicast_member(sock, interface) - - # EADDRINUSE should return False - with patch("socket.socket.setsockopt", side_effect=OSError(errno.EADDRINUSE, None)): - assert netutils.add_multicast_member(sock, interface) is False - - # EADDRNOTAVAIL should return False - with patch("socket.socket.setsockopt", side_effect=OSError(errno.EADDRNOTAVAIL, None)): - assert netutils.add_multicast_member(sock, interface) is False - - # EINVAL should return False - with patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): - assert netutils.add_multicast_member(sock, interface) is False - - # ENOPROTOOPT should return False - with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): - assert netutils.add_multicast_member(sock, interface) is False - - # ENODEV should raise for ipv4 - with pytest.raises(OSError), patch("socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None)): - assert netutils.add_multicast_member(sock, interface) is False - - # ENODEV should return False for ipv6 - with patch("socket.socket.setsockopt", 
side_effect=OSError(errno.ENODEV, None)): - assert netutils.add_multicast_member(sock, ("2001:db8::", 1, 1)) is False # type: ignore[arg-type] - - # No IPv6 support should return False for IPv6 - with patch("socket.inet_pton", side_effect=OSError()): - assert netutils.add_multicast_member(sock, ("2001:db8::", 1, 1)) is False # type: ignore[arg-type] - - # No error should return True - with patch("socket.socket.setsockopt"): - assert netutils.add_multicast_member(sock, interface) is True - - # Ran out of IGMP memberships is forgiving and logs about igmp_max_memberships on linux - caplog.clear() - with ( - patch.object(sys, "platform", "linux"), - patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOBUFS, "No buffer space available")), - ): - assert netutils.add_multicast_member(sock, interface) is False - assert "No buffer space available" in caplog.text - assert "net.ipv4.igmp_max_memberships" in caplog.text - - # Ran out of IGMP memberships is forgiving and logs - caplog.clear() - with ( - patch.object(sys, "platform", "darwin"), - patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOBUFS, "No buffer space available")), - ): - assert netutils.add_multicast_member(sock, interface) is False - assert "No buffer space available" in caplog.text - assert "net.ipv4.igmp_max_memberships" not in caplog.text + with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: + interface = "127.0.0.1" + + # EPERM should always raise + with ( + pytest.raises(OSError), + patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)), + ): + netutils.add_multicast_member(sock, interface) + + # EADDRINUSE should return False + with patch("socket.socket.setsockopt", side_effect=OSError(errno.EADDRINUSE, None)): + assert netutils.add_multicast_member(sock, interface) is False + + # EADDRNOTAVAIL should return False + with patch("socket.socket.setsockopt", side_effect=OSError(errno.EADDRNOTAVAIL, None)): + assert netutils.add_multicast_member(sock, 
interface) is False + + # EINVAL should return False + with patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): + assert netutils.add_multicast_member(sock, interface) is False + + # ENOPROTOOPT should return False + with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENOPROTOOPT, None)): + assert netutils.add_multicast_member(sock, interface) is False + + # ENODEV should raise for ipv4 + with ( + pytest.raises(OSError), + patch("socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None)), + ): + assert netutils.add_multicast_member(sock, interface) is False + + # ENODEV should return False for ipv6 + with patch("socket.socket.setsockopt", side_effect=OSError(errno.ENODEV, None)): + assert netutils.add_multicast_member(sock, ("2001:db8::", 1, 1)) is False # type: ignore[arg-type] + + # No IPv6 support should return False for IPv6 + with patch("socket.inet_pton", side_effect=OSError()): + assert netutils.add_multicast_member(sock, ("2001:db8::", 1, 1)) is False # type: ignore[arg-type] + + # No error should return True + with patch("socket.socket.setsockopt"): + assert netutils.add_multicast_member(sock, interface) is True + + # Ran out of IGMP memberships is forgiving and logs about igmp_max_memberships on linux + caplog.clear() + with ( + patch.object(sys, "platform", "linux"), + patch( + "socket.socket.setsockopt", side_effect=OSError(errno.ENOBUFS, "No buffer space available") + ), + ): + assert netutils.add_multicast_member(sock, interface) is False + assert "No buffer space available" in caplog.text + assert "net.ipv4.igmp_max_memberships" in caplog.text + + # Ran out of IGMP memberships is forgiving and logs + caplog.clear() + with ( + patch.object(sys, "platform", "darwin"), + patch( + "socket.socket.setsockopt", side_effect=OSError(errno.ENOBUFS, "No buffer space available") + ), + ): + assert netutils.add_multicast_member(sock, interface) is False + assert "No buffer space available" in caplog.text + assert 
"net.ipv4.igmp_max_memberships" not in caplog.text def test_bind_raises_skips_address(): From cb2f5b15403df8d4f8abb6f7dcac6d867756fb9a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 07:42:27 -1000 Subject: [PATCH 402/434] chore(pre-commit.ci): pre-commit autoupdate (#1566) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1faee010..19efa1c7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.5.0 + rev: v4.6.0 hooks: - id: commitizen stages: [commit-msg] @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.4 + rev: v0.11.5 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From cc0f8350c30c82409b1a9bfecb19ff9b3368d6a7 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Mon, 14 Apr 2025 23:10:36 +0200 Subject: [PATCH 403/434] fix: address non-working socket configuration (#1563) Co-authored-by: J. 
Nick Koston --- src/zeroconf/_utils/net.py | 97 ++++++++++++++++++++++--------------- tests/test_core.py | 19 ++++++-- tests/utils/test_net.py | 99 +++++++++++++++++++++++++++++--------- 3 files changed, 148 insertions(+), 67 deletions(-) diff --git a/src/zeroconf/_utils/net.py b/src/zeroconf/_utils/net.py index e687ab60..e67edf78 100644 --- a/src/zeroconf/_utils/net.py +++ b/src/zeroconf/_utils/net.py @@ -168,8 +168,17 @@ def normalize_interface_choice( result: list[str | tuple[tuple[str, int, int], int]] = [] if choice is InterfaceChoice.Default: if ip_version != IPVersion.V4Only: - # IPv6 multicast uses interface 0 to mean the default - result.append((("", 0, 0), 0)) + # IPv6 multicast uses interface 0 to mean the default. However, + # the default interface can't be used for outgoing IPv6 multicast + # requests. In a way, interface choice default isn't really working + # with IPv6. Inform the user accordingly. + message = ( + "IPv6 multicast requests can't be sent using default interface. " + "Use V4Only, InterfaceChoice.All or an explicit list of interfaces." + ) + log.error(message) + warnings.warn(message, DeprecationWarning, stacklevel=2) + result.append((("::", 0, 0), 0)) if ip_version != IPVersion.V6Only: result.append("0.0.0.0") elif choice is InterfaceChoice.All: @@ -220,28 +229,33 @@ def set_so_reuseport_if_available(s: socket.socket) -> None: raise -def set_mdns_port_socket_options_for_ip_version( +def set_respond_socket_multicast_options( s: socket.socket, - bind_addr: tuple[str] | tuple[str, int, int], ip_version: IPVersion, ) -> None: - """Set ttl/hops and loop for mdns port.""" - if ip_version != IPVersion.V6Only: - ttl = struct.pack(b"B", 255) - loop = struct.pack(b"B", 1) + """Set ttl/hops and loop for mDNS respond socket.""" + if ip_version == IPVersion.V4Only: # OpenBSD needs the ttl and loop values for the IP_MULTICAST_TTL and # IP_MULTICAST_LOOP socket options as an unsigned char. 
- try: - s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) - s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, loop) - except OSError as e: - if bind_addr[0] != "" or get_errno(e) != errno.EINVAL: # Fails to set on MacOS - raise - - if ip_version != IPVersion.V4Only: + ttl = struct.pack(b"B", 255) + loop = struct.pack(b"B", 1) + s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) + s.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, loop) + elif ip_version == IPVersion.V6Only: # However, char doesn't work here (at least on Linux) s.setsockopt(_IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, 255) s.setsockopt(_IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, True) + else: + # A shared sender socket is not really possible, especially with link-local + # multicast addresses (ff02::/16), the kernel needs to know which interface + # to use for routing. + # + # It seems that macOS even refuses to take IPv4 socket options if this is an + # AF_INET6 socket. + # + # In theory we could reconfigure the socket on each send, but that is not + # really practical for Python Zerconf. 
+ raise RuntimeError("Dual-stack responder socket not supported") def new_socket( @@ -266,14 +280,12 @@ def new_socket( s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) set_so_reuseport_if_available(s) - if port == _MDNS_PORT: - set_mdns_port_socket_options_for_ip_version(s, bind_addr, ip_version) - if apple_p2p: # SO_RECV_ANYIF = 0x1104 # https://opensource.apple.com/source/xnu/xnu-4570.41.2/bsd/sys/socket.h s.setsockopt(socket.SOL_SOCKET, 0x1104, 1) + # Bind expects (address, port) for AF_INET and (address, port, flowinfo, scope_id) for AF_INET6 bind_tup = (bind_addr[0], port, *bind_addr[1:]) try: s.bind(bind_tup) @@ -392,15 +404,27 @@ def add_multicast_member( def new_respond_socket( interface: str | tuple[tuple[str, int, int], int], apple_p2p: bool = False, + unicast: bool = False, ) -> socket.socket | None: + """Create interface specific socket for responding to multicast queries.""" is_v6 = isinstance(interface, tuple) + + # For response sockets: + # - Bind explicitly to the interface address + # - Use ephemeral ports if in unicast mode + # - Create socket according to the interface IP type (IPv4 or IPv6) respond_socket = new_socket( + bind_addr=cast(tuple[tuple[str, int, int], int], interface)[0] if is_v6 else (cast(str, interface),), + port=0 if unicast else _MDNS_PORT, ip_version=(IPVersion.V6Only if is_v6 else IPVersion.V4Only), apple_p2p=apple_p2p, - bind_addr=cast(tuple[tuple[str, int, int], int], interface)[0] if is_v6 else (cast(str, interface),), ) + if unicast: + return respond_socket + if not respond_socket: return None + log.debug("Configuring socket %s with multicast interface %s", respond_socket, interface) if is_v6: iface_bin = struct.pack("@I", cast(int, interface[1])) @@ -411,6 +435,7 @@ def new_respond_socket( socket.IP_MULTICAST_IF, socket.inet_aton(cast(str, interface)), ) + set_respond_socket_multicast_options(respond_socket, IPVersion.V6Only if is_v6 else IPVersion.V4Only) return respond_socket @@ -423,33 +448,27 @@ def 
create_sockets( if unicast: listen_socket = None else: - listen_socket = new_socket(ip_version=ip_version, apple_p2p=apple_p2p, bind_addr=("",)) + listen_socket = new_socket(bind_addr=("",), ip_version=ip_version, apple_p2p=apple_p2p) normalized_interfaces = normalize_interface_choice(interfaces, ip_version) - # If we are using InterfaceChoice.Default we can use + # If we are using InterfaceChoice.Default with only IPv4 or only IPv6, we can use # a single socket to listen and respond. - if not unicast and interfaces is InterfaceChoice.Default: - for i in normalized_interfaces: - add_multicast_member(cast(socket.socket, listen_socket), i) + if not unicast and interfaces is InterfaceChoice.Default and ip_version != IPVersion.All: + for interface in normalized_interfaces: + add_multicast_member(cast(socket.socket, listen_socket), interface) + # Sent responder socket options to the dual-use listen socket + set_respond_socket_multicast_options(cast(socket.socket, listen_socket), ip_version) return listen_socket, [cast(socket.socket, listen_socket)] respond_sockets = [] - for i in normalized_interfaces: - if not unicast: - if add_multicast_member(cast(socket.socket, listen_socket), i): - respond_socket = new_respond_socket(i, apple_p2p=apple_p2p) - else: - respond_socket = None - else: - is_v6 = isinstance(i, tuple) - respond_socket = new_socket( - port=0, - ip_version=IPVersion.V6Only if is_v6 else IPVersion.V4Only, - apple_p2p=apple_p2p, - bind_addr=cast(tuple[tuple[str, int, int], int], i)[0] if is_v6 else (cast(str, i),), - ) + for interface in normalized_interfaces: + # Only create response socket if unicast or becoming multicast member was successful + if not unicast and not add_multicast_member(cast(socket.socket, listen_socket), interface): + continue + + respond_socket = new_respond_socket(interface, apple_p2p=apple_p2p, unicast=unicast) if respond_socket is not None: respond_sockets.append(respond_socket) diff --git a/tests/test_core.py b/tests/test_core.py 
index fcfdf424..8c53d207 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -11,6 +11,7 @@ import time import unittest import unittest.mock +import warnings from typing import cast from unittest.mock import AsyncMock, Mock, patch @@ -87,16 +88,26 @@ def test_close_multiple_times(self): def test_launch_and_close_v4_v6(self): rv = r.Zeroconf(interfaces=r.InterfaceChoice.All, ip_version=r.IPVersion.All) rv.close() - rv = r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.All) - rv.close() + with warnings.catch_warnings(record=True) as warned: + rv = r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.All) + rv.close() + first_warning = warned[0] + assert "IPv6 multicast requests can't be sent using default interface" in str( + first_warning.message + ) @unittest.skipIf(not has_working_ipv6(), "Requires IPv6") @unittest.skipIf(os.environ.get("SKIP_IPV6"), "IPv6 tests disabled") def test_launch_and_close_v6_only(self): rv = r.Zeroconf(interfaces=r.InterfaceChoice.All, ip_version=r.IPVersion.V6Only) rv.close() - rv = r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.V6Only) - rv.close() + with warnings.catch_warnings(record=True) as warned: + rv = r.Zeroconf(interfaces=r.InterfaceChoice.Default, ip_version=r.IPVersion.V6Only) + rv.close() + first_warning = warned[0] + assert "IPv6 multicast requests can't be sent using default interface" in str( + first_warning.message + ) @unittest.skipIf(sys.platform == "darwin", reason="apple_p2p failure path not testable on mac") def test_launch_and_close_apple_p2p_not_mac(self): diff --git a/tests/utils/test_net.py b/tests/utils/test_net.py index 2ed0c6f2..7de10661 100644 --- a/tests/utils/test_net.py +++ b/tests/utils/test_net.py @@ -7,7 +7,7 @@ import sys import unittest import warnings -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import MagicMock, Mock, call, patch import ifaddr import pytest @@ -20,11 +20,11 @@ def 
_generate_mock_adapters(): mock_lo0 = Mock(spec=ifaddr.Adapter) mock_lo0.nice_name = "lo0" - mock_lo0.ips = [ifaddr.IP("127.0.0.1", 8, "lo0")] + mock_lo0.ips = [ifaddr.IP("127.0.0.1", 8, "lo0"), ifaddr.IP(("::1", 0, 0), 128, "lo")] mock_lo0.index = 0 mock_eth0 = Mock(spec=ifaddr.Adapter) mock_eth0.nice_name = "eth0" - mock_eth0.ips = [ifaddr.IP(("2001:db8::", 1, 1), 8, "eth0")] + mock_eth0.ips = [ifaddr.IP(("2001:db8::", 1, 1), 8, "eth0"), ifaddr.IP(("fd00:db8::", 1, 1), 8, "eth0")] mock_eth0.index = 1 mock_eth1 = Mock(spec=ifaddr.Adapter) mock_eth1.nice_name = "eth1" @@ -65,7 +65,7 @@ def test_get_all_addresses_v6() -> None: ): addresses = get_all_addresses_v6() assert isinstance(addresses, list) - assert len(addresses) == 1 + assert len(addresses) == 3 assert len(warned) == 1 first_warning = warned[0] assert "get_all_addresses_v6 is deprecated" in str(first_warning.message) @@ -200,28 +200,20 @@ def test_set_so_reuseport_if_available_not_present(): netutils.set_so_reuseport_if_available(sock) -def test_set_mdns_port_socket_options_for_ip_version(): +def test_set_respond_socket_multicast_options(): """Test OSError with errno with EINVAL and bind address ''. 
from setsockopt IP_MULTICAST_TTL does not raise.""" - with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock: - # Should raise on EPERM always - with ( - pytest.raises(OSError), - patch("socket.socket.setsockopt", side_effect=OSError(errno.EPERM, None)), - ): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) - - # Should raise on EINVAL always when bind address is not '' - with ( - pytest.raises(OSError), - patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)), - ): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ("127.0.0.1",), r.IPVersion.V4Only) + # Should raise on EINVAL always + with ( + socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock, + pytest.raises(OSError), + patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)), + ): + netutils.set_respond_socket_multicast_options(sock, r.IPVersion.V4Only) - # Should not raise on EINVAL when bind address is '' - with patch("socket.socket.setsockopt", side_effect=OSError(errno.EINVAL, None)): - netutils.set_mdns_port_socket_options_for_ip_version(sock, ("",), r.IPVersion.V4Only) + with pytest.raises(RuntimeError): + netutils.set_respond_socket_multicast_options(sock, r.IPVersion.All) def test_add_multicast_member(caplog: pytest.LogCaptureFixture) -> None: @@ -352,8 +344,8 @@ def test_new_respond_socket_new_socket_returns_none(): assert netutils.new_respond_socket(("0.0.0.0", 0)) is None # type: ignore[arg-type] -def test_create_sockets(): - """Test create_sockets with unicast and IPv4.""" +def test_create_sockets_interfaces_all_unicast(): + """Test create_sockets with unicast.""" with ( patch("zeroconf._utils.net.new_socket") as mock_new_socket, @@ -382,3 +374,62 @@ def test_create_sockets(): apple_p2p=False, bind_addr=("192.168.1.5",), ) + + +def test_create_sockets_interfaces_all() -> None: + """Test create_sockets with all interfaces. 
+ + Tests if a responder socket is created for every successful multicast + join. + """ + adapters = _generate_mock_adapters() + + # Additional IPv6 addresses usually fail to add membership + failure_interface = ("fd00:db8::", 1, 1) + + expected_calls = [] + for adapter in adapters: + for ip in adapter.ips: + if ip.ip == failure_interface: + continue + + if ip.is_IPv4: + bind_addr = (ip.ip,) + ip_version = r.IPVersion.V4Only + else: + bind_addr = ip.ip + ip_version = r.IPVersion.V6Only + + expected_calls.append( + call( + port=5353, + ip_version=ip_version, + apple_p2p=False, + bind_addr=bind_addr, + ) + ) + + def _patched_add_multicast_member(sock, interface): + return interface[0] != failure_interface + + with ( + patch("zeroconf._utils.net.new_socket") as mock_new_socket, + patch( + "zeroconf._utils.net.ifaddr.get_adapters", + return_value=adapters, + ), + patch("zeroconf._utils.net.add_multicast_member", side_effect=_patched_add_multicast_member), + ): + mock_socket = Mock(spec=socket.socket) + mock_new_socket.return_value = mock_socket + + r.create_sockets(interfaces=r.InterfaceChoice.All, ip_version=r.IPVersion.All) + + def call_to_tuple(c): + return (c.args, tuple(sorted(c.kwargs.items()))) + + # Exclude first new_socket call as this is the listen socket + actual_calls_set = {call_to_tuple(c) for c in mock_new_socket.call_args_list[1:]} + expected_calls_set = {call_to_tuple(c) for c in expected_calls} + + assert actual_calls_set == expected_calls_set From d2517387dccf8b55b71bbbc62919ded55c8359d2 Mon Sep 17 00:00:00 2001 From: semantic-release Date: Mon, 14 Apr 2025 21:20:26 +0000 Subject: [PATCH 404/434] 0.146.5 Automatically generated by python-semantic-release --- CHANGELOG.md | 11 +++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c56284d..6d107aa5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,17 @@ # CHANGELOG +## v0.146.5 
(2025-04-14) + +### Bug Fixes + +- Address non-working socket configuration + ([#1563](https://github.com/python-zeroconf/python-zeroconf/pull/1563), + [`cc0f835`](https://github.com/python-zeroconf/python-zeroconf/commit/cc0f8350c30c82409b1a9bfecb19ff9b3368d6a7)) + +Co-authored-by: J. Nick Koston + + ## v0.146.4 (2025-04-14) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index e4de325f..189d9ddc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.146.4" +version = "0.146.5" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 89b622c2..2449e835 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.146.4" +__version__ = "0.146.5" __license__ = "LGPL" From a11abc45fe2d9ebc5574092f9d4b3048ff3833fd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 08:37:59 -1000 Subject: [PATCH 405/434] chore(pre-commit.ci): pre-commit autoupdate (#1570) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 19efa1c7..cc85aa90 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.5 + rev: v0.11.6 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From b6d5aa36444cb30c87a17903021f041b4dbbe252 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Apr 2025 08:38:12 -1000 Subject: [PATCH 406/434] chore(deps-dev): bump setuptools from 78.1.0 to 79.0.0 (#1569) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 367374ed..6fa6edce 100644 --- a/poetry.lock +++ b/poetry.lock @@ -826,14 +826,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "78.1.0" +version = "79.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-78.1.0-py3-none-any.whl", hash = "sha256:3e386e96793c8702ae83d17b853fb93d3e09ef82ec62722e61da5cd22376dcd8"}, - {file = "setuptools-78.1.0.tar.gz", hash = "sha256:18fd474d4a82a5f83dac888df697af65afa82dec7323d09c3e37d1f14288da54"}, + {file = "setuptools-79.0.0-py3-none-any.whl", hash = "sha256:b9ab3a104bedb292323f53797b00864e10e434a3ab3906813a7169e4745b912a"}, + {file = "setuptools-79.0.0.tar.gz", hash = "sha256:9828422e7541213b0aacb6e10bbf9dd8febeaa45a48570e09b6d100e063fc9f9"}, ] [package.extras] @@ -1127,4 +1127,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "e3c96e694e9c149b96323081d51675d7a9d5ad8243f4338ff149e643a65417cb" +content-hash = "bcb9007a7aedbd388c0e4a757d21ccb2443fe58d07e8bc57493ee4d5f54eb998" diff --git a/pyproject.toml b/pyproject.toml index 189d9ddc..96c0aec0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest = ">=7.2,<9.0" pytest-cov = ">=4,<7" pytest-asyncio = ">=0.20.3,<0.27.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<79.0.0" +setuptools = ">=65.6.3,<80.0.0" pytest-timeout = "^2.1.0" pytest-codspeed = "^3.1.0" From 2874924c27d822fd6eaea12126e071b60effb6fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 27 Apr 2025 23:31:18 -0500 Subject: [PATCH 407/434] chore(deps-dev): bump setuptools from 79.0.0 to 80.0.0 (#1571) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6fa6edce..13e12b6e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -826,14 +826,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "79.0.0" +version = "80.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-79.0.0-py3-none-any.whl", hash = "sha256:b9ab3a104bedb292323f53797b00864e10e434a3ab3906813a7169e4745b912a"}, - {file = "setuptools-79.0.0.tar.gz", hash = "sha256:9828422e7541213b0aacb6e10bbf9dd8febeaa45a48570e09b6d100e063fc9f9"}, + {file = "setuptools-80.0.0-py3-none-any.whl", hash = "sha256:a38f898dcd6e5380f4da4381a87ec90bd0a7eec23d204a5552e80ee3cab6bd27"}, + {file = "setuptools-80.0.0.tar.gz", hash = "sha256:c40a5b3729d58dd749c0f08f1a07d134fb8a0a3d7f87dc33e7c5e1f762138650"}, ] [package.extras] @@ -1127,4 +1127,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "bcb9007a7aedbd388c0e4a757d21ccb2443fe58d07e8bc57493ee4d5f54eb998" +content-hash = "972988da838067a7f2d12b8212ce54ba946cb38a4f63576a520dd1ed40ac3e9b" diff --git a/pyproject.toml b/pyproject.toml index 96c0aec0..a1390502 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest = ">=7.2,<9.0" pytest-cov = ">=4,<7" pytest-asyncio = ">=0.20.3,<0.27.0" cython = "^3.0.5" -setuptools = ">=65.6.3,<80.0.0" +setuptools = ">=65.6.3,<81.0.0" pytest-timeout = "^2.1.0" pytest-codspeed = "^3.1.0" From cb54a65cd1b9a80bf0c19c3e274adf20703cd783 Mon Sep 17 00:00:00 2001 From: 
"pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 28 Apr 2025 12:46:38 -0400 Subject: [PATCH 408/434] chore(pre-commit.ci): pre-commit autoupdate (#1572) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cc85aa90..8ad48a33 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.6 + rev: v0.11.7 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From f5c15e9bc412936a6fc943771ea0d66cba73e050 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 1 May 2025 09:49:23 +0200 Subject: [PATCH 409/434] chore(ci): bump the github-actions group with 4 updates (#1573) --- .github/workflows/ci.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ffe20f82..5e8d1ef0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 + - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: python-version: "3.12" - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1 @@ -69,7 +69,7 @@ jobs: - name: Install poetry run: pipx install poetry - name: Set up Python - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: python-version: ${{ matrix.python-version }} cache: "poetry" @@ -87,7 +87,7 @@ jobs: - name: Test with Pytest run: 
poetry run pytest --durations=20 --timeout=60 -v --cov=zeroconf --cov-branch --cov-report xml --cov-report html --cov-report term-missing tests - name: Upload coverage to Codecov - uses: codecov/codecov-action@0565863a31f2c772f9f0395002a31e3f06189574 # v5 + uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5 with: token: ${{ secrets.CODECOV_TOKEN }} @@ -96,7 +96,7 @@ jobs: steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Setup Python 3.13 - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: python-version: 3.13 - uses: snok/install-poetry@76e04a911780d5b312d89783f7b1cd627778900a # v1.4.1 @@ -234,7 +234,7 @@ jobs: ref: "master" # Used to host cibuildwheel - name: Set up Python - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5 + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5 with: python-version: "3.12" - name: Set up QEMU @@ -268,7 +268,7 @@ jobs: fetch-depth: 0 - name: Build wheels ${{ matrix.musl }} (${{ matrix.qemu }}) - uses: pypa/cibuildwheel@d04cacbc9866d432033b1d09142936e6a0e2121a # v2.23.2 + uses: pypa/cibuildwheel@faf86a6ed7efa889faf6996aa23820831055001a # v2.23.3 # to supply options, put them in 'env', like: env: CIBW_SKIP: cp36-* cp37-* pp36-* pp37-* pp38-* cp38-* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} @@ -288,7 +288,7 @@ jobs: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - - uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # v4 + - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4 with: # unpacks default artifact into dist/ # if `name: artifact` is omitted, the action will create extra parent dir From 02eef34ca5df803b05ff337a9103d7994458988d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20G=C3=B3rny?= Date: Sat, 3 
May 2025 16:50:43 +0200 Subject: [PATCH 410/434] chore: some Cython 3.1.0rc1 build failures (#1574) --- src/zeroconf/_listener.pxd | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/zeroconf/_listener.pxd b/src/zeroconf/_listener.pxd index 20084b47..4cbc5d00 100644 --- a/src/zeroconf/_listener.pxd +++ b/src/zeroconf/_listener.pxd @@ -50,7 +50,7 @@ cdef class AsyncListener: cpdef _respond_query( self, - object msg, + DNSIncoming msg, object addr, object port, object transport, From 66b673cb768eaa15581ea60a8de590382806937c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 3 May 2025 10:03:16 -0500 Subject: [PATCH 411/434] chore: make zeroconf._services.info compatible with Cython 3.1 (#1576) --- src/zeroconf/_services/info.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/zeroconf/_services/info.py b/src/zeroconf/_services/info.py index fff9e125..9b38de9d 100644 --- a/src/zeroconf/_services/info.py +++ b/src/zeroconf/_services/info.py @@ -577,7 +577,7 @@ def _process_record_threadsafe(self, zc: Zeroconf, record: DNSRecord, now: float def dns_addresses( self, - override_ttl: int | None = None, + override_ttl: int_ | None = None, version: IPVersion = IPVersion.All, ) -> list[DNSAddress]: """Return matching DNSAddress from ServiceInfo.""" @@ -585,7 +585,7 @@ def dns_addresses( def _dns_addresses( self, - override_ttl: int | None, + override_ttl: int_ | None, version: IPVersion, ) -> list[DNSAddress]: """Return matching DNSAddress from ServiceInfo.""" @@ -611,11 +611,11 @@ def _dns_addresses( self._dns_address_cache = records return records - def dns_pointer(self, override_ttl: int | None = None) -> DNSPointer: + def dns_pointer(self, override_ttl: int_ | None = None) -> DNSPointer: """Return DNSPointer from ServiceInfo.""" return self._dns_pointer(override_ttl) - def _dns_pointer(self, override_ttl: int | None) -> DNSPointer: + def _dns_pointer(self, override_ttl: int_ | None) -> DNSPointer: 
"""Return DNSPointer from ServiceInfo.""" cacheable = override_ttl is None if self._dns_pointer_cache is not None and cacheable: @@ -632,11 +632,11 @@ def _dns_pointer(self, override_ttl: int | None) -> DNSPointer: self._dns_pointer_cache = record return record - def dns_service(self, override_ttl: int | None = None) -> DNSService: + def dns_service(self, override_ttl: int_ | None = None) -> DNSService: """Return DNSService from ServiceInfo.""" return self._dns_service(override_ttl) - def _dns_service(self, override_ttl: int | None) -> DNSService: + def _dns_service(self, override_ttl: int_ | None) -> DNSService: """Return DNSService from ServiceInfo.""" cacheable = override_ttl is None if self._dns_service_cache is not None and cacheable: @@ -659,11 +659,11 @@ def _dns_service(self, override_ttl: int | None) -> DNSService: self._dns_service_cache = record return record - def dns_text(self, override_ttl: int | None = None) -> DNSText: + def dns_text(self, override_ttl: int_ | None = None) -> DNSText: """Return DNSText from ServiceInfo.""" return self._dns_text(override_ttl) - def _dns_text(self, override_ttl: int | None) -> DNSText: + def _dns_text(self, override_ttl: int_ | None) -> DNSText: """Return DNSText from ServiceInfo.""" cacheable = override_ttl is None if self._dns_text_cache is not None and cacheable: @@ -680,11 +680,11 @@ def _dns_text(self, override_ttl: int | None) -> DNSText: self._dns_text_cache = record return record - def dns_nsec(self, missing_types: list[int], override_ttl: int | None = None) -> DNSNsec: + def dns_nsec(self, missing_types: list[int], override_ttl: int_ | None = None) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return self._dns_nsec(missing_types, override_ttl) - def _dns_nsec(self, missing_types: list[int], override_ttl: int | None) -> DNSNsec: + def _dns_nsec(self, missing_types: list[int], override_ttl: int_ | None) -> DNSNsec: """Return DNSNsec from ServiceInfo.""" return DNSNsec( self._name, @@ -696,11 +696,11 @@ def 
_dns_nsec(self, missing_types: list[int], override_ttl: int | None) -> DNSNs 0.0, ) - def get_address_and_nsec_records(self, override_ttl: int | None = None) -> set[DNSRecord]: + def get_address_and_nsec_records(self, override_ttl: int_ | None = None) -> set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" return self._get_address_and_nsec_records(override_ttl) - def _get_address_and_nsec_records(self, override_ttl: int | None) -> set[DNSRecord]: + def _get_address_and_nsec_records(self, override_ttl: int_ | None) -> set[DNSRecord]: """Build a set of address records and NSEC records for non-present record types.""" cacheable = override_ttl is None if self._get_address_and_nsec_records_cache is not None and cacheable: From 5a72fd4ca0c10c9759341517c3bfb0fd0bf062c8 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 3 May 2025 10:09:58 -0500 Subject: [PATCH 412/434] chore: migrate TTL to only accept int (#1577) --- src/zeroconf/_cache.pxd | 2 +- src/zeroconf/_cache.py | 2 +- src/zeroconf/_dns.pxd | 18 +++++++++--------- src/zeroconf/_dns.py | 23 +++++++++++------------ src/zeroconf/_handlers/record_manager.pxd | 2 +- src/zeroconf/_services/browser.py | 3 +-- src/zeroconf/const.py | 2 +- tests/test_protocol.py | 2 +- 8 files changed, 26 insertions(+), 28 deletions(-) diff --git a/src/zeroconf/_cache.pxd b/src/zeroconf/_cache.pxd index 273d46c3..05a40c0f 100644 --- a/src/zeroconf/_cache.pxd +++ b/src/zeroconf/_cache.pxd @@ -83,5 +83,5 @@ cdef class DNSCache: self, DNSRecord record, double now, - cython.float ttl + unsigned int ttl ) diff --git a/src/zeroconf/_cache.py b/src/zeroconf/_cache.py index c8e2686e..c7ca8472 100644 --- a/src/zeroconf/_cache.py +++ b/src/zeroconf/_cache.py @@ -317,7 +317,7 @@ def async_mark_unique_records_older_than_1s_to_expire( # Expire in 1s self._async_set_created_ttl(record, now, 1) - def _async_set_created_ttl(self, record: DNSRecord, now: _float, ttl: _float) -> None: + def 
_async_set_created_ttl(self, record: DNSRecord, now: _float, ttl: _int) -> None: """Set the created time and ttl of a record.""" # It would be better if we made a copy instead of mutating the record # in place, but records currently don't have a copy method. diff --git a/src/zeroconf/_dns.pxd b/src/zeroconf/_dns.pxd index 5ff98a8d..7ef1dbec 100644 --- a/src/zeroconf/_dns.pxd +++ b/src/zeroconf/_dns.pxd @@ -44,10 +44,10 @@ cdef class DNSQuestion(DNSEntry): cdef class DNSRecord(DNSEntry): - cdef public cython.float ttl + cdef public unsigned int ttl cdef public double created - cdef _fast_init_record(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, double created) + cdef _fast_init_record(self, str name, cython.uint type_, cython.uint class_, unsigned int ttl, double created) cdef bint _suppressed_by_answer(self, DNSRecord answer) @@ -66,7 +66,7 @@ cdef class DNSRecord(DNSEntry): cpdef bint is_recent(self, double now) - cdef _set_created_ttl(self, double now, cython.float ttl) + cdef _set_created_ttl(self, double now, unsigned int ttl) cdef class DNSAddress(DNSRecord): @@ -74,7 +74,7 @@ cdef class DNSAddress(DNSRecord): cdef public bytes address cdef public object scope_id - cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, bytes address, object scope_id, double created) + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, unsigned int ttl, bytes address, object scope_id, double created) cdef bint _eq(self, DNSAddress other) @@ -87,7 +87,7 @@ cdef class DNSHinfo(DNSRecord): cdef public str cpu cdef public str os - cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, str cpu, str os, double created) + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, unsigned int ttl, str cpu, str os, double created) cdef bint _eq(self, DNSHinfo other) @@ -99,7 +99,7 @@ cdef class DNSPointer(DNSRecord): cdef public str alias cdef public str 
alias_key - cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, str alias, double created) + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, unsigned int ttl, str alias, double created) cdef bint _eq(self, DNSPointer other) @@ -110,7 +110,7 @@ cdef class DNSText(DNSRecord): cdef public cython.int _hash cdef public bytes text - cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, bytes text, double created) + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, unsigned int ttl, bytes text, double created) cdef bint _eq(self, DNSText other) @@ -125,7 +125,7 @@ cdef class DNSService(DNSRecord): cdef public str server cdef public str server_key - cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, cython.uint priority, cython.uint weight, cython.uint port, str server, double created) + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, unsigned int ttl, cython.uint priority, cython.uint weight, cython.uint port, str server, double created) cdef bint _eq(self, DNSService other) @@ -137,7 +137,7 @@ cdef class DNSNsec(DNSRecord): cdef public str next_name cdef public cython.list rdtypes - cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, cython.float ttl, str next_name, cython.list rdtypes, double created) + cdef _fast_init(self, str name, cython.uint type_, cython.uint class_, unsigned int ttl, str next_name, cython.list rdtypes, double created) cdef bint _eq(self, DNSNsec other) diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 591eb018..60df14b1 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -166,18 +166,17 @@ class DNSRecord(DNSEntry): __slots__ = ("created", "ttl") - # TODO: Switch to just int ttl def __init__( self, name: str, type_: int, class_: int, - ttl: float | int, + ttl: _int, created: float | None = None, ) -> None: 
self._fast_init_record(name, type_, class_, ttl, created or current_time_millis()) - def _fast_init_record(self, name: str, type_: _int, class_: _int, ttl: _float, created: _float) -> None: + def _fast_init_record(self, name: str, type_: _int, class_: _int, ttl: _int, created: _float) -> None: """Fast init for reuse.""" self._fast_init_entry(name, type_, class_) self.ttl = ttl @@ -227,7 +226,7 @@ def is_recent(self, now: _float) -> bool: """Returns true if the record more than one quarter of its TTL remaining.""" return self.created + (_RECENT_TIME_MS * self.ttl) > now - def _set_created_ttl(self, created: _float, ttl: float | int) -> None: + def _set_created_ttl(self, created: _float, ttl: _int) -> None: """Set the created and ttl of a record.""" # It would be better if we made a copy instead of mutating the record # in place, but records currently don't have a copy method. @@ -266,7 +265,7 @@ def _fast_init( name: str, type_: _int, class_: _int, - ttl: _float, + ttl: _int, address: bytes, scope_id: _int | None, created: _float, @@ -327,7 +326,7 @@ def __init__( self._fast_init(name, type_, class_, ttl, cpu, os, created or current_time_millis()) def _fast_init( - self, name: str, type_: _int, class_: _int, ttl: _float, cpu: str, os: str, created: _float + self, name: str, type_: _int, class_: _int, ttl: _int, cpu: str, os: str, created: _float ) -> None: """Fast init for reuse.""" self._fast_init_record(name, type_, class_, ttl, created) @@ -374,7 +373,7 @@ def __init__( self._fast_init(name, type_, class_, ttl, alias, created or current_time_millis()) def _fast_init( - self, name: str, type_: _int, class_: _int, ttl: _float, alias: str, created: _float + self, name: str, type_: _int, class_: _int, ttl: _int, alias: str, created: _float ) -> None: self._fast_init_record(name, type_, class_, ttl, created) self.alias = alias @@ -429,7 +428,7 @@ def __init__( self._fast_init(name, type_, class_, ttl, text, created or current_time_millis()) def _fast_init( - self, 
name: str, type_: _int, class_: _int, ttl: _float, text: bytes, created: _float + self, name: str, type_: _int, class_: _int, ttl: _int, text: bytes, created: _float ) -> None: self._fast_init_record(name, type_, class_, ttl, created) self.text = text @@ -468,7 +467,7 @@ def __init__( name: str, type_: int, class_: int, - ttl: float | int, + ttl: int, priority: int, weight: int, port: int, @@ -484,7 +483,7 @@ def _fast_init( name: str, type_: _int, class_: _int, - ttl: _float, + ttl: _int, priority: _int, weight: _int, port: _int, @@ -539,7 +538,7 @@ def __init__( name: str, type_: int, class_: int, - ttl: int | float, + ttl: _int, next_name: str, rdtypes: list[int], created: float | None = None, @@ -551,7 +550,7 @@ def _fast_init( name: str, type_: _int, class_: _int, - ttl: _float, + ttl: _int, next_name: str, rdtypes: list[_int], created: _float, diff --git a/src/zeroconf/_handlers/record_manager.pxd b/src/zeroconf/_handlers/record_manager.pxd index 37232b13..b9bde975 100644 --- a/src/zeroconf/_handlers/record_manager.pxd +++ b/src/zeroconf/_handlers/record_manager.pxd @@ -8,7 +8,7 @@ from .._updates cimport RecordUpdateListener from .._utils.time cimport current_time_millis from .._record_update cimport RecordUpdate -cdef cython.float _DNS_PTR_MIN_TTL +cdef unsigned int _DNS_PTR_MIN_TTL cdef cython.uint _TYPE_PTR cdef object _ADDRESS_RECORD_TYPES cdef bint TYPE_CHECKING diff --git a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index ab8c050d..1f60e8f9 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -394,9 +394,8 @@ def _schedule_ptr_refresh( refresh_time_millis: float_, ) -> None: """Schedule a query for a pointer.""" - ttl = int(pointer.ttl) if isinstance(pointer.ttl, float) else pointer.ttl scheduled_ptr_query = _ScheduledPTRQuery( - pointer.alias, pointer.name, ttl, expire_time_millis, refresh_time_millis + pointer.alias, pointer.name, pointer.ttl, expire_time_millis, refresh_time_millis ) 
self._schedule_ptr_query(scheduled_ptr_query) diff --git a/src/zeroconf/const.py b/src/zeroconf/const.py index 3b4b3abc..c3a62875 100644 --- a/src/zeroconf/const.py +++ b/src/zeroconf/const.py @@ -57,7 +57,7 @@ # ServiceBrowsers generating excessive queries refresh queries. # Apple uses a 15s minimum TTL, however we do not have the same # level of rate limit and safe guards so we use 1/4 of the recommended value -_DNS_PTR_MIN_TTL = _DNS_OTHER_TTL / 4 +_DNS_PTR_MIN_TTL = 1125 _DNS_PACKET_HEADER_LEN = 12 diff --git a/tests/test_protocol.py b/tests/test_protocol.py index 08d7e600..edd87c2e 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -196,7 +196,7 @@ def test_suppress_answer(self): "testname2.local.", const._TYPE_SRV, const._CLASS_IN | const._CLASS_UNIQUE, - const._DNS_HOST_TTL / 2, + int(const._DNS_HOST_TTL / 2), 0, 0, 80, From daaf8d6981c778fe4ba0a63371d9368cf217891a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 3 May 2025 10:19:16 -0500 Subject: [PATCH 413/434] feat: Cython 3.1 support (#1578) From 1569383c6cf8ce8977427cfdaf5c7104ce52ab08 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 3 May 2025 11:04:00 -0500 Subject: [PATCH 414/434] feat: cython 3.11 support (#1579) From 1d9c94a82d8da16b8f5355131e6167b69293da6c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 3 May 2025 11:12:27 -0500 Subject: [PATCH 415/434] feat: add cython 3.1 support (#1580) From 4cf513f69169b5992a73fe0bc431ec17f8f5040d Mon Sep 17 00:00:00 2001 From: semantic-release Date: Sat, 3 May 2025 16:22:31 +0000 Subject: [PATCH 416/434] 0.147.0 Automatically generated by python-semantic-release --- CHANGELOG.md | 14 ++++++++++++++ pyproject.toml | 2 +- src/zeroconf/__init__.py | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d107aa5..d8a3d4cf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,20 @@ # CHANGELOG +## v0.147.0 (2025-05-03) + +### Features + +- Add cython 3.1 support ([#1580](https://github.com/python-zeroconf/python-zeroconf/pull/1580), + [`1d9c94a`](https://github.com/python-zeroconf/python-zeroconf/commit/1d9c94a82d8da16b8f5355131e6167b69293da6c)) + +- Cython 3.1 support ([#1578](https://github.com/python-zeroconf/python-zeroconf/pull/1578), + [`daaf8d6`](https://github.com/python-zeroconf/python-zeroconf/commit/daaf8d6981c778fe4ba0a63371d9368cf217891a)) + +- Cython 3.11 support ([#1579](https://github.com/python-zeroconf/python-zeroconf/pull/1579), + [`1569383`](https://github.com/python-zeroconf/python-zeroconf/commit/1569383c6cf8ce8977427cfdaf5c7104ce52ab08)) + + ## v0.146.5 (2025-04-14) ### Bug Fixes diff --git a/pyproject.toml b/pyproject.toml index a1390502..d47a1966 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "zeroconf" -version = "0.146.5" +version = "0.147.0" description = "A pure python implementation of multicast DNS service discovery" authors = ["Paul Scott-Murphy", "William McBrine", "Jakub Stasiak", "J. 
Nick Koston"] license = "LGPL-2.1-or-later" diff --git a/src/zeroconf/__init__.py b/src/zeroconf/__init__.py index 2449e835..439ffceb 100644 --- a/src/zeroconf/__init__.py +++ b/src/zeroconf/__init__.py @@ -88,7 +88,7 @@ __author__ = "Paul Scott-Murphy, William McBrine" __maintainer__ = "Jakub Stasiak " -__version__ = "0.146.5" +__version__ = "0.147.0" __license__ = "LGPL" From f278ed994e73f4316cc410bcdc5023294329117f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 4 May 2025 23:58:05 -0500 Subject: [PATCH 417/434] chore(deps-dev): bump setuptools from 80.0.0 to 80.3.1 (#1581) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 13e12b6e..950e3cbd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -826,14 +826,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "80.0.0" +version = "80.3.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-80.0.0-py3-none-any.whl", hash = "sha256:a38f898dcd6e5380f4da4381a87ec90bd0a7eec23d204a5552e80ee3cab6bd27"}, - {file = "setuptools-80.0.0.tar.gz", hash = "sha256:c40a5b3729d58dd749c0f08f1a07d134fb8a0a3d7f87dc33e7c5e1f762138650"}, + {file = "setuptools-80.3.1-py3-none-any.whl", hash = "sha256:ea8e00d7992054c4c592aeb892f6ad51fe1b4d90cc6947cc45c45717c40ec537"}, + {file = "setuptools-80.3.1.tar.gz", hash = "sha256:31e2c58dbb67c99c289f51c16d899afedae292b978f8051efaf6262d8212f927"}, ] [package.extras] From 53592faf8de7efbc8a8f12d333b4daffad035701 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 5 May 2025 11:54:45 -0500 Subject: [PATCH 418/434] chore(pre-commit.ci): pre-commit autoupdate (#1582) 
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8ad48a33..429bea6e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.6.0 + rev: v4.6.1 hooks: - id: commitizen stages: [commit-msg] @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.7 + rev: v0.11.8 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 61a7b8bbf5b0f97aaa275ee9ee54f24c5fec772b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 12 May 2025 12:37:22 -0500 Subject: [PATCH 419/434] chore: fix mocking with PyPy and new Cython 3.1 (#1586) --- tests/test_handlers.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/test_handlers.py b/tests/test_handlers.py index ffa4ff88..31354980 100644 --- a/tests/test_handlers.py +++ b/tests/test_handlers.py @@ -1863,6 +1863,7 @@ async def test_response_aggregation_random_delay(): addresses=[socket.inet_aton("10.0.1.2")], ) mocked_zc = unittest.mock.MagicMock() + mocked_zc.loop = asyncio.get_running_loop() outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 500) now = current_time_millis() @@ -1930,6 +1931,7 @@ async def test_future_answers_are_removed_on_send(): addresses=[socket.inet_aton("10.0.1.3")], ) mocked_zc = unittest.mock.MagicMock() + mocked_zc.loop = asyncio.get_running_loop() outgoing_queue = MulticastOutgoingQueue(mocked_zc, 0, 0) now = current_time_millis() From e827cb1e2cb1ab2d4663e60068c1c7de6634c6ba Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 13:02:38 -0500 Subject: [PATCH 420/434] chore(pre-commit.ci): pre-commit autoupdate (#1585) Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: J. Nick Koston --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 429bea6e..50a1dd37 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.6.1 + rev: v4.7.0 hooks: - id: commitizen stages: [commit-msg] @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.8 + rev: v0.11.9 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 97c0ce6863a5d8476dd63a35670c39e38fdc1c63 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 13:29:12 -0500 Subject: [PATCH 421/434] chore(deps-dev): bump pytest-timeout from 2.3.1 to 2.4.0 (#1583) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 950e3cbd..0b5bd93e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -769,14 +769,14 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-timeout" -version = "2.3.1" +version = "2.4.0" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, - {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, + {file = "pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2"}, + {file = "pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a"}, ] [package.dependencies] From 
9acbd4cc58917a77d5b18bfeb87ce90c2ce1a1dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 May 2025 13:29:27 -0500 Subject: [PATCH 422/434] chore(deps-dev): bump setuptools from 80.3.1 to 80.4.0 (#1584) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0b5bd93e..501a47e1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -826,14 +826,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "80.3.1" +version = "80.4.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-80.3.1-py3-none-any.whl", hash = "sha256:ea8e00d7992054c4c592aeb892f6ad51fe1b4d90cc6947cc45c45717c40ec537"}, - {file = "setuptools-80.3.1.tar.gz", hash = "sha256:31e2c58dbb67c99c289f51c16d899afedae292b978f8051efaf6262d8212f927"}, + {file = "setuptools-80.4.0-py3-none-any.whl", hash = "sha256:6cdc8cb9a7d590b237dbe4493614a9b75d0559b888047c1f67d49ba50fc3edb2"}, + {file = "setuptools-80.4.0.tar.gz", hash = "sha256:5a78f61820bc088c8e4add52932ae6b8cf423da2aff268c23f813cfbb13b4006"}, ] [package.extras] From 6360ec09368d91ad15ff773baf193606142486d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 01:53:40 -0400 Subject: [PATCH 423/434] chore(deps-dev): bump setuptools from 80.4.0 to 80.7.1 (#1587) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 501a47e1..c9e4642f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -826,14 +826,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "80.4.0" +version = "80.7.1" description = "Easily download, build, install, upgrade, and 
uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-80.4.0-py3-none-any.whl", hash = "sha256:6cdc8cb9a7d590b237dbe4493614a9b75d0559b888047c1f67d49ba50fc3edb2"}, - {file = "setuptools-80.4.0.tar.gz", hash = "sha256:5a78f61820bc088c8e4add52932ae6b8cf423da2aff268c23f813cfbb13b4006"}, + {file = "setuptools-80.7.1-py3-none-any.whl", hash = "sha256:ca5cc1069b85dc23070a6628e6bcecb3292acac802399c7f8edc0100619f9009"}, + {file = "setuptools-80.7.1.tar.gz", hash = "sha256:f6ffc5f0142b1bd8d0ca94ee91b30c0ca862ffd50826da1ea85258a06fd94552"}, ] [package.extras] From 9b1e55ecbf6d787f5ead341ca3b51b817350abda Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 25 May 2025 23:48:24 -0500 Subject: [PATCH 424/434] chore(deps-dev): bump cython from 3.0.12 to 3.1.1 (#1590) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 130 ++++++++++++++++++++++++++-------------------------- 1 file changed, 64 insertions(+), 66 deletions(-) diff --git a/poetry.lock b/poetry.lock index c9e4642f..c5862217 100644 --- a/poetry.lock +++ b/poetry.lock @@ -315,76 +315,74 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cython" -version = "3.0.12" +version = "3.1.1" description = "The Cython compiler for writing C extensions in the Python language." 
optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "Cython-3.0.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba67eee9413b66dd9fbacd33f0bc2e028a2a120991d77b5fd4b19d0b1e4039b9"}, - {file = "Cython-3.0.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee2717e5b5f7d966d0c6e27d2efe3698c357aa4d61bb3201997c7a4f9fe485a"}, - {file = "Cython-3.0.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cffc3464f641c8d0dda942c7c53015291beea11ec4d32421bed2f13b386b819"}, - {file = "Cython-3.0.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d3a8f81980ffbd74e52f9186d8f1654e347d0c44bfea6b5997028977f481a179"}, - {file = "Cython-3.0.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8d32856716c369d01f2385ad9177cdd1a11079ac89ea0932dc4882de1aa19174"}, - {file = "Cython-3.0.12-cp310-cp310-win32.whl", hash = "sha256:712c3f31adec140dc60d064a7f84741f50e2c25a8edd7ae746d5eb4d3ef7072a"}, - {file = "Cython-3.0.12-cp310-cp310-win_amd64.whl", hash = "sha256:d6945694c5b9170cfbd5f2c0d00ef7487a2de7aba83713a64ee4ebce7fad9e05"}, - {file = "Cython-3.0.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:feb86122a823937cc06e4c029d80ff69f082ebb0b959ab52a5af6cdd271c5dc3"}, - {file = "Cython-3.0.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfdbea486e702c328338314adb8e80f5f9741f06a0ae83aaec7463bc166d12e8"}, - {file = "Cython-3.0.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563de1728c8e48869d2380a1b76bbc1b1b1d01aba948480d68c1d05e52d20c92"}, - {file = "Cython-3.0.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398d4576c1e1f6316282aa0b4a55139254fbed965cba7813e6d9900d3092b128"}, - {file = "Cython-3.0.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:1e5eadef80143026944ea8f9904715a008f5108d1d644a89f63094cc37351e73"}, - {file = "Cython-3.0.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5a93cbda00a5451175b97dea5a9440a3fcee9e54b4cba7a7dbcba9a764b22aec"}, - {file = "Cython-3.0.12-cp311-cp311-win32.whl", hash = "sha256:3109e1d44425a2639e9a677b66cd7711721a5b606b65867cb2d8ef7a97e2237b"}, - {file = "Cython-3.0.12-cp311-cp311-win_amd64.whl", hash = "sha256:d4b70fc339adba1e2111b074ee6119fe9fd6072c957d8597bce9a0dd1c3c6784"}, - {file = "Cython-3.0.12-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fe030d4a00afb2844f5f70896b7f2a1a0d7da09bf3aa3d884cbe5f73fff5d310"}, - {file = "Cython-3.0.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7fec4f052b8fe173fe70eae75091389955b9a23d5cec3d576d21c5913b49d47"}, - {file = "Cython-3.0.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0faa5e39e5c8cdf6f9c3b1c3f24972826e45911e7f5b99cf99453fca5432f45e"}, - {file = "Cython-3.0.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d53de996ed340e9ab0fc85a88aaa8932f2591a2746e1ab1c06e262bd4ec4be7"}, - {file = "Cython-3.0.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ea3a0e19ab77266c738aa110684a753a04da4e709472cadeff487133354d6ab8"}, - {file = "Cython-3.0.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c151082884be468f2f405645858a857298ac7f7592729e5b54788b5c572717ba"}, - {file = "Cython-3.0.12-cp312-cp312-win32.whl", hash = "sha256:3083465749911ac3b2ce001b6bf17f404ac9dd35d8b08469d19dc7e717f5877a"}, - {file = "Cython-3.0.12-cp312-cp312-win_amd64.whl", hash = "sha256:c0b91c7ebace030dd558ea28730de8c580680b50768e5af66db2904a3716c3e3"}, - {file = "Cython-3.0.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4ee6f1ea1bead8e6cbc4e64571505b5d8dbdb3b58e679d31f3a84160cebf1a1a"}, - {file = "Cython-3.0.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:57aefa6d3341109e46ec1a13e3a763aaa2cbeb14e82af2485b318194be1d9170"}, - {file = "Cython-3.0.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:879ae9023958d63c0675015369384642d0afb9c9d1f3473df9186c42f7a9d265"}, - {file = "Cython-3.0.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36fcd584dae547de6f095500a380f4a0cce72b7a7e409e9ff03cb9beed6ac7a1"}, - {file = "Cython-3.0.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62b79dcc0de49efe9e84b9d0e2ae0a6fc9b14691a65565da727aa2e2e63c6a28"}, - {file = "Cython-3.0.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4aa255781b093a8401109d8f2104bbb2e52de7639d5896aefafddc85c30e0894"}, - {file = "Cython-3.0.12-cp313-cp313-win32.whl", hash = "sha256:77d48f2d4bab9fe1236eb753d18f03e8b2619af5b6f05d51df0532a92dfb38ab"}, - {file = "Cython-3.0.12-cp313-cp313-win_amd64.whl", hash = "sha256:86c304b20bd57c727c7357e90d5ba1a2b6f1c45492de2373814d7745ef2e63b4"}, - {file = "Cython-3.0.12-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ff5c0b6a65b08117d0534941d404833d516dac422eee88c6b4fd55feb409a5ed"}, - {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:680f1d6ed4436ae94805db264d6155ed076d2835d84f20dcb31a7a3ad7f8668c"}, - {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc24609613fa06d0d896309f7164ba168f7e8d71c1e490ed2a08d23351c3f41"}, - {file = "Cython-3.0.12-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1879c073e2b34924ce9b7ca64c212705dcc416af4337c45f371242b2e5f6d32"}, - {file = "Cython-3.0.12-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:bfb75123dd4ff767baa37d7036da0de2dfb6781ff256eef69b11b88b9a0691d1"}, - {file = "Cython-3.0.12-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:f39640f8df0400cde6882e23c734f15bb8196de0a008ae5dc6c8d1ec5957d7c8"}, - {file = 
"Cython-3.0.12-cp36-cp36m-win32.whl", hash = "sha256:8c9efe9a0895abee3cadfdad4130b30f7b5e57f6e6a51ef2a44f9fc66a913880"}, - {file = "Cython-3.0.12-cp36-cp36m-win_amd64.whl", hash = "sha256:63d840f2975e44d74512f8f34f1f7cb8121c9428e26a3f6116ff273deb5e60a2"}, - {file = "Cython-3.0.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:75c5acd40b97cff16fadcf6901a91586cbca5dcdba81f738efaf1f4c6bc8dccb"}, - {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e62564457851db1c40399bd95a5346b9bb99e17a819bf583b362f418d8f3457a"}, - {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ccd1228cc203b1f1b8a3d403f5a20ad1c40e5879b3fbf5851ce09d948982f2c"}, - {file = "Cython-3.0.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25529ee948f44d9a165ff960c49d4903267c20b5edf2df79b45924802e4cca6e"}, - {file = "Cython-3.0.12-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:90cf599372c5a22120609f7d3a963f17814799335d56dd0dcf8fe615980a8ae1"}, - {file = "Cython-3.0.12-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9f8c48748a9c94ea5d59c26ab49ad0fad514d36f894985879cf3c3ca0e600bf4"}, - {file = "Cython-3.0.12-cp37-cp37m-win32.whl", hash = "sha256:3e4fa855d98bc7bd6a2049e0c7dc0dcf595e2e7f571a26e808f3efd84d2db374"}, - {file = "Cython-3.0.12-cp37-cp37m-win_amd64.whl", hash = "sha256:120681093772bf3600caddb296a65b352a0d3556e962b9b147efcfb8e8c9801b"}, - {file = "Cython-3.0.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:731d719423e041242c9303c80cae4327467299b90ffe62d4cc407e11e9ea3160"}, - {file = "Cython-3.0.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3238a29f37999e27494d120983eca90d14896b2887a0bd858a381204549137a"}, - {file = "Cython-3.0.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b588c0a089a9f4dd316d2f9275230bad4a7271e5af04e1dc41d2707c816be44b"}, - {file = 
"Cython-3.0.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab9f5198af74eb16502cc143cdde9ca1cbbf66ea2912e67440dd18a36e3b5fa"}, - {file = "Cython-3.0.12-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8ee841c0e114efa1e849c281ac9b8df8aa189af10b4a103b1c5fd71cbb799679"}, - {file = "Cython-3.0.12-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:43c48b5789398b228ea97499f5b864843ba9b1ab837562a9227c6f58d16ede8b"}, - {file = "Cython-3.0.12-cp38-cp38-win32.whl", hash = "sha256:5e5f17c48a4f41557fbcc7ee660ccfebe4536a34c557f553b6893c1b3c83df2d"}, - {file = "Cython-3.0.12-cp38-cp38-win_amd64.whl", hash = "sha256:309c081057930bb79dc9ea3061a1af5086c679c968206e9c9c2ec90ab7cb471a"}, - {file = "Cython-3.0.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54115fcc126840926ff3b53cfd2152eae17b3522ae7f74888f8a41413bd32f25"}, - {file = "Cython-3.0.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:629db614b9c364596d7c975fa3fb3978e8c5349524353dbe11429896a783fc1e"}, - {file = "Cython-3.0.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af081838b0f9e12a83ec4c3809a00a64c817f489f7c512b0e3ecaf5f90a2a816"}, - {file = "Cython-3.0.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:34ce459808f7d8d5d4007bc5486fe50532529096b43957af6cbffcb4d9cc5c8d"}, - {file = "Cython-3.0.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6c6cd6a75c8393e6805d17f7126b96a894f310a1a9ea91c47d141fb9341bfa8"}, - {file = "Cython-3.0.12-cp39-cp39-win32.whl", hash = "sha256:a4032e48d4734d2df68235d21920c715c451ac9de15fa14c71b378e8986b83be"}, - {file = "Cython-3.0.12-cp39-cp39-win_amd64.whl", hash = "sha256:dcdc3e5d4ce0e7a4af6903ed580833015641e968d18d528d8371e2435a34132c"}, - {file = "Cython-3.0.12-py2.py3-none-any.whl", hash = "sha256:0038c9bae46c459669390e53a1ec115f8096b2e4647ae007ff1bf4e6dee92806"}, - {file = "cython-3.0.12.tar.gz", hash = 
"sha256:b988bb297ce76c671e28c97d017b95411010f7c77fa6623dd0bb47eed1aee1bc"}, + {file = "cython-3.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7adff5b42d2556d073e9f321c2faa639a17fb195ec1de130327f60ec209d8"}, + {file = "cython-3.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b61b99205308c96b1162de59bd67ecadcad3d166a4a1f03a3d9e826c39cd375"}, + {file = "cython-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d14186bd96783d13b8fd0e5b289f2e137a8a25479638b73a1c7e4a99a8d70753"}, + {file = "cython-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e3ccec55e2a534a712db14c6617b66f65ad149c014fad518fc3920f6edde770"}, + {file = "cython-3.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a585796939b09b3205b1980e4a55e745c0251e45a5c637afbcac3c6cc9ad6f90"}, + {file = "cython-3.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3fa4bd840de63509c74867b4b092541720a01db1e07351206011c34e0777dc96"}, + {file = "cython-3.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b68f1bc80387554eb43f2b62795c173bed9e37201f39dc5084ac437c90a79c9f"}, + {file = "cython-3.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e851ab66a31794e40df1bc6f649cdc56c998c637f5a1b9410c97a90f6b6cb855"}, + {file = "cython-3.1.1-cp310-cp310-win32.whl", hash = "sha256:64915259276482fa23417b284d1fdc7e3a618ee2f819bb6ea7f974c075633df6"}, + {file = "cython-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dee554f0a589377bdaea0eb70e212bf3f35dc6a51a2aa86c9351345e21fd2f07"}, + {file = "cython-3.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c360823e1063784efc2335617e0f28573d7a594c5a8a05d85e850a9621cccb1f"}, + {file = "cython-3.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:12e00b88147b03c148a95365f89dc1c45a0fc52f9c35aa75ff770ef65b615839"}, + {file = "cython-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ab644415458d782c16ba7252de9cec1e3125371641cafea2e53a8c1cf85dd58d"}, + {file = "cython-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5cb6c054daadaf01a88c8f49f3edd9e829c9b76a82cbb4269e3f9878254540b"}, + {file = "cython-3.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af8f62cc9339b75fe8434325083e6a7cae88c9c21efd74bbb6ba4e3623219469"}, + {file = "cython-3.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:689c1aad373556bd2ab1aa1c2dad8939a2891465a1fbd2cbbdd42b488fb40ec8"}, + {file = "cython-3.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:953046c190fa9ab9a09a546a909b847cdbb4c1fe34e9bfa4a15b6ee1585a86aa"}, + {file = "cython-3.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:755a991601b27dd3555310d0f95b19a05e622a80d7b4e7a91fa6f5f3ef3f3b80"}, + {file = "cython-3.1.1-cp311-cp311-win32.whl", hash = "sha256:83b2af5c327f7da4f08afc34fddfaf6d24fa0c000b6b70a527c8125e493b6080"}, + {file = "cython-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:141ffd6279411c562f6b707adc56b63e965a4fd7f21db83f5d4fcbd8c50ac546"}, + {file = "cython-3.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d7dc0e4d0cd491fac679a61e9ede348c64ca449f99a284f9a01851aa1dbc7f6"}, + {file = "cython-3.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fd689910002adfac8734f237cdea1573e38345f27ed7fd445482813b65a29457"}, + {file = "cython-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f0434916994fe213ea7749268b88d77e3ebcbd1b99542cf64bb7d180f45470"}, + {file = "cython-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:873aac4ac0b0fb197557c0ac15458b780b9221daa4a716881cbd1a9016c8459f"}, + {file = "cython-3.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23b886a6c8a50b1101ccef2f2f3dc9c699b77633ef5bb5007090226c2ad3f9c2"}, + {file = 
"cython-3.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dff0e7dd53a0ca35b64cda843253d5cac944db26663dc097b3a1adf2c49514ad"}, + {file = "cython-3.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f7954b0b4b3302655d3caa6924261de5907a4e129bc22ace52fe9ae0cd5a758"}, + {file = "cython-3.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dfa500fd7ae95ca152a5f8062b870532fa3e27efcef6d00612e1f28b9f72615f"}, + {file = "cython-3.1.1-cp312-cp312-win32.whl", hash = "sha256:cd748fab8e4426dbcb2e0fa2979558333934d24365e0de5672fbabfe337d880c"}, + {file = "cython-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:307f216ed319ea07644f2ef9974406c830f01bc8e677e2147e9bfcdf9e3ca8ad"}, + {file = "cython-3.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb5661941707bd41ec7a9c273d698113ac50392444f785088e9d9706c6a5937b"}, + {file = "cython-3.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:28b174f41718a7041cfbe0f48913020875ff1aaa4793942b2451ac6d2baf3f07"}, + {file = "cython-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c740a10cd0f50321d048c8ca318eefb4c42b8bffef982dcd89c946d374192702"}, + {file = "cython-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7da069ca769903c5dee56c5f7ab47b2b7b91030eee48912630db5f4f3ec5954a"}, + {file = "cython-3.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24c640c0746d984789fe2787a098f06cda456ef2dd78b90164d17884b350839a"}, + {file = "cython-3.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:426d78565eb91d3366569b20e92b8f14bffef5f57b2acd05b60bbb9ce5c056a1"}, + {file = "cython-3.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b181158b5761bdaf40f6854f016ab7ddff64d3db4fca55cb3ca0f73813dd76d6"}, + {file = "cython-3.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7489559e6c5ecbba49d535c2e03cf77c2594a3190b6aca7da5b508ba1664a89a"}, + {file = 
"cython-3.1.1-cp313-cp313-win32.whl", hash = "sha256:263cb0e497910fb5e0a361ad1393b6d728b092178afecc56e8a786f3739960c3"}, + {file = "cython-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:e000f0533eedf3d6dfbe30bb3c58a054c58f0a7778390342fa577a0dc47adab3"}, + {file = "cython-3.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdf53dc4b2a13bd072d6c2c18ac073dbf0f798555bc27ba4f7546a275eb16a0f"}, + {file = "cython-3.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ce82070ccf92c3599d331b9eaaefd9d4562976fb86a8d6bccf05c4a0b8389f2a"}, + {file = "cython-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:020089f9c9f10269181f17660a2cada7d4577bd8eea24b7d2b14e6b64b6996be"}, + {file = "cython-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:402f86c00b08f875cd0990f0c4dc52eb3e0bc5d630066cdf3c798631976f1937"}, + {file = "cython-3.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54a8934cb3bf13b1f8f6cbdae8e382e25a26e67de08ea6ebfd0a467131b67227"}, + {file = "cython-3.1.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6ea77ad1e649cec38f8622ba28dcdfbe7bf519bc132abbcf5df759b3975b5a73"}, + {file = "cython-3.1.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7e5cad896af896482240979b996bf4136b0d18dc40c56c72c5641bf0ea085dfb"}, + {file = "cython-3.1.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16d9870654946375b28280371d370d541641d1071da123d0d64d2c7ebba0cc56"}, + {file = "cython-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8aaa29e763adf3496ab9d371e3caed8da5d3ce5ff8fb57433e2a2f2b5036e5c8"}, + {file = "cython-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:011cdcbf7725f0cfc1abc55ec83d326e788050711272131daf3cc24a19c34bb2"}, + {file = "cython-3.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40f50b07c479eaf33981d81cad274c68cf9fb81dbe79cbf991f59491c88a4705"}, + {file = "cython-3.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a92f6bd395eadea6eed722a8188d3bdd49db1c9fa3c38710456d6148ab71bad7"}, + {file = "cython-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:268420b92307ae6c5a16e3cf0e2ba1ae3c861650e992893922a0ce08db07cfdb"}, + {file = "cython-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19188ecd385cdc649e3fec370f38d5fd7f1651aeed0b3fb403180f38fc88e8a"}, + {file = "cython-3.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fff6526bb6f4eea615663117b86de6ede0d17c477b600d3d8302be3502bd3c3"}, + {file = "cython-3.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3192a61c2a532d3faccdff508bc8427de9530b587888218bfc0226eb33a84e11"}, + {file = "cython-3.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56c6768a6f601f93daab7c2487f9f110548a896a91e00a6e119445ada2575323"}, + {file = "cython-3.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:50ad80e2f438e9127a87c10927e6ac16a987df39c248b19ab2cd31330129be3c"}, + {file = "cython-3.1.1-cp39-cp39-win32.whl", hash = "sha256:b194a65a0fd91f305d2d1e7010f44111774a28533e1e44dd2a76e7de81a219b9"}, + {file = "cython-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c8b8be01fd40b3e38a76c60a524f956548a3a7566e5530a833a48a695f3d6c12"}, + {file = "cython-3.1.1-py3-none-any.whl", hash = "sha256:07621e044f332d18139df2ccfcc930151fd323c2f61a58c82f304cffc9eb5280"}, + {file = "cython-3.1.1.tar.gz", hash = "sha256:505ccd413669d5132a53834d792c707974248088c4f60c497deb1b416e366397"}, ] [[package]] From c0687c72c2650297ca9f8f562ddc4b48b0a51e98 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 25 May 2025 23:48:36 -0500 Subject: [PATCH 425/434] chore(deps-dev): bump setuptools from 80.7.1 to 80.8.0 (#1591) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/poetry.lock b/poetry.lock index c5862217..80377477 100644 --- a/poetry.lock +++ b/poetry.lock @@ -824,14 +824,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "80.7.1" +version = "80.8.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-80.7.1-py3-none-any.whl", hash = "sha256:ca5cc1069b85dc23070a6628e6bcecb3292acac802399c7f8edc0100619f9009"}, - {file = "setuptools-80.7.1.tar.gz", hash = "sha256:f6ffc5f0142b1bd8d0ca94ee91b30c0ca862ffd50826da1ea85258a06fd94552"}, + {file = "setuptools-80.8.0-py3-none-any.whl", hash = "sha256:95a60484590d24103af13b686121328cc2736bee85de8936383111e421b9edc0"}, + {file = "setuptools-80.8.0.tar.gz", hash = "sha256:49f7af965996f26d43c8ae34539c8d99c5042fbff34302ea151eaa9c207cd257"}, ] [package.extras] From 2d14af68598b16cd36fd1aa8f0b215010274d710 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 26 May 2025 11:48:12 -0500 Subject: [PATCH 426/434] chore(pre-commit.ci): pre-commit autoupdate (#1588) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 50a1dd37..923b38e6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.7.0 + rev: v4.8.2 hooks: - id: commitizen stages: [commit-msg] @@ -35,12 +35,12 @@ repos: args: ["--tab-width", "2"] files: ".(css|html|js|json|md|toml|yaml)$" - repo: https://github.com/asottile/pyupgrade - rev: v3.19.1 + rev: v3.20.0 hooks: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.9 + rev: v0.11.11 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 7bd977521f644e406b88ca70a09fa87fe1d5c669 Mon Sep 17 00:00:00 
2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 27 Jun 2025 16:47:47 -0500 Subject: [PATCH 427/434] chore(deps-dev): bump setuptools from 80.8.0 to 80.9.0 (#1593) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 80377477..c0ad30ef 100644 --- a/poetry.lock +++ b/poetry.lock @@ -824,14 +824,14 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "setuptools" -version = "80.8.0" +version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "setuptools-80.8.0-py3-none-any.whl", hash = "sha256:95a60484590d24103af13b686121328cc2736bee85de8936383111e421b9edc0"}, - {file = "setuptools-80.8.0.tar.gz", hash = "sha256:49f7af965996f26d43c8ae34539c8d99c5042fbff34302ea151eaa9c207cd257"}, + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] From 4454ec8a20312a96ca6ea83add488148f68f3bd9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 27 Jun 2025 16:48:00 -0500 Subject: [PATCH 428/434] chore(deps-dev): bump requests from 2.32.3 to 2.32.4 in the pip group (#1596) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index c0ad30ef..665cd38c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,7 +33,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -125,7 +125,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -419,7 +419,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -782,19 +782,19 @@ pytest = ">=7.0.0" [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" -groups = ["docs"] +groups = ["dev", "docs"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -1089,7 +1089,7 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" -groups = ["docs"] +groups = ["dev", "docs"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, From b7345129c800ff32a3161a67e7a70c7626ecba23 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Jul 2025 12:01:26 -0500 Subject: [PATCH 429/434] chore(deps-dev): bump pytest from 8.3.5 to 8.4.1 (#1599) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/poetry.lock b/poetry.lock index 665cd38c..e0ab88b0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -33,7 +33,7 @@ version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" -groups = ["dev", "docs"] +groups = ["docs"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, @@ -125,7 +125,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["dev", "docs"] +groups = ["docs"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -419,7 +419,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["dev", "docs"] +groups = ["docs"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -672,26 +672,27 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.3.5" +version = "8.4.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, - {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, + {file = "pytest-8.4.1-py3-none-any.whl", hash = 
"sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"}, + {file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"}, ] [package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" +packaging = ">=20" pluggy = ">=1.5,<2" +pygments = ">=2.7.2" tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" @@ -786,7 +787,7 @@ version = "2.32.4" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["dev", "docs"] +groups = ["docs"] files = [ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, @@ -1089,7 +1090,7 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["dev", "docs"] +groups = ["docs"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, From d4d4adff2dec57fc36c1fe183c71025b8f8f6323 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Jul 2025 19:28:47 -1000 Subject: [PATCH 430/434] chore(deps-dev): bump pytest-asyncio from 0.26.0 to 1.1.0 (#1605) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 22 ++++++++++++++++++---- pyproject.toml | 2 +- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index e0ab88b0..0d80c218 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,6 +27,19 @@ files = [ [package.extras] dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +description = "Backport of asyncio.Runner, a context manager that controls event loop life cycle." 
+optional = false +python-versions = "<3.11,>=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"}, + {file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"}, +] + [[package]] name = "certifi" version = "2025.1.31" @@ -696,17 +709,18 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests [[package]] name = "pytest-asyncio" -version = "0.26.0" +version = "1.1.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"}, - {file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"}, + {file = "pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf"}, + {file = "pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea"}, ] [package.dependencies] +backports-asyncio-runner = {version = ">=1.1,<2", markers = "python_version < \"3.11\""} pytest = ">=8.2,<9" typing-extensions = {version = ">=4.12", markers = "python_version < \"3.10\""} @@ -1126,4 +1140,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "972988da838067a7f2d12b8212ce54ba946cb38a4f63576a520dd1ed40ac3e9b" +content-hash = "41eb7ce775d30ab9ea32c70f622d10c6dca9904c29635d00e7c9a9893b7cefd4" diff --git a/pyproject.toml b/pyproject.toml index d47a1966..9396b8c0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ ifaddr = ">=0.1.7" [tool.poetry.group.dev.dependencies] pytest = ">=7.2,<9.0" pytest-cov = ">=4,<7" -pytest-asyncio = 
">=0.20.3,<0.27.0" +pytest-asyncio = ">=0.20.3,<1.2.0" cython = "^3.0.5" setuptools = ">=65.6.3,<81.0.0" pytest-timeout = "^2.1.0" From 9eb4a57d822e4dc325cf1e2242f37019a4ee8fe3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Jul 2025 19:29:14 -1000 Subject: [PATCH 431/434] chore(deps-dev): bump urllib3 from 2.3.0 to 2.5.0 in the pip group (#1601) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0d80c218..65bf0d86 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1100,14 +1100,14 @@ files = [ [[package]] name = "urllib3" -version = "2.3.0" +version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.9" -groups = ["docs"] +groups = ["dev", "docs"] files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] From 6846b6684c2021238994e6cf50b3dd79fc83ee92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Jul 2025 20:01:48 -1000 Subject: [PATCH 432/434] chore(deps-dev): bump pytest-codspeed from 3.2.0 to 4.0.0 (#1604) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 32 ++++++++++++++++---------------- pyproject.toml | 2 +- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/poetry.lock 
b/poetry.lock index 65bf0d86..ea717845 100644 --- a/poetry.lock +++ b/poetry.lock @@ -730,24 +730,24 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-codspeed" -version = "3.2.0" +version = "4.0.0" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5165774424c7ab8db7e7acdb539763a0e5657996effefdf0664d7fd95158d34"}, - {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bd55f92d772592c04a55209950c50880413ae46876e66bd349ef157075ca26c"}, - {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf6f56067538f4892baa8d7ab5ef4e45bb59033be1ef18759a2c7fc55b32035"}, - {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39a687b05c3d145642061b45ea78e47e12f13ce510104d1a2cda00eee0e36f58"}, - {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46a1afaaa1ac4c2ca5b0700d31ac46d80a27612961d031067d73c6ccbd8d3c2b"}, - {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48ce3af3dfa78413ed3d69d1924043aa1519048dbff46edccf8f35a25dab3c2"}, - {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66692506d33453df48b36a84703448cb8b22953eea51f03fbb2eb758dc2bdc4f"}, - {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:479774f80d0bdfafa16112700df4dbd31bf2a6757fac74795fd79c0a7b3c389b"}, - {file = 
"pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:109f9f4dd1088019c3b3f887d003b7d65f98a7736ca1d457884f5aa293e8e81c"}, - {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2f69a03b52c9bb041aec1b8ee54b7b6c37a6d0a948786effa4c71157765b6da"}, - {file = "pytest_codspeed-3.2.0-py3-none-any.whl", hash = "sha256:54b5c2e986d6a28e7b0af11d610ea57bd5531cec8326abe486f1b55b09d91c39"}, - {file = "pytest_codspeed-3.2.0.tar.gz", hash = "sha256:f9d1b1a3b2c69cdc0490a1e8b1ced44bffbd0e8e21d81a7160cfdd923f6e8155"}, + {file = "pytest_codspeed-4.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2517731b20a6aa9fe61d04822b802e1637ee67fd865189485b384a9d5897117f"}, + {file = "pytest_codspeed-4.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e5076bb5119d4f8248822b5cd6b768f70a18c7e1a7fbcd96a99cd4a6430096e"}, + {file = "pytest_codspeed-4.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:06b324acdfe2076a0c97a9d31e8645f820822d6f0e766c73426767ff887a9381"}, + {file = "pytest_codspeed-4.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ebdac1a4d6138e1ca4f5391e7e3cafad6e3aa6d5660d1b243871b691bc1396c"}, + {file = "pytest_codspeed-4.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f3def79d4072867d038a33e7f35bc7fb1a2a75236a624b3a690c5540017cb38"}, + {file = "pytest_codspeed-4.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01d29d4538c2d111c0034f71811bcce577304506d22af4dd65df87fadf3ab495"}, + {file = "pytest_codspeed-4.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:90894c93c9e23f12487b7fdf16c28da8f6275d565056772072beb41a72a54cf9"}, + {file = "pytest_codspeed-4.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:79e9c40852fa7fc76776db4f1d290eceaeee2d6c5d2dc95a66c7cc690d83889e"}, + {file = "pytest_codspeed-4.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7330b6eadd6a729d4dba95d26496ee1c6f1649d552f515ef537b14a43908eb67"}, + {file = "pytest_codspeed-4.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1271cd28e895132b20d12875554a544ee041f7acfb8112af8a5c3cb201f2fc8"}, + {file = "pytest_codspeed-4.0.0-py3-none-any.whl", hash = "sha256:c5debd4b127dc1c507397a8304776f52cabbfa53aad6f51eae329a5489df1e06"}, + {file = "pytest_codspeed-4.0.0.tar.gz", hash = "sha256:0e9af08ca93ad897b376771db92693a81aa8990eecc2a778740412e00a6f6eaf"}, ] [package.dependencies] @@ -758,7 +758,7 @@ rich = ">=13.8.1" [package.extras] compat = ["pytest-benchmark (>=5.0.0,<5.1.0)", "pytest-xdist (>=3.6.1,<3.7.0)"] -lint = ["mypy (>=1.11.2,<1.12.0)", "ruff (>=0.6.5,<0.7.0)"] +lint = ["mypy (>=1.11.2,<1.12.0)", "ruff (>=0.11.12,<0.12.0)"] test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] @@ -1104,7 +1104,7 @@ version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["dev", "docs"] +groups = ["docs"] files = [ {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, @@ -1140,4 +1140,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "41eb7ce775d30ab9ea32c70f622d10c6dca9904c29635d00e7c9a9893b7cefd4" +content-hash = "a02185106a3a8390d2fa889ab86239f0990d8b42aad5e1ebed4e1dd78b5eaa47" diff --git a/pyproject.toml b/pyproject.toml index 9396b8c0..f298a914 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ pytest-asyncio = ">=0.20.3,<1.2.0" cython = "^3.0.5" setuptools = ">=65.6.3,<81.0.0" pytest-timeout = "^2.1.0" -pytest-codspeed = "^3.1.0" +pytest-codspeed = ">=3.1,<5.0" [tool.poetry.group.docs.dependencies] sphinx = "^7.4.7 || ^8.1.3" From cef7a17ec1a7ed5eb535b11d3ba019ca8981c6e3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 30 Jul 2025 20:15:16 -1000 Subject: [PATCH 433/434] chore(pre-commit.ci): pre-commit autoupdate (#1594) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: J. 
Nick Koston --- .pre-commit-config.yaml | 10 +++++----- build_ext.py | 2 +- src/zeroconf/_dns.py | 4 ++-- src/zeroconf/_services/browser.py | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 923b38e6..86a8ee7f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,7 +9,7 @@ ci: repos: - repo: https://github.com/commitizen-tools/commitizen - rev: v4.8.2 + rev: v4.8.3 hooks: - id: commitizen stages: [commit-msg] @@ -40,7 +40,7 @@ repos: - id: pyupgrade args: [--py39-plus] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.11.11 + rev: v0.12.5 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] @@ -50,16 +50,16 @@ repos: hooks: - id: codespell - repo: https://github.com/PyCQA/flake8 - rev: 7.2.0 + rev: 7.3.0 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.15.0 + rev: v1.17.0 hooks: - id: mypy additional_dependencies: [ifaddr] - repo: https://github.com/MarcoGorelli/cython-lint - rev: v0.16.6 + rev: v0.16.7 hooks: - id: cython-lint - id: double-quote-cython-strings diff --git a/build_ext.py b/build_ext.py index ff088f83..412bff3c 100644 --- a/build_ext.py +++ b/build_ext.py @@ -56,7 +56,7 @@ def build(setup_kwargs: Any) -> None: if os.environ.get("SKIP_CYTHON"): return try: - from Cython.Build import cythonize + from Cython.Build import cythonize # noqa: PLC0415 setup_kwargs.update( { diff --git a/src/zeroconf/_dns.py b/src/zeroconf/_dns.py index 60df14b1..93069eb3 100644 --- a/src/zeroconf/_dns.py +++ b/src/zeroconf/_dns.py @@ -63,7 +63,7 @@ class DNSQuestionType(enum.Enum): QM = 2 -class DNSEntry: +class DNSEntry: # noqa: PLW1641 """A DNS entry""" __slots__ = ("class_", "key", "name", "type", "unique") @@ -161,7 +161,7 @@ def __repr__(self) -> str: ) -class DNSRecord(DNSEntry): +class DNSRecord(DNSEntry): # noqa: PLW1641 """A DNS record - like a DNS entry, but has a TTL""" __slots__ = ("created", "ttl") diff --git 
a/src/zeroconf/_services/browser.py b/src/zeroconf/_services/browser.py index 1f60e8f9..897b5dd6 100644 --- a/src/zeroconf/_services/browser.py +++ b/src/zeroconf/_services/browser.py @@ -99,7 +99,7 @@ heappush = heapq.heappush -class _ScheduledPTRQuery: +class _ScheduledPTRQuery: # noqa: PLW1641 __slots__ = ( "alias", "cancelled", From cb4ac97ef60bec816dd61e5edae2131f81234f2f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Jul 2025 20:15:29 -1000 Subject: [PATCH 434/434] chore(deps-dev): bump cython from 3.1.1 to 3.1.2 (#1602) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 126 ++++++++++++++++++++++++++-------------------------- 1 file changed, 63 insertions(+), 63 deletions(-) diff --git a/poetry.lock b/poetry.lock index ea717845..297103a8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -328,74 +328,74 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cython" -version = "3.1.1" +version = "3.1.2" description = "The Cython compiler for writing C extensions in the Python language." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "cython-3.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7adff5b42d2556d073e9f321c2faa639a17fb195ec1de130327f60ec209d8"}, - {file = "cython-3.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b61b99205308c96b1162de59bd67ecadcad3d166a4a1f03a3d9e826c39cd375"}, - {file = "cython-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d14186bd96783d13b8fd0e5b289f2e137a8a25479638b73a1c7e4a99a8d70753"}, - {file = "cython-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e3ccec55e2a534a712db14c6617b66f65ad149c014fad518fc3920f6edde770"}, - {file = "cython-3.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a585796939b09b3205b1980e4a55e745c0251e45a5c637afbcac3c6cc9ad6f90"}, - {file = "cython-3.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3fa4bd840de63509c74867b4b092541720a01db1e07351206011c34e0777dc96"}, - {file = "cython-3.1.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b68f1bc80387554eb43f2b62795c173bed9e37201f39dc5084ac437c90a79c9f"}, - {file = "cython-3.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e851ab66a31794e40df1bc6f649cdc56c998c637f5a1b9410c97a90f6b6cb855"}, - {file = "cython-3.1.1-cp310-cp310-win32.whl", hash = "sha256:64915259276482fa23417b284d1fdc7e3a618ee2f819bb6ea7f974c075633df6"}, - {file = "cython-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dee554f0a589377bdaea0eb70e212bf3f35dc6a51a2aa86c9351345e21fd2f07"}, - {file = "cython-3.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c360823e1063784efc2335617e0f28573d7a594c5a8a05d85e850a9621cccb1f"}, - {file = "cython-3.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:12e00b88147b03c148a95365f89dc1c45a0fc52f9c35aa75ff770ef65b615839"}, - {file = "cython-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ab644415458d782c16ba7252de9cec1e3125371641cafea2e53a8c1cf85dd58d"}, - {file = "cython-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5cb6c054daadaf01a88c8f49f3edd9e829c9b76a82cbb4269e3f9878254540b"}, - {file = "cython-3.1.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af8f62cc9339b75fe8434325083e6a7cae88c9c21efd74bbb6ba4e3623219469"}, - {file = "cython-3.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:689c1aad373556bd2ab1aa1c2dad8939a2891465a1fbd2cbbdd42b488fb40ec8"}, - {file = "cython-3.1.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:953046c190fa9ab9a09a546a909b847cdbb4c1fe34e9bfa4a15b6ee1585a86aa"}, - {file = "cython-3.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:755a991601b27dd3555310d0f95b19a05e622a80d7b4e7a91fa6f5f3ef3f3b80"}, - {file = "cython-3.1.1-cp311-cp311-win32.whl", hash = "sha256:83b2af5c327f7da4f08afc34fddfaf6d24fa0c000b6b70a527c8125e493b6080"}, - {file = "cython-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:141ffd6279411c562f6b707adc56b63e965a4fd7f21db83f5d4fcbd8c50ac546"}, - {file = "cython-3.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d7dc0e4d0cd491fac679a61e9ede348c64ca449f99a284f9a01851aa1dbc7f6"}, - {file = "cython-3.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fd689910002adfac8734f237cdea1573e38345f27ed7fd445482813b65a29457"}, - {file = "cython-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f0434916994fe213ea7749268b88d77e3ebcbd1b99542cf64bb7d180f45470"}, - {file = "cython-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:873aac4ac0b0fb197557c0ac15458b780b9221daa4a716881cbd1a9016c8459f"}, - {file = "cython-3.1.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23b886a6c8a50b1101ccef2f2f3dc9c699b77633ef5bb5007090226c2ad3f9c2"}, - {file = 
"cython-3.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dff0e7dd53a0ca35b64cda843253d5cac944db26663dc097b3a1adf2c49514ad"}, - {file = "cython-3.1.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f7954b0b4b3302655d3caa6924261de5907a4e129bc22ace52fe9ae0cd5a758"}, - {file = "cython-3.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dfa500fd7ae95ca152a5f8062b870532fa3e27efcef6d00612e1f28b9f72615f"}, - {file = "cython-3.1.1-cp312-cp312-win32.whl", hash = "sha256:cd748fab8e4426dbcb2e0fa2979558333934d24365e0de5672fbabfe337d880c"}, - {file = "cython-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:307f216ed319ea07644f2ef9974406c830f01bc8e677e2147e9bfcdf9e3ca8ad"}, - {file = "cython-3.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb5661941707bd41ec7a9c273d698113ac50392444f785088e9d9706c6a5937b"}, - {file = "cython-3.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:28b174f41718a7041cfbe0f48913020875ff1aaa4793942b2451ac6d2baf3f07"}, - {file = "cython-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c740a10cd0f50321d048c8ca318eefb4c42b8bffef982dcd89c946d374192702"}, - {file = "cython-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7da069ca769903c5dee56c5f7ab47b2b7b91030eee48912630db5f4f3ec5954a"}, - {file = "cython-3.1.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24c640c0746d984789fe2787a098f06cda456ef2dd78b90164d17884b350839a"}, - {file = "cython-3.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:426d78565eb91d3366569b20e92b8f14bffef5f57b2acd05b60bbb9ce5c056a1"}, - {file = "cython-3.1.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b181158b5761bdaf40f6854f016ab7ddff64d3db4fca55cb3ca0f73813dd76d6"}, - {file = "cython-3.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7489559e6c5ecbba49d535c2e03cf77c2594a3190b6aca7da5b508ba1664a89a"}, - {file = 
"cython-3.1.1-cp313-cp313-win32.whl", hash = "sha256:263cb0e497910fb5e0a361ad1393b6d728b092178afecc56e8a786f3739960c3"}, - {file = "cython-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:e000f0533eedf3d6dfbe30bb3c58a054c58f0a7778390342fa577a0dc47adab3"}, - {file = "cython-3.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdf53dc4b2a13bd072d6c2c18ac073dbf0f798555bc27ba4f7546a275eb16a0f"}, - {file = "cython-3.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ce82070ccf92c3599d331b9eaaefd9d4562976fb86a8d6bccf05c4a0b8389f2a"}, - {file = "cython-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:020089f9c9f10269181f17660a2cada7d4577bd8eea24b7d2b14e6b64b6996be"}, - {file = "cython-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:402f86c00b08f875cd0990f0c4dc52eb3e0bc5d630066cdf3c798631976f1937"}, - {file = "cython-3.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54a8934cb3bf13b1f8f6cbdae8e382e25a26e67de08ea6ebfd0a467131b67227"}, - {file = "cython-3.1.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6ea77ad1e649cec38f8622ba28dcdfbe7bf519bc132abbcf5df759b3975b5a73"}, - {file = "cython-3.1.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7e5cad896af896482240979b996bf4136b0d18dc40c56c72c5641bf0ea085dfb"}, - {file = "cython-3.1.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:16d9870654946375b28280371d370d541641d1071da123d0d64d2c7ebba0cc56"}, - {file = "cython-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8aaa29e763adf3496ab9d371e3caed8da5d3ce5ff8fb57433e2a2f2b5036e5c8"}, - {file = "cython-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:011cdcbf7725f0cfc1abc55ec83d326e788050711272131daf3cc24a19c34bb2"}, - {file = "cython-3.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40f50b07c479eaf33981d81cad274c68cf9fb81dbe79cbf991f59491c88a4705"}, - {file = "cython-3.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a92f6bd395eadea6eed722a8188d3bdd49db1c9fa3c38710456d6148ab71bad7"}, - {file = "cython-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:268420b92307ae6c5a16e3cf0e2ba1ae3c861650e992893922a0ce08db07cfdb"}, - {file = "cython-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19188ecd385cdc649e3fec370f38d5fd7f1651aeed0b3fb403180f38fc88e8a"}, - {file = "cython-3.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fff6526bb6f4eea615663117b86de6ede0d17c477b600d3d8302be3502bd3c3"}, - {file = "cython-3.1.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:3192a61c2a532d3faccdff508bc8427de9530b587888218bfc0226eb33a84e11"}, - {file = "cython-3.1.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56c6768a6f601f93daab7c2487f9f110548a896a91e00a6e119445ada2575323"}, - {file = "cython-3.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:50ad80e2f438e9127a87c10927e6ac16a987df39c248b19ab2cd31330129be3c"}, - {file = "cython-3.1.1-cp39-cp39-win32.whl", hash = "sha256:b194a65a0fd91f305d2d1e7010f44111774a28533e1e44dd2a76e7de81a219b9"}, - {file = "cython-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c8b8be01fd40b3e38a76c60a524f956548a3a7566e5530a833a48a695f3d6c12"}, - {file = "cython-3.1.1-py3-none-any.whl", hash = "sha256:07621e044f332d18139df2ccfcc930151fd323c2f61a58c82f304cffc9eb5280"}, - {file = "cython-3.1.1.tar.gz", hash = "sha256:505ccd413669d5132a53834d792c707974248088c4f60c497deb1b416e366397"}, + {file = "cython-3.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0f2add8b23cb19da3f546a688cd8f9e0bfc2776715ebf5e283bc3113b03ff008"}, + {file = "cython-3.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0d6248a2ae155ca4c42d7fa6a9a05154d62e695d7736bc17e1b85da6dcc361df"}, + {file = "cython-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:262bf49d9da64e2a34c86cbf8de4aa37daffb0f602396f116cca1ed47dc4b9f2"}, 
+ {file = "cython-3.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae53ae93c699d5f113953a9869df2fc269d8e173f9aa0616c6d8d6e12b4e9827"}, + {file = "cython-3.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b417c5d046ce676ee595ec7955ed47a68ad6f419cbf8c2a8708e55a3b38dfa35"}, + {file = "cython-3.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:af127da4b956e0e906e552fad838dc3fb6b6384164070ceebb0d90982a8ae25a"}, + {file = "cython-3.1.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9be3d4954b46fd0f2dceac011d470f658eaf819132db52fbd1cf226ee60348db"}, + {file = "cython-3.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:63da49672c4bb022b4de9d37bab6c29953dbf5a31a2f40dffd0cf0915dcd7a17"}, + {file = "cython-3.1.2-cp310-cp310-win32.whl", hash = "sha256:2d8291dbbc1cb86b8d60c86fe9cbf99ec72de28cb157cbe869c95df4d32efa96"}, + {file = "cython-3.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:e1f30a1339e03c80968a371ef76bf27a6648c5646cccd14a97e731b6957db97a"}, + {file = "cython-3.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5548573e0912d7dc80579827493315384c462e2f15797b91a8ed177686d31eb9"}, + {file = "cython-3.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bf3ea5bc50d80762c490f42846820a868a6406fdb5878ae9e4cc2f11b50228a"}, + {file = "cython-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ce53951d06ab2bca39f153d9c5add1d631c2a44d58bf67288c9d631be9724e"}, + {file = "cython-3.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e05a36224e3002d48c7c1c695b3771343bd16bc57eab60d6c5d5e08f3cbbafd8"}, + {file = "cython-3.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc0fc0777c7ab82297c01c61a1161093a22a41714f62e8c35188a309bd5db8e"}, + {file = "cython-3.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:18161ef3dd0e90a944daa2be468dd27696712a5f792d6289e97d2a31298ad688"}, + {file = "cython-3.1.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ca45020950cd52d82189d6dfb6225737586be6fe7b0b9d3fadd7daca62eff531"}, + {file = "cython-3.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aaae97d6d07610224be2b73a93e9e3dd85c09aedfd8e47054e3ef5a863387dae"}, + {file = "cython-3.1.2-cp311-cp311-win32.whl", hash = "sha256:3d439d9b19e7e70f6ff745602906d282a853dd5219d8e7abbf355de680c9d120"}, + {file = "cython-3.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:8efa44ee2f1876e40eb5e45f6513a19758077c56bf140623ccab43d31f873b61"}, + {file = "cython-3.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c2c4b6f9a941c857b40168b3f3c81d514e509d985c2dcd12e1a4fea9734192e"}, + {file = "cython-3.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bdbc115bbe1b8c1dcbcd1b03748ea87fa967eb8dfc3a1a9bb243d4a382efcff4"}, + {file = "cython-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05111f89db1ca98edc0675cfaa62be47b3ff519a29876eb095532a9f9e052b8"}, + {file = "cython-3.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e7188df8709be32cfdfadc7c3782e361c929df9132f95e1bbc90a340dca3c7"}, + {file = "cython-3.1.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c0ecc71e60a051732c2607b8eb8f2a03a5dac09b28e52b8af323c329db9987b"}, + {file = "cython-3.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f27143cf88835c8bcc9bf3304953f23f377d1d991e8942982fe7be344c7cfce3"}, + {file = "cython-3.1.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d8c43566701133f53bf13485839d8f3f309095fe0d3b9d0cd5873073394d2edc"}, + {file = "cython-3.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a3bb893e85f027a929c1764bb14db4c31cbdf8a96f59a78f608f2ba7cfbbce95"}, + {file = "cython-3.1.2-cp312-cp312-win32.whl", hash = 
"sha256:12c5902f105e43ca9af7874cdf87a23627f98c15d5a4f6d38bc9d334845145c0"}, + {file = "cython-3.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:06789eb7bd2e55b38b9dd349e9309f794aee0fed99c26ea5c9562d463877763f"}, + {file = "cython-3.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc22e5f18af436c894b90c257130346930fdc860d7f42b924548c591672beeef"}, + {file = "cython-3.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42c7bffb0fe9898996c7eef9eb74ce3654553c7a3a3f3da66e5a49f801904ce0"}, + {file = "cython-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88dc7fd54bfae78c366c6106a759f389000ea4dfe8ed9568af9d2f612825a164"}, + {file = "cython-3.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80d0ce057672ca50728153757d022842d5dcec536b50c79615a22dda2a874ea0"}, + {file = "cython-3.1.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eda6a43f1b78eae0d841698916eef661d15f8bc8439c266a964ea4c504f05612"}, + {file = "cython-3.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b4c516d103e87c2e9c1ab85227e4d91c7484c1ba29e25f8afbf67bae93fee164"}, + {file = "cython-3.1.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7542f1d18ab2cd22debc72974ec9e53437a20623d47d6001466e430538d7df54"}, + {file = "cython-3.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:63335513c06dcec4ecdaa8598f36c969032149ffd92a461f641ee363dc83c7ad"}, + {file = "cython-3.1.2-cp313-cp313-win32.whl", hash = "sha256:b377d542299332bfeb61ec09c57821b10f1597304394ba76544f4d07780a16df"}, + {file = "cython-3.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:8ab1319c77f15b0ae04b3fb03588df3afdec4cf79e90eeea5c961e0ebd8fdf72"}, + {file = "cython-3.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbc1f225cb9f9be7a025589463507e10bb2d76a3258f8d308e0e2d0b966c556e"}, + {file = "cython-3.1.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:c1661c1701c96e1866f839e238570c96a97535a81da76a26f45f99ede18b3897"}, + {file = "cython-3.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955bc6032d89ce380458266e65dcf5ae0ed1e7c03a7a4457e3e4773e90ba7373"}, + {file = "cython-3.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b58e859889dd0fc6c3a990445b930f692948b28328bb4f3ed84b51028b7e183"}, + {file = "cython-3.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:992a6504aa3eed50dd1fc3d1fa998928b08c1188130bd526e177b6d7f3383ec4"}, + {file = "cython-3.1.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f3d03077938b02ec47a56aa156da7bfc2379193738397d4e88086db5b0a374e0"}, + {file = "cython-3.1.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b7e1d3c383a5f4ca5319248b9cb1b16a04fb36e153d651e558897171b7dbabb9"}, + {file = "cython-3.1.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:58d4d45e40cadf4f602d96b7016cf24ccfe4d954c61fa30b79813db8ccb7818f"}, + {file = "cython-3.1.2-cp38-cp38-win32.whl", hash = "sha256:919ff38a93f7c21829a519693b336979feb41a0f7ca35969402d7e211706100e"}, + {file = "cython-3.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:aca994519645ba8fb5e99c0f9d4be28d61435775552aaf893a158c583cd218a5"}, + {file = "cython-3.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe7f1ee4c13f8a773bd6c66b3d25879f40596faeab49f97d28c39b16ace5fff9"}, + {file = "cython-3.1.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9ec7d2baea122d94790624f743ff5b78f4e777bf969384be65b69d92fa4bc3f"}, + {file = "cython-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df57827185874f29240b02402e615547ab995d90182a852c6ec4f91bbae355a4"}, + {file = "cython-3.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1a69b9b4fe0a48a8271027c0703c71ab1993c4caca01791c0fd2e2bd9031aa"}, + {file = 
"cython-3.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:970cc1558519f0f108c3e2f4b3480de4945228d9292612d5b2bb687e36c646b8"}, + {file = "cython-3.1.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604c39cd6d152498a940aeae28b6fd44481a255a3fdf1b0051c30f3873c88b7f"}, + {file = "cython-3.1.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:855f2ae06438c7405997cf0df42d5b508ec3248272bb39df4a7a4a82a5f7c8cb"}, + {file = "cython-3.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9e3016ca7a86728bfcbdd52449521e859a977451f296a7ae4967cefa2ec498f7"}, + {file = "cython-3.1.2-cp39-cp39-win32.whl", hash = "sha256:4896fc2b0f90820ea6fcf79a07e30822f84630a404d4e075784124262f6d0adf"}, + {file = "cython-3.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:a965b81eb4f5a5f3f6760b162cb4de3907c71a9ba25d74de1ad7a0e4856f0412"}, + {file = "cython-3.1.2-py3-none-any.whl", hash = "sha256:d23fd7ffd7457205f08571a42b108a3cf993e83a59fe4d72b42e6fc592cf2639"}, + {file = "cython-3.1.2.tar.gz", hash = "sha256:6bbf7a953fa6762dfecdec015e3b054ba51c0121a45ad851fa130f63f5331381"}, ] [[package]] pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy