From befd9a12cbb7ef2152ce129f43be22e70368577d Mon Sep 17 00:00:00 2001 From: Giovanni Ferri Date: Wed, 18 Feb 2026 22:34:05 +0000 Subject: [PATCH 1/3] Fix BLE protocol and MQTT connection bugs - Replace blocking time.sleep() with asyncio.sleep() in HADiscoveryPublisher.connect() to avoid stalling the event loop during MQTT handshake - Make connect_mqtt() and HADiscoveryPublisher.connect() async; consolidate MQTT connection into the single asyncio.run() call in main() - Raise ValueError on invalid PKCS7 padding in _decrypt() instead of silently returning corrupt data - Guard publish_state() and publish_availability() with _connected check to surface failures instead of dropping data silently - Add is_connected guard in _trigger_update() before write_gatt_char() - Replace hard-coded asyncio.sleep(2) after trigger_update() with an event-based wait (clear then await with timeout) to avoid stale signals - Validate required sensor keys (mac_address, device_id, local_key) at config load time so missing keys fail fast at startup Nightshift-Task: bug-finder Nightshift-Ref: https://github.com/marcus/nightshift --- pi-fallback/ble_poller.py | 44 +++++++++++++++++++++-------------- pi-fallback/mqtt_publisher.py | 16 +++++++++---- pi-fallback/tuya_ble.py | 17 ++++++++++---- 3 files changed, 52 insertions(+), 25 deletions(-) diff --git a/pi-fallback/ble_poller.py b/pi-fallback/ble_poller.py index 0243f14..2e903f5 100755 --- a/pi-fallback/ble_poller.py +++ b/pi-fallback/ble_poller.py @@ -64,6 +64,17 @@ def load_config(self) -> bool: logger.error(f"Missing required config section: {section}") return False + # Validate required sensor keys early to catch config errors at startup + required_sensor_keys = ["mac_address", "device_id", "local_key"] + for i, sensor in enumerate(self.config.get("sensors", [])): + for key in required_sensor_keys: + if key not in sensor: + logger.error( + f"Sensor {i} ('{sensor.get('name', 'unnamed')}') " + f"missing required key: '{key}'" + ) + return False
+ # Set logging level log_level = self.config.get("logging", {}).get("level", "INFO") logging.getLogger().setLevel(getattr(logging, log_level.upper())) @@ -79,7 +90,7 @@ def load_config(self) -> bool: logger.error(f"Config load error: {e}") return False - def connect_mqtt(self) -> bool: + async def connect_mqtt(self) -> bool: """Initialize and connect MQTT publisher.""" mqtt_config = self.config["mqtt"] ha_config = self.config.get("homeassistant", {}) @@ -96,7 +107,7 @@ def connect_mqtt(self) -> bool: ) self.mqtt = HADiscoveryPublisher(config) - return self.mqtt.connect() + return await self.mqtt.connect() async def poll_sensor(self, sensor_config: dict) -> Optional[SensorData]: """ @@ -283,15 +294,10 @@ def main(): # Initialize poller poller = BLEPoller(args.config) - # Load config + # Load config (synchronous) if not poller.load_config(): return 1 - # Connect MQTT - if not poller.connect_mqtt(): - logger.error("Failed to connect to MQTT broker") - return 1 - # Setup signal handlers def signal_handler(sig, frame): poller.shutdown() @@ -299,16 +305,20 @@ def signal_handler(sig, frame): signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) - # Run poller - try: - asyncio.run(poller.run(once=args.once)) - except KeyboardInterrupt: - pass - finally: - if poller.mqtt: - poller.mqtt.disconnect() + async def _run(): + if not await poller.connect_mqtt(): + logger.error("Failed to connect to MQTT broker") + return 1 + try: + await poller.run(once=args.once) + except KeyboardInterrupt: + pass + finally: + if poller.mqtt: + poller.mqtt.disconnect() + return 0 - return 0 + return asyncio.run(_run()) if __name__ == "__main__": diff --git a/pi-fallback/mqtt_publisher.py b/pi-fallback/mqtt_publisher.py index 757175b..ea5d9a8 100644 --- a/pi-fallback/mqtt_publisher.py +++ b/pi-fallback/mqtt_publisher.py @@ -4,6 +4,7 @@ Publishes sensor data to MQTT with automatic Home Assistant entity configuration. 
""" +import asyncio import json import logging import ssl @@ -42,7 +43,7 @@ def __init__(self, config: MQTTConfig): self._connected = False self._discovery_sent: set = set() - def connect(self) -> bool: + async def connect(self) -> bool: """Connect to MQTT broker.""" try: # Use MQTT v5 protocol @@ -73,12 +74,11 @@ def connect(self) -> bool: self._client.connect(self.config.host, self.config.port, keepalive=60) self._client.loop_start() - # Wait for connection - import time + # Wait for connection without blocking the event loop for _ in range(50): # 5 seconds timeout if self._connected: return True - time.sleep(0.1) + await asyncio.sleep(0.1) logger.error("MQTT connection timeout") return False @@ -215,6 +215,10 @@ def publish_state(self, sensor_config: dict, data: dict): sensor_config: Sensor configuration from config file data: Sensor data dictionary (from SensorData.to_dict()) """ + if not self._connected or not self._client: + logger.error("Not connected to MQTT broker, dropping publish_state") + return + unique_id = sensor_config["unique_id"] state_topic = f"sgs01/{unique_id}/state" @@ -233,6 +237,10 @@ def publish_state(self, sensor_config: dict, data: dict): def publish_availability(self, sensor_config: dict, available: bool): """Publish sensor availability status.""" + if not self._connected or not self._client: + logger.error("Not connected to MQTT broker, dropping publish_availability") + return + unique_id = sensor_config["unique_id"] availability_topic = f"sgs01/{unique_id}/availability" diff --git a/pi-fallback/tuya_ble.py b/pi-fallback/tuya_ble.py index aea1aa9..7bc21fe 100644 --- a/pi-fallback/tuya_ble.py +++ b/pi-fallback/tuya_ble.py @@ -148,9 +148,9 @@ def _decrypt(self, data: bytes) -> bytes: # Remove PKCS7 padding padding_len = decrypted[-1] - if padding_len <= 16: - return decrypted[:-padding_len] - return decrypted + if padding_len == 0 or padding_len > 16: + raise ValueError(f"Invalid PKCS7 padding length: {padding_len}") + return 
decrypted[:-padding_len] def _build_packet(self, command: int, data: bytes = b"") -> bytes: """Build a Tuya BLE packet.""" @@ -359,8 +359,13 @@ async def read_sensors(self) -> Optional[SensorData]: # If no DPs received, try triggering by writing temp unit if not self._received_dps: + self._response_event.clear() await self._trigger_update() - await asyncio.sleep(2) + if not self._received_dps: + try: + await asyncio.wait_for(self._response_event.wait(), timeout=5.0) + except asyncio.TimeoutError: + logger.warning("No DPs received after trigger update") # Parse received DPs into SensorData return self._parse_sensor_data() @@ -371,6 +376,10 @@ async def read_sensors(self) -> Optional[SensorData]: async def _trigger_update(self): """Trigger sensor to send data by writing temperature unit.""" + if not self._client or not self._client.is_connected: + logger.error("Device disconnected, cannot trigger update") + return + # Write DP 9 (temp_unit) = 0 (celsius) dp_data = struct.pack(">BBHB", 9, DPType.ENUM, 1, 0) From d53a4e9228b4592a12f23aca8a52cc09a7191ed5 Mon Sep 17 00:00:00 2001 From: Giovanni Ferri Date: Wed, 18 Feb 2026 22:40:54 +0000 Subject: [PATCH 2/3] Fix stale event signal in read_sensors() after _trigger_update() Clear _response_event after _trigger_update() returns and before the follow-up wait, so a DP_WRITE ack that resolved the internal wait in _trigger_update() does not cause the outer wait_for() to return immediately with a stale signal instead of properly waiting for the DP_REPORT notification. 
Nightshift-Task: bug-finder Nightshift-Ref: https://github.com/marcus/nightshift --- pi-fallback/tuya_ble.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pi-fallback/tuya_ble.py b/pi-fallback/tuya_ble.py index 7bc21fe..90033fe 100644 --- a/pi-fallback/tuya_ble.py +++ b/pi-fallback/tuya_ble.py @@ -362,6 +362,9 @@ async def read_sensors(self) -> Optional[SensorData]: self._response_event.clear() await self._trigger_update() if not self._received_dps: + # Clear stale signal from _trigger_update's DP_WRITE ack before + # waiting for the actual DP_REPORT notification + self._response_event.clear() try: await asyncio.wait_for(self._response_event.wait(), timeout=5.0) except asyncio.TimeoutError: From 7fade16a905b46e3ebbb5edb38755d7bb0b25931 Mon Sep 17 00:00:00 2001 From: Giovanni Ferri Date: Wed, 18 Feb 2026 22:49:12 +0000 Subject: [PATCH 3/3] Fix big-endian integer padding in _parse_dps() Use rjust instead of ljust when zero-padding VALUE-type DPs to 4 bytes before big-endian unpacking. ljust appends zeros to the right (LSB side), corrupting temperature and moisture readings for payloads shorter than 4 bytes. rjust correctly left-pads (MSB side) to preserve the value. Nightshift-Task: bug-finder Nightshift-Ref: https://github.com/marcus/nightshift --- pi-fallback/tuya_ble.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pi-fallback/tuya_ble.py b/pi-fallback/tuya_ble.py index 90033fe..26d6c6b 100644 --- a/pi-fallback/tuya_ble.py +++ b/pi-fallback/tuya_ble.py @@ -203,7 +203,7 @@ def _parse_dps(self, data: bytes) -> dict: if dp_type == DPType.BOOL: value = bool(dp_data[0]) if dp_data else False elif dp_type == DPType.VALUE: - value = struct.unpack(">i", dp_data.ljust(4, b'\x00')[:4])[0] + value = struct.unpack(">i", dp_data.rjust(4, b'\x00')[:4])[0] elif dp_type == DPType.STRING: value = dp_data.decode("utf-8", errors="replace") elif dp_type == DPType.ENUM: