-
Notifications
You must be signed in to change notification settings - Fork 30
feat(flo-cloud): add Azure Storage Queue as message queue #253
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,3 +1,4 @@ | ||
| from .blob_storage import AzureBlobStorage | ||
| from .storage_queue import StorageQueue | ||
|
|
||
| __all__ = ['AzureBlobStorage'] | ||
| __all__ = ['AzureBlobStorage', 'StorageQueue'] |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,107 @@ | ||
| import base64 | ||
| import binascii | ||
| import json | ||
| import os | ||
| from typing import List | ||
|
|
||
| from azure.identity import DefaultAzureCredential | ||
| from azure.storage.queue import QueueClient | ||
|
|
||
| from .._types import MessageQueue, MessageQueueDict | ||
|
|
||
|
|
||
| def _decode_message(content: str): | ||
| """Parse message content as JSON, falling back to base64-decode first. | ||
|
|
||
| Event Grid delivers messages to Storage Queue as base64-encoded JSON. | ||
| Messages we send ourselves are plain JSON. | ||
| """ | ||
| try: | ||
| return json.loads(content) | ||
| except json.JSONDecodeError: | ||
| try: | ||
| return json.loads(base64.b64decode(content).decode('utf-8')) | ||
| except (binascii.Error, UnicodeDecodeError) as e: | ||
| raise ValueError( | ||
| f'Message content is neither valid JSON nor base64-encoded JSON: {e}' | ||
| ) | ||
|
|
||
|
|
||
class StorageQueue(MessageQueue):
    """Azure Storage Queue implementation of the MessageQueue interface."""

    def __init__(self):
        """Build a queue client from environment configuration.

        Reads AZURE_STORAGE_QUEUE_URL and AZURE_STORAGE_QUEUE_NAME and
        authenticates via DefaultAzureCredential.

        Raises:
            ValueError: If either required environment variable is unset
                or empty.
        """
        def _require(var_name: str) -> str:
            # Treat empty strings the same as missing variables.
            value = os.environ.get(var_name)
            if not value:
                raise ValueError(f'{var_name} env var must be set')
            return value

        self._account_url = _require('AZURE_STORAGE_QUEUE_URL')
        self._queue_name = _require('AZURE_STORAGE_QUEUE_NAME')
        self._credential = DefaultAzureCredential()
        # Encode/decode policies are disabled: payloads are handled as raw
        # strings and parsed explicitly on receive.
        self._client = QueueClient(
            account_url=self._account_url,
            queue_name=self._queue_name,
            credential=self._credential,
            message_encode_policy=None,
            message_decode_policy=None,
        )
        # Maps pop_receipt (ack_id) -> message_id so delete can be done by
        # ack_id alone.
        self._pending: dict[str, str] = {}
|
|
||
| def receive_messages( | ||
| self, max_messages=10, wait_time_sec=20 | ||
| ) -> List[MessageQueueDict]: | ||
| """Receive messages from Azure Storage Queue. | ||
|
|
||
| Note: Azure Storage Queue does not support long-polling. This method | ||
| returns immediately, potentially with an empty list. The `wait_time_sec` | ||
| parameter is repurposed as the visibility timeout, controlling how long | ||
| received messages remain hidden from other consumers. | ||
|
|
||
| Args: | ||
| max_messages: Maximum number of messages to receive (1-32). | ||
| wait_time_sec: Visibility timeout in seconds for received messages. | ||
|
|
||
| Returns: | ||
| List of MessageQueueDict, possibly empty. | ||
| """ | ||
| received = [] | ||
| for msg in self._client.receive_messages( | ||
| max_messages=max_messages, | ||
| visibility_timeout=wait_time_sec, | ||
| ): | ||
| self._pending[msg.pop_receipt] = msg.id | ||
| body = _decode_message(msg.content) | ||
| received.append( | ||
| MessageQueueDict(body=body, ack_id=msg.pop_receipt, id=msg.id) | ||
| ) | ||
| return received | ||
|
coderabbitai[bot] marked this conversation as resolved.
|
||
|
|
||
| def delete_message(self, ack_id: str): | ||
| message_id = self._pending.pop(ack_id, None) | ||
| if message_id is None: | ||
| raise ValueError( | ||
| f'No pending message found for ack_id {ack_id!r}. ' | ||
| 'It may have already been deleted or never received.' | ||
| ) | ||
| self._client.delete_message(message_id, ack_id) | ||
|
|
||
| def add_message( | ||
| self, message_body: dict, topic_name_or_queue_url: str | None = None | ||
| ) -> str: | ||
| if topic_name_or_queue_url and topic_name_or_queue_url != self._queue_name: | ||
| client = QueueClient( | ||
| account_url=self._account_url, | ||
| queue_name=topic_name_or_queue_url, | ||
| credential=self._credential, | ||
| message_encode_policy=None, | ||
| message_decode_policy=None, | ||
| ) | ||
| else: | ||
| client = self._client | ||
| result = client.send_message(json.dumps(message_body)) | ||
| return result.id | ||
|
Comment on lines
+93
to
+107
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Ambiguous handling of The comparison Consider clarifying the expected input or normalizing the comparison: 🛠️ Suggested fix to handle both URL and name def add_message(
self, message_body: dict, topic_name_or_queue_url: str | None = None
) -> str:
- if topic_name_or_queue_url and topic_name_or_queue_url != self._queue_name:
+ # Normalize: use default queue if not specified or if it matches current queue
+ target_queue = topic_name_or_queue_url
+ if not target_queue or target_queue == self._queue_name:
client = self._client
+ else:
+ # Assume topic_name_or_queue_url is a queue name (not a full URL)
client = QueueClient(
account_url=self._account_url,
- queue_name=topic_name_or_queue_url,
+ queue_name=target_queue,
credential=self._credential,
message_encode_policy=None,
message_decode_policy=None,
)
- else:
- client = self._client
result = client.send_message(json.dumps(message_body))
return result.idAlternatively, rename the parameter to just 🤖 Prompt for AI Agents |
||
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Chain the exception using `raise ... from e` for a proper traceback.

The static analysis tool (Ruff B904) correctly identifies that exceptions raised within an `except` clause should use exception chaining. This preserves the original traceback and makes debugging easier.🛠️ Proposed fix
exceptclause should use exception chaining. This preserves the original traceback and makes debugging easier.🛠️ Proposed fix
except (binascii.Error, UnicodeDecodeError) as e: raise ValueError( f'Message content is neither valid JSON nor base64-encoded JSON: {e}' - ) + ) from e🧰 Tools
🪛 Ruff (0.15.6)
[warning] 25-27: Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling. (B904)
🤖 Prompt for AI Agents