From c3b7e996102704ed1c91661e0fed4fd23238fbfc Mon Sep 17 00:00:00 2001 From: yijxie Date: Sat, 18 May 2019 16:56:36 -0700 Subject: [PATCH 01/49] Move to under sdk --- .../azure/eventhub/__init__.py | 20 +- .../azure/eventhub/aio/__init__.py | 9 + .../eventhub/aio/event_hubs_client_async.py | 317 ++++++++++++++ .../azure/eventhub/aio/receiver_async.py | 332 ++++++++++++++ .../azure/eventhub/aio/sender_async.py | 352 +++++++++++++++ .../azure-eventhubs/azure/eventhub/client.py | 414 ++---------------- .../azure/eventhub/client_abstract.py | 312 +++++++++++++ .../azure-eventhubs/azure/eventhub/common.py | 17 +- .../azure/eventhub/configuration.py | 21 + .../azure/eventhub/policies/__init__.py | 23 + .../azure/eventhub/policies/base.py | 17 + .../azure/eventhub/policies/policies.py | 63 +++ .../azure/eventhub/receiver.py | 47 +- .../azure-eventhubs/azure/eventhub/sender.py | 114 +++-- .../eventprocessorhost/eh_partition_pump.py | 5 +- .../eventprocessorhost/partition_manager.py | 5 +- .../debugging/get_eventhub_info.py | 10 + .../azure-eventhubs/debugging/receive.py | 14 + .../debugging/receive_async.py | 26 ++ .../azure-eventhubs/debugging/send.py | 34 ++ .../azure-eventhubs/debugging/send_async.py | 20 + 21 files changed, 1749 insertions(+), 423 deletions(-) create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/aio/__init__.py create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py create mode 100644 
sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py create mode 100644 sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py create mode 100644 sdk/eventhub/azure-eventhubs/debugging/receive.py create mode 100644 sdk/eventhub/azure-eventhubs/debugging/receive_async.py create mode 100644 sdk/eventhub/azure-eventhubs/debugging/send.py create mode 100644 sdk/eventhub/azure-eventhubs/debugging/send_async.py diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py index 7067761d5ef6..be8547693d2b 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py @@ -9,11 +9,17 @@ from azure.eventhub.client import EventHubClient from azure.eventhub.sender import Sender from azure.eventhub.receiver import Receiver +from uamqp.constants import MessageSendResult +from uamqp.constants import TransportType + +__all__ = [ + "EventData", + "EventHubError", + "Offset", + "EventHubClient", + "Sender", + "Receiver", + "MessageSendResult", + "TransportType", +] -try: - from azure.eventhub.async_ops import ( - EventHubClientAsync, - AsyncSender, - AsyncReceiver) -except (ImportError, SyntaxError): - pass # Python 3 async features not supported diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/__init__.py new file mode 100644 index 000000000000..020392000d1f --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/__init__.py @@ -0,0 +1,9 @@ +from .event_hubs_client_async import EventHubClient +from .receiver_async import Receiver +from .sender_async import Sender + +__all__ = [ + "EventHubClient", + "Receiver", + "Sender" +] diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py new file mode 100644 index 000000000000..d6cb0003ba18 --- 
/dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -0,0 +1,317 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import logging +import asyncio +import time +import datetime + +from uamqp import authentication, constants, types, errors +from uamqp import ( + Message, + AMQPClientAsync, +) + +from azure.eventhub.common import parse_sas_token +from azure.eventhub import ( + EventHubError) +from ..client_abstract import EventHubClientAbstract + +from .sender_async import Sender +from .receiver_async import Receiver + + +log = logging.getLogger(__name__) + + +class EventHubClient(EventHubClientAbstract): + """ + The EventHubClient class defines a high level interface for asynchronously + sending events to and receiving events from the Azure Event Hubs service. + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START create_eventhub_client_async] + :end-before: [END create_eventhub_client_async] + :language: python + :dedent: 4 + :caption: Create a new instance of the Event Hub client async. + + """ + + def _create_auth(self, username=None, password=None): + """ + Create an ~uamqp.authentication.cbs_auth_async.SASTokenAuthAsync instance to authenticate + the session. + + :param username: The name of the shared access policy. + :type username: str + :param password: The shared access key. 
+ :type password: str + """ + http_proxy = self.config.http_proxy_policy.http_proxy + transport_type = self.config.transport_type + auth_timeout = self.config.auth_timeout + if self.sas_token: + token = self.sas_token() if callable(self.sas_token) else self.sas_token + try: + expiry = int(parse_sas_token(token)['se']) + except (KeyError, TypeError, IndexError): + raise ValueError("Supplied SAS token has no valid expiry value.") + return authentication.SASTokenAsync( + self.auth_uri, self.auth_uri, token, + expires_at=expiry, + timeout=auth_timeout, + http_proxy=http_proxy, transport_type=transport_type) + + username = username or self._auth_config['username'] + password = password or self._auth_config['password'] + if "@sas.root" in username: + return authentication.SASLPlain( + self.address.hostname, username, password, http_proxy=http_proxy, transport_type=transport_type) + return authentication.SASTokenAsync.from_shared_access_key( + self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) + + async def _close_clients_async(self): + """ + Close all open AsyncSender/AsyncReceiver clients. 
+ """ + await asyncio.gather(*[c.close_async() for c in self.clients]) + + async def _wait_for_client(self, client): + try: + while client.get_handler_state().value == 2: + await client._handler._connection.work_async() # pylint: disable=protected-access + except Exception as exp: # pylint: disable=broad-except + await client.close_async(exception=exp) + + async def _start_client_async(self, client): + try: + if not client.running: + await client.open_async() + except Exception as exp: # pylint: disable=broad-except + log.info("Encountered error while starting handler: %r", exp) + await client.close_async(exception=exp) + log.info("Finished closing failed handler") + + async def _handle_redirect(self, redirects): + if len(redirects) != len(self.clients): + not_redirected = [c for c in self.clients if not c.redirected] + _, timeout = await asyncio.wait([self._wait_for_client(c) for c in not_redirected], timeout=5) + if timeout: + raise EventHubError("Some clients are attempting to redirect the connection.") + redirects = [c.redirected for c in self.clients if c.redirected] + if not all(r.hostname == redirects[0].hostname for r in redirects): + raise EventHubError("Multiple clients attempting to redirect to different hosts.") + self._process_redirect_uri(redirects[0]) + await asyncio.gather(*[c.open_async() for c in self.clients]) + + async def run_async(self): + """ + Run the EventHubClient asynchronously. + Opens the connection and starts running all AsyncSender/AsyncReceiver clients. + Returns a list of the start up results. For a succcesful client start the + result will be `None`, otherwise the exception raised. + If all clients failed to start, then run will fail, shut down the connection + and raise an exception. + If at least one client starts up successfully the run command will succeed. + + :rtype: list[~azure.eventhub.common.EventHubError] + + Example: + .. 
literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_run_async] + :end-before: [END eventhub_client_run_async] + :language: python + :dedent: 4 + :caption: Run the EventHubClient asynchronously. + + """ + log.info("%r: Starting %r clients", self.container_id, len(self.clients)) + tasks = [self._start_client_async(c) for c in self.clients] + try: + await asyncio.gather(*tasks) + redirects = [c.redirected for c in self.clients if c.redirected] + failed = [c.error for c in self.clients if c.error] + if failed and len(failed) == len(self.clients): + log.warning("%r: All clients failed to start.", self.container_id) + raise failed[0] + if failed: + log.warning("%r: %r clients failed to start.", self.container_id, len(failed)) + elif redirects: + await self._handle_redirect(redirects) + except EventHubError: + await self.stop_async() + raise + except Exception as exp: + await self.stop_async() + raise EventHubError(str(exp)) + return failed + + async def stop_async(self): + """ + Stop the EventHubClient and all its Sender/Receiver clients. + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_async_stop] + :end-before: [END eventhub_client_async_stop] + :language: python + :dedent: 4 + :caption: Stop the EventHubClient and all its Sender/Receiver clients. + + """ + log.info("%r: Stopping %r clients", self.container_id, len(self.clients)) + self.stopped = True + await self._close_clients_async() + + async def get_eventhub_info_async(self): + """ + Get details on the specified EventHub async. + + :rtype: dict + """ + alt_creds = { + "username": self._auth_config.get("iot_username"), + "password":self._auth_config.get("iot_password")} + # TODO: add proxy? 
+ try: + mgmt_auth = self._create_auth(**alt_creds) + mgmt_client = AMQPClientAsync(self.mgmt_target, auth=mgmt_auth, debug=self.debug) + await mgmt_client.open_async() + mgmt_msg = Message(application_properties={'name': self.eh_name}) + response = await mgmt_client.mgmt_request_async( + mgmt_msg, + constants.READ_OPERATION, + op_type=b'com.microsoft:eventhub', + status_code_field=b'status-code', + description_fields=b'status-description') + eh_info = response.get_data() + output = {} + if eh_info: + output['name'] = eh_info[b'name'].decode('utf-8') + output['type'] = eh_info[b'type'].decode('utf-8') + output['created_at'] = datetime.datetime.fromtimestamp(float(eh_info[b'created_at'])/1000) + output['partition_count'] = eh_info[b'partition_count'] + output['partition_ids'] = [p.decode('utf-8') for p in eh_info[b'partition_ids']] + return output + finally: + await mgmt_client.close_async() + + def add_async_receiver( + self, consumer_group, partition, offset=None, epoch=None, prefetch=300, operation=None, loop=None): + """ + Add an async receiver to the client for a particular consumer group and partition. + + :param consumer_group: The name of the consumer group. + :type consumer_group: str + :param partition: The ID of the partition. + :type partition: str + :param offset: The offset from which to start receiving. + :type offset: ~azure.eventhub.common.Offset + :param prefetch: The message prefetch count of the receiver. Default is 300. + :type prefetch: int + :operation: An optional operation to be appended to the hostname in the source URL. + The value must start with `/` character. + :type operation: str + :rtype: ~azure.eventhub.aio.receiver_async.ReceiverAsync + + Example: + .. 
literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START create_eventhub_client_async_receiver] + :end-before: [END create_eventhub_client_async_receiver] + :language: python + :dedent: 4 + :caption: Add an async receiver to the client for a particular consumer group and partition. + + """ + path = self.address.path + operation if operation else self.address.path + source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( + self.address.hostname, path, consumer_group, partition) + handler = Receiver( + self, source_url, offset=offset, epoch=epoch, prefetch=prefetch, loop=loop) + self.clients.append(handler) + return handler + + def add_async_epoch_receiver( + self, consumer_group, partition, epoch, prefetch=300, operation=None, loop=None): + """ + Add an async receiver to the client with an epoch value. Only a single epoch receiver + can connect to a partition at any given time - additional epoch receivers must have + a higher epoch value or they will be rejected. If a 2nd epoch receiver has + connected, the first will be closed. + + :param consumer_group: The name of the consumer group. + :type consumer_group: str + :param partition: The ID of the partition. + :type partition: str + :param epoch: The epoch value for the receiver. + :type epoch: int + :param prefetch: The message prefetch count of the receiver. Default is 300. + :type prefetch: int + :operation: An optional operation to be appended to the hostname in the source URL. + The value must start with `/` character. + :type operation: str + :rtype: ~azure.eventhub.aio.receiver_async.ReceiverAsync + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START create_eventhub_client_async_epoch_receiver] + :end-before: [END create_eventhub_client_async_epoch_receiver] + :language: python + :dedent: 4 + :caption: Add an async receiver to the client with an epoch value. 
+ + """ + path = self.address.path + operation if operation else self.address.path + source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( + self.address.hostname, path, consumer_group, partition) + handler = Receiver( + self, source_url, prefetch=prefetch, epoch=epoch, loop=loop) + self.clients.append(handler) + return handler + + def add_async_sender( + self, partition=None, operation=None, send_timeout=60, loop=None): + """ + Add an async sender to the client to send ~azure.eventhub.common.EventData object + to an EventHub. + + :param partition: Optionally specify a particular partition to send to. + If omitted, the events will be distributed to available partitions via + round-robin. + :type partition: str + :operation: An optional operation to be appended to the hostname in the target URL. + The value must start with `/` character. + :type operation: str + :param send_timeout: The timeout in seconds for an individual event to be sent from the time that it is + queued. Default value is 60 seconds. If set to 0, there will be no timeout. + :type send_timeout: int + :param keep_alive: The time interval in seconds between pinging the connection to keep it alive during + periods of inactivity. The default value is 30 seconds. If set to `None`, the connection will not + be pinged. + :type keep_alive: int + :param auto_reconnect: Whether to automatically reconnect the sender if a retryable error occurs. + Default value is `True`. + :type auto_reconnect: bool + :rtype: ~azure.eventhub.aio.sender_async.SenderAsync + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START create_eventhub_client_async_sender] + :end-before: [END create_eventhub_client_async_sender] + :language: python + :dedent: 4 + :caption: Add an async sender to the client to + send ~azure.eventhub.common.EventData object to an EventHub. 
+ + """ + target = "amqps://{}{}".format(self.address.hostname, self.address.path) + if operation: + target = target + operation + handler = Sender( + self, target, partition=partition, send_timeout=send_timeout, loop=loop) + self.clients.append(handler) + return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py new file mode 100644 index 000000000000..bad66bcae917 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -0,0 +1,332 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import asyncio +import uuid +import logging + +from uamqp import errors, types +from uamqp import ReceiveClientAsync, Source + +from azure.eventhub import EventHubError, EventData +from azure.eventhub.receiver import Receiver +from azure.eventhub.common import _error_handler + +log = logging.getLogger(__name__) + + +class Receiver(object): + """ + Implements the async API of a Receiver. + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START create_eventhub_client_async_receiver_instance] + :end-before: [END create_eventhub_client_async_receiver_instance] + :language: python + :dedent: 4 + :caption: Create a new instance of the Async Receiver. + + """ + timeout = 0 + _epoch = b'com.microsoft:epoch' + + def __init__( # pylint: disable=super-init-not-called + self, client, source, offset=None, prefetch=300, epoch=None, + keep_alive=None, auto_reconnect=True, loop=None): + """ + Instantiate an async receiver. + + :param client: The parent EventHubClientAsync. 
+ :type client: ~azure.eventhub.aio.EventHubClientAsync + :param source: The source EventHub from which to receive events. + :type source: ~uamqp.address.Source + :param prefetch: The number of events to prefetch from the service + for processing. Default is 300. + :type prefetch: int + :param epoch: An optional epoch value. + :type epoch: int + :param loop: An event loop. + """ + self.loop = loop or asyncio.get_event_loop() + self.running = False + self.client = client + self.source = source + self.offset = offset + self.prefetch = prefetch + self.epoch = epoch + self.keep_alive = client.config.keep_alive_policy.keep_alive + self.auto_reconnect = client.config.auto_reconnect_policy.auto_reconnect + self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) + self.reconnect_backoff = 1 + self.redirected = None + self.error = None + self.properties = None + partition = self.source.split('/')[-1] + self.name = "EHReceiver-{}-partition{}".format(uuid.uuid4(), partition) + source = Source(self.source) + if self.offset is not None: + source.set_filter(self.offset.selector()) + if epoch: + self.properties = {types.AMQPSymbol(self._epoch): types.AMQPLong(int(epoch))} + self._handler = ReceiveClientAsync( + source, + auth=self.client.get_auth(), + debug=self.client.config.network_trace_policy.network_trace_logging, + prefetch=self.prefetch, + link_properties=self.properties, + timeout=self.timeout, + error_policy=self.retry_policy, + keep_alive_interval=self.keep_alive, + client_name=self.name, + properties=self.client.create_properties(), + loop=self.loop) + + async def open_async(self): + """ + Open the Receiver using the supplied conneciton. + If the handler has previously been redirected, the redirect + context will be used to create a new handler before opening it. + + :param connection: The underlying client shared connection. + :type: connection: ~uamqp.async_ops.connection_async.ConnectionAsync + + Example: + .. 
literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_async_receiver_open] + :end-before: [END eventhub_client_async_receiver_open] + :language: python + :dedent: 4 + :caption: Open the Receiver using the supplied conneciton. + + """ + # pylint: disable=protected-access + self.running = True + if self.redirected: + self.source = self.redirected.address + source = Source(self.source) + if self.offset is not None: + source.set_filter(self.offset.selector()) + alt_creds = { + "username": self.client._auth_config.get("iot_username"), + "password":self.client._auth_config.get("iot_password")} + self._handler = ReceiveClientAsync( + source, + auth=self.client.get_auth(**alt_creds), + debug=self.client.debug, + prefetch=self.prefetch, + link_properties=self.properties, + timeout=self.timeout, + error_policy=self.retry_policy, + keep_alive_interval=self.keep_alive, + client_name=self.name, + properties=self.client.create_properties(), + loop=self.loop) + await self._handler.open_async() + while not await self._handler.client_ready_async(): + await asyncio.sleep(0.05) + + async def _reconnect_async(self): # pylint: disable=too-many-statements + # pylint: disable=protected-access + alt_creds = { + "username": self.client._auth_config.get("iot_username"), + "password":self.client._auth_config.get("iot_password")} + await self._handler.close_async() + source = Source(self.source) + if self.offset is not None: + source.set_filter(self.offset.selector()) + self._handler = ReceiveClientAsync( + source, + auth=self.client.get_auth(**alt_creds), + debug=self.client.debug, + prefetch=self.prefetch, + link_properties=self.properties, + timeout=self.timeout, + error_policy=self.retry_policy, + keep_alive_interval=self.keep_alive, + client_name=self.name, + properties=self.client.create_properties(), + loop=self.loop) + try: + await self._handler.open_async() + while not await self._handler.client_ready_async(): + await 
asyncio.sleep(0.05) + return True + except errors.TokenExpired as shutdown: + log.info("AsyncReceiver disconnected due to token expiry. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("AsyncReceiver detached. Attempting reconnect.") + return False + log.info("AsyncReceiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("AsyncReceiver detached. Attempting reconnect.") + return False + log.info("AsyncReceiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except errors.AMQPConnectionError as shutdown: + if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: + log.info("AsyncReceiver couldn't authenticate. Attempting reconnect.") + return False + log.info("AsyncReceiver connection error (%r). Shutting down.", shutdown) + error = EventHubError(str(shutdown)) + await self.close_async(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). Shutting down.", e) + error = EventHubError("Receiver reconnect failed: {}".format(e)) + await self.close_async(exception=error) + raise error + + async def reconnect_async(self): + """If the Receiver was disconnected from the service with + a retryable error - attempt to reconnect.""" + while not await self._reconnect_async(): + await asyncio.sleep(self.reconnect_backoff) + + async def has_started(self): + """ + Whether the handler has completed all start up processes such as + establishing the connection, session, link and authentication, and + is not ready to process messages. 
+ **This function is now deprecated and will be removed in v2.0+.** + + :rtype: bool + """ + # pylint: disable=protected-access + timeout = False + auth_in_progress = False + if self._handler._connection.cbs: + timeout, auth_in_progress = await self._handler._auth.handle_token_async() + if timeout: + raise EventHubError("Authorization timeout.") + if auth_in_progress: + return False + if not await self._handler._client_ready_async(): + return False + return True + + async def close_async(self, exception=None): + """ + Close down the handler. If the handler has already closed, + this will be a no op. An optional exception can be passed in to + indicate that the handler was shutdown due to error. + + :param exception: An optional exception if the handler is closing + due to an error. + :type exception: Exception + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_async_receiver_close] + :end-before: [END eventhub_client_async_receiver_close] + :language: python + :dedent: 4 + :caption: Close down the handler. + + """ + self.running = False + if self.error: + return + if isinstance(exception, errors.LinkRedirect): + self.redirected = exception + elif isinstance(exception, EventHubError): + self.error = exception + elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): + self.error = EventHubError(str(exception), exception) + elif exception: + self.error = EventHubError(str(exception)) + else: + self.error = EventHubError("This receive handler is now closed.") + await self._handler.close_async() + + async def receive(self, max_batch_size=None, timeout=None): + """ + Receive events asynchronously from the EventHub. + + :param max_batch_size: Receive a batch of events. Batch size will + be up to the maximum specified, but will return as soon as service + returns no new events. If combined with a timeout and no events are + retrieve before the time, the result will be empty. 
If no batch + size is supplied, the prefetch size will be the maximum. + :type max_batch_size: int + :rtype: list[~azure.eventhub.common.EventData] + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_async_receive] + :end-before: [END eventhub_client_async_receive] + :language: python + :dedent: 4 + :caption: Sends an event data and asynchronously waits + until acknowledgement is received or operation times out. + + """ + if self.error: + raise self.error + if not self.running: + raise ValueError("Unable to receive until client has been started.") + data_batch = [] + try: + timeout_ms = 1000 * timeout if timeout else 0 + message_batch = await self._handler.receive_message_batch_async( + max_batch_size=max_batch_size, + timeout=timeout_ms) + for message in message_batch: + event_data = EventData(message=message) + self.offset = event_data.offset + data_batch.append(event_data) + return data_batch + except (errors.TokenExpired, errors.AuthenticationException): + log.info("AsyncReceiver disconnected due to token error. Attempting reconnect.") + await self.reconnect_async() + return data_batch + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("AsyncReceiver detached. Attempting reconnect.") + await self.reconnect_async() + return data_batch + log.info("AsyncReceiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("AsyncReceiver detached. Attempting reconnect.") + await self.reconnect_async() + return data_batch + log.info("AsyncReceiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + await self.close_async(exception=error) + raise error + + async def __aenter__(self): + await self.open_async() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + self.client.clients.remove(self) + await self.close_async(exc_val) + + def __aiter__(self): + self.messages_iter = self._handler.receive_messages_iter_async() + return self + + async def __anext__(self): + return await self.messages_iter.__anext__() diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py new file mode 100644 index 000000000000..f1f8824e422a --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -0,0 +1,352 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import uuid +import asyncio +import logging + +from uamqp import constants, errors +from uamqp import SendClientAsync + +from azure.eventhub import EventHubError +from azure.eventhub.sender import Sender +from azure.eventhub.common import _error_handler + +log = logging.getLogger(__name__) + + +class Sender(object): + """ + Implements the async API of a Sender. + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START create_eventhub_client_async_sender_instance] + :end-before: [END create_eventhub_client_async_sender_instance] + :language: python + :dedent: 4 + :caption: Create a new instance of the Async Sender. 
+ + """ + + def __init__( # pylint: disable=super-init-not-called + self, client, target, partition=None, send_timeout=60, + keep_alive=None, auto_reconnect=True, loop=None): + """ + Instantiate an EventHub event SenderAsync handler. + + :param client: The parent EventHubClientAsync. + :type client: ~azure.eventhub.aio.EventHubClientAsync + :param target: The URI of the EventHub to send to. + :type target: str + :param partition: The specific partition ID to send to. Default is `None`, in which case the service + will assign to all partitions using round-robin. + :type partition: str + :param send_timeout: The timeout in seconds for an individual event to be sent from the time that it is + queued. Default value is 60 seconds. If set to 0, there will be no timeout. + :type send_timeout: int + :param keep_alive: The time interval in seconds between pinging the connection to keep it alive during + periods of inactivity. The default value is `None`, i.e. no keep alive pings. + :type keep_alive: int + :param auto_reconnect: Whether to automatically reconnect the sender if a retryable error occurs. + Default value is `True`. + :type auto_reconnect: bool + :param loop: An event loop. If not specified the default event loop will be used. 
+ """ + self.loop = loop or asyncio.get_event_loop() + self.running = False + self.client = client + self.target = target + self.partition = partition + self.keep_alive = client.config.keep_alive_policy.keep_alive + self.auto_reconnect = client.config.auto_reconnect_policy.auto_reconnect + self.timeout = send_timeout + self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) + self.reconnect_backoff = 1 + self.name = "EHSender-{}".format(uuid.uuid4()) + self.redirected = None + self.error = None + if partition: + self.target += "/Partitions/" + partition + self.name += "-partition{}".format(partition) + self._handler = SendClientAsync( + self.target, + auth=self.client.get_auth(), + debug=self.client.debug, + msg_timeout=self.timeout, + error_policy=self.retry_policy, + keep_alive_interval=self.keep_alive, + client_name=self.name, + properties=self.client.create_properties(), + loop=self.loop) + self._outcome = None + self._condition = None + + async def open_async(self): + """ + Open the Sender using the supplied conneciton. + If the handler has previously been redirected, the redirect + context will be used to create a new handler before opening it. + + :param connection: The underlying client shared connection. + :type: connection: ~uamqp.async_ops.connection_async.ConnectionAsync + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_async_sender_open] + :end-before: [END eventhub_client_async_sender_open] + :language: python + :dedent: 4 + :caption: Open the Sender using the supplied conneciton. 
+ + """ + self.running = True + if self.redirected: + self.target = self.redirected.address + self._handler = SendClientAsync( + self.target, + auth=self.client.get_auth(), + debug=self.client.debug, + msg_timeout=self.timeout, + error_policy=self.retry_policy, + keep_alive_interval=self.keep_alive, + client_name=self.name, + properties=self.client.create_properties(), + loop=self.loop) + await self._handler.open_async() + while not await self._handler.client_ready_async(): + await asyncio.sleep(0.05) + + async def _reconnect_async(self): + await self._handler.close_async() + unsent_events = self._handler.pending_messages + self._handler = SendClientAsync( + self.target, + auth=self.client.get_auth(), + debug=self.client.debug, + msg_timeout=self.timeout, + error_policy=self.retry_policy, + keep_alive_interval=self.keep_alive, + client_name=self.name, + properties=self.client.create_properties(), + loop=self.loop) + try: + await self._handler.open_async() + self._handler.queue_message(*unsent_events) + await self._handler.wait_async() + return True + except errors.TokenExpired as shutdown: + log.info("AsyncSender disconnected due to token expiry. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("AsyncSender detached. Attempting reconnect.") + return False + log.info("AsyncSender reconnect failed. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("AsyncSender detached. Attempting reconnect.") + return False + log.info("AsyncSender reconnect failed. 
Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except errors.AMQPConnectionError as shutdown: + if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: + log.info("AsyncSender couldn't authenticate. Attempting reconnect.") + return False + log.info("AsyncSender connection error (%r). Shutting down.", shutdown) + error = EventHubError(str(shutdown)) + await self.close_async(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). Shutting down.", e) + error = EventHubError("Sender reconnect failed: {}".format(e)) + await self.close_async(exception=error) + raise error + + async def reconnect_async(self): + """If the Receiver was disconnected from the service with + a retryable error - attempt to reconnect.""" + while not await self._reconnect_async(): + await asyncio.sleep(self.reconnect_backoff) + + async def has_started(self): + """ + Whether the handler has completed all start up processes such as + establishing the connection, session, link and authentication, and + is not ready to process messages. + **This function is now deprecated and will be removed in v2.0+.** + + :rtype: bool + """ + # pylint: disable=protected-access + timeout = False + auth_in_progress = False + if self._handler._connection.cbs: + timeout, auth_in_progress = await self._handler._auth.handle_token_async() + if timeout: + raise EventHubError("Authorization timeout.") + if auth_in_progress: + return False + if not await self._handler._client_ready_async(): + return False + return True + + async def close_async(self, exception=None): + """ + Close down the handler. If the handler has already closed, + this will be a no op. An optional exception can be passed in to + indicate that the handler was shutdown due to error. + + :param exception: An optional exception if the handler is closing + due to an error. 
+ :type exception: Exception + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_async_sender_close] + :end-before: [END eventhub_client_async_sender_close] + :language: python + :dedent: 4 + :caption: Close down the handler. + + """ + self.running = False + if self.error: + return + if isinstance(exception, errors.LinkRedirect): + self.redirected = exception + elif isinstance(exception, EventHubError): + self.error = exception + elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): + self.error = EventHubError(str(exception), exception) + elif exception: + self.error = EventHubError(str(exception)) + else: + self.error = EventHubError("This send handler is now closed.") + await self._handler.close_async() + + async def send(self, event_data): + """ + Sends an event data and asynchronously waits until + acknowledgement is received or operation times out. + + :param event_data: The event to be sent. + :type event_data: ~azure.eventhub.common.EventData + :raises: ~azure.eventhub.common.EventHubError if the message fails to + send. + + Example: + .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py + :start-after: [START eventhub_client_async_send] + :end-before: [END eventhub_client_async_send] + :language: python + :dedent: 4 + :caption: Sends an event data and asynchronously waits + until acknowledgement is received or operation times out. 
+ + """ + if self.error: + raise self.error + if not self.running: + raise ValueError("Unable to send until client has been started.") + if event_data.partition_key and self.partition: + raise ValueError("EventData partition key cannot be used with a partition sender.") + event_data.message.on_send_complete = self._on_outcome + try: + await self._handler.send_message_async(event_data.message) + if self._outcome != constants.MessageSendResult.Ok: + raise Sender._error(self._outcome, self._condition) + except (errors.TokenExpired, errors.AuthenticationException): + log.info("AsyncSender disconnected due to token error. Attempting reconnect.") + await self.reconnect_async() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("AsyncSender detached. Attempting reconnect.") + await self.reconnect_async() + else: + log.info("AsyncSender detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("AsyncSender detached. Attempting reconnect.") + await self.reconnect_async() + else: + log.info("AsyncSender detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). Shutting down.", e) + error = EventHubError("Send failed: {}".format(e)) + await self.close_async(exception=error) + raise error + else: + return self._outcome + + async def wait_async(self): + """ + Wait until all transferred events have been sent. + """ + if self.error: + raise self.error + if not self.running: + raise ValueError("Unable to send until client has been started.") + try: + await self._handler.wait_async() + except (errors.TokenExpired, errors.AuthenticationException): + log.info("AsyncSender disconnected due to token error. 
Attempting reconnect.") + await self.reconnect_async() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("AsyncSender detached. Attempting reconnect.") + await self.reconnect_async() + else: + log.info("AsyncSender detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("AsyncSender detached. Attempting reconnect.") + await self.reconnect_async() + else: + log.info("AsyncSender detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close_async(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r).", e) + raise EventHubError("Send failed: {}".format(e)) + + def _on_outcome(self, outcome, condition): + """ + Called when the outcome is received for a delivery. + + :param outcome: The outcome of the message delivery - success or failure. 
+ :type outcome: ~uamqp.constants.MessageSendResult + """ + self._outcome = outcome + self._condition = condition + + async def __aenter__(self): + await self.open_async() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + self.client.clients.remove(self) + await self.close_async(exc_val) + + @staticmethod + def _error(outcome, condition): + return None if outcome == constants.MessageSendResult.Ok else EventHubError(outcome, condition) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index a50babfca8c3..688aac92b504 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -25,67 +25,13 @@ from azure.eventhub.sender import Sender from azure.eventhub.receiver import Receiver from azure.eventhub.common import EventHubError, parse_sas_token +from .client_abstract import EventHubClientAbstract log = logging.getLogger(__name__) -def _parse_conn_str(conn_str): - endpoint = None - shared_access_key_name = None - shared_access_key = None - entity_path = None - for element in conn_str.split(';'): - key, _, value = element.partition('=') - if key.lower() == 'endpoint': - endpoint = value.rstrip('/') - elif key.lower() == 'hostname': - endpoint = value.rstrip('/') - elif key.lower() == 'sharedaccesskeyname': - shared_access_key_name = value - elif key.lower() == 'sharedaccesskey': - shared_access_key = value - elif key.lower() == 'entitypath': - entity_path = value - if not all([endpoint, shared_access_key_name, shared_access_key]): - raise ValueError("Invalid connection string") - return endpoint, shared_access_key_name, shared_access_key, entity_path - - -def _generate_sas_token(uri, policy, key, expiry=None): - """Create a shared access signiture token as a string literal. - :returns: SAS token as string literal. 
- :rtype: str - """ - from base64 import b64encode, b64decode - from hashlib import sha256 - from hmac import HMAC - if not expiry: - expiry = time.time() + 3600 # Default to 1 hour. - encoded_uri = quote_plus(uri) - ttl = int(expiry) - sign_key = '%s\n%d' % (encoded_uri, ttl) - signature = b64encode(HMAC(b64decode(key), sign_key.encode('utf-8'), sha256).digest()) - result = { - 'sr': uri, - 'sig': signature, - 'se': str(ttl)} - if policy: - result['skn'] = policy - return 'SharedAccessSignature ' + urlencode(result) - - -def _build_uri(address, entity): - parsed = urlparse(address) - if parsed.path: - return address - if not entity: - raise ValueError("No EventHub specified") - address += "/" + str(entity) - return address - - -class EventHubClient(object): +class EventHubClient(EventHubClientAbstract): """ The EventHubClient class defines a high level interface for sending events to and receiving events from the Azure Event Hubs service. @@ -100,166 +46,6 @@ class EventHubClient(object): """ - def __init__( - self, address, username=None, password=None, debug=False, - http_proxy=None, auth_timeout=60, sas_token=None): - """ - Constructs a new EventHubClient with the given address URL. - - :param address: The full URI string of the Event Hub. This can optionally - include URL-encoded access name and key. - :type address: str - :param username: The name of the shared access policy. This must be supplied - if not encoded into the address. - :type username: str - :param password: The shared access key. This must be supplied if not encoded - into the address. - :type password: str - :param debug: Whether to output network trace logs to the logger. Default - is `False`. - :type debug: bool - :param http_proxy: HTTP proxy settings. This must be a dictionary with the following - keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). - Additionally the following keys may also be present: 'username', 'password'. 
- :type http_proxy: dict[str, Any] - :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. - The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. - :type auth_timeout: int - :param sas_token: A SAS token or function that returns a SAS token. If a function is supplied, - it will be used to retrieve subsequent tokens in the case of token expiry. The function should - take no arguments. - :type sas_token: str or callable - """ - self.container_id = "eventhub.pysdk-" + str(uuid.uuid4())[:8] - self.sas_token = sas_token - self.address = urlparse(address) - self.eh_name = self.address.path.lstrip('/') - self.http_proxy = http_proxy - self.mgmt_target = "amqps://{}/{}".format(self.address.hostname, self.eh_name) - url_username = unquote_plus(self.address.username) if self.address.username else None - username = username or url_username - url_password = unquote_plus(self.address.password) if self.address.password else None - password = password or url_password - if (not username or not password) and not sas_token: - raise ValueError("Please supply either username and password, or a SAS token") - self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) - self._auth_config = {'username': username, 'password': password} - self.get_auth = functools.partial(self._create_auth) - self.debug = debug - self.auth_timeout = auth_timeout - - self.clients = [] - self.stopped = False - log.info("%r: Created the Event Hub client", self.container_id) - - @classmethod - def from_sas_token(cls, address, sas_token, eventhub=None, **kwargs): - """Create an EventHubClient from an existing auth token or token generator. - - :param address: The Event Hub address URL - :type address: str - :param sas_token: A SAS token or function that returns a SAS token. If a function is supplied, - it will be used to retrieve subsequent tokens in the case of token expiry. The function should - take no arguments. 
- :type sas_token: str or callable - :param eventhub: The name of the EventHub, if not already included in the address URL. - :type eventhub: str - :param debug: Whether to output network trace logs to the logger. Default - is `False`. - :type debug: bool - :param http_proxy: HTTP proxy settings. This must be a dictionary with the following - keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). - Additionally the following keys may also be present: 'username', 'password'. - :type http_proxy: dict[str, Any] - :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. - The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. - :type auth_timeout: int - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START create_eventhub_client_sas_token] - :end-before: [END create_eventhub_client_sas_token] - :language: python - :dedent: 4 - :caption: Create an EventHubClient from an existing auth token or token generator. - - """ - address = _build_uri(address, eventhub) - return cls(address, sas_token=sas_token, **kwargs) - - @classmethod - def from_connection_string(cls, conn_str, eventhub=None, **kwargs): - """Create an EventHubClient from a connection string. - - :param conn_str: The connection string. - :type conn_str: str - :param eventhub: The name of the EventHub, if the EntityName is - not included in the connection string. - :type eventhub: str - :param debug: Whether to output network trace logs to the logger. Default - is `False`. - :type debug: bool - :param http_proxy: HTTP proxy settings. This must be a dictionary with the following - keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). - Additionally the following keys may also be present: 'username', 'password'. - :type http_proxy: dict[str, Any] - :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. - The default value is 60 seconds. 
If set to 0, no timeout will be enforced from the client. - :type auth_timeout: int - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START create_eventhub_client_connstr] - :end-before: [END create_eventhub_client_connstr] - :language: python - :dedent: 4 - :caption: Create an EventHubClient from a connection string. - - """ - address, policy, key, entity = _parse_conn_str(conn_str) - entity = eventhub or entity - address = _build_uri(address, entity) - return cls(address, username=policy, password=key, **kwargs) - - @classmethod - def from_iothub_connection_string(cls, conn_str, **kwargs): - """ - Create an EventHubClient from an IoTHub connection string. - - :param conn_str: The connection string. - :type conn_str: str - :param debug: Whether to output network trace logs to the logger. Default - is `False`. - :type debug: bool - :param http_proxy: HTTP proxy settings. This must be a dictionary with the following - keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). - Additionally the following keys may also be present: 'username', 'password'. - :type http_proxy: dict[str, Any] - :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. - The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. - :type auth_timeout: int - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START create_eventhub_client_iot_connstr] - :end-before: [END create_eventhub_client_iot_connstr] - :language: python - :dedent: 4 - :caption: Create an EventHubClient from an IoTHub connection string. 
- - """ - address, policy, key, _ = _parse_conn_str(conn_str) - hub_name = address.split('.')[0] - username = "{}@sas.root.{}".format(policy, hub_name) - password = _generate_sas_token(address, policy, key) - client = cls("amqps://" + address, username=username, password=password, **kwargs) - client._auth_config = { # pylint: disable=protected-access - 'iot_username': policy, - 'iot_password': key, - 'username': username, - 'password': password} - return client - def _create_auth(self, username=None, password=None): """ Create an ~uamqp.authentication.SASTokenAuth instance to authenticate @@ -270,7 +56,18 @@ def _create_auth(self, username=None, password=None): :param password: The shared access key. :type password: str """ - if self.sas_token: + http_proxy = self.config.http_proxy + transport_type = self.config.transport_type + auth_timeout = self.config.auth_timeout + if self.aad_credential and self.sas_token: + raise EventHubError("Can't have both sas_token and aad credential") + + elif self.aad_credential: + get_jwt_token = functools.partial(self.aad_credential.get_token, ['https://eventhubs.azure.net//.default']) + return authentication.JWTTokenAuth(self.auth_uri, self.auth_uri, + get_jwt_token, http_proxy=http_proxy, + transport_type=transport_type) + elif self.sas_token: token = self.sas_token() if callable(self.sas_token) else self.sas_token try: expiry = int(parse_sas_token(token)['se']) @@ -279,122 +76,19 @@ def _create_auth(self, username=None, password=None): return authentication.SASTokenAuth( self.auth_uri, self.auth_uri, token, expires_at=expiry, - timeout=self.auth_timeout, - http_proxy=self.http_proxy) + timeout=auth_timeout, + http_proxy=http_proxy, + transport_type=transport_type) username = username or self._auth_config['username'] password = password or self._auth_config['password'] if "@sas.root" in username: return authentication.SASLPlain( - self.address.hostname, username, password, http_proxy=self.http_proxy) + self.address.hostname, 
username, password, http_proxy=http_proxy, transport_type=transport_type) return authentication.SASTokenAuth.from_shared_access_key( - self.auth_uri, username, password, timeout=self.auth_timeout, http_proxy=self.http_proxy) - - def create_properties(self): # pylint: disable=no-self-use - """ - Format the properties with which to instantiate the connection. - This acts like a user agent over HTTP. - - :rtype: dict - """ - properties = {} - properties["product"] = "eventhub.python" - properties["version"] = __version__ - properties["framework"] = "Python {}.{}.{}".format(*sys.version_info[0:3]) - properties["platform"] = sys.platform - return properties + self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) - def _close_clients(self): - """ - Close all open Sender/Receiver clients. - """ - for client in self.clients: - client.close() - - def _start_clients(self): - for client in self.clients: - try: - if not client.running: - client.open() - except Exception as exp: # pylint: disable=broad-except - client.close(exception=exp) - - def _process_redirect_uri(self, redirect): - redirect_uri = redirect.address.decode('utf-8') - auth_uri, _, _ = redirect_uri.partition("/ConsumerGroups") - self.address = urlparse(auth_uri) - self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) - self.eh_name = self.address.path.lstrip('/') - self.mgmt_target = redirect_uri - - def _handle_redirect(self, redirects): - if len(redirects) != len(self.clients): - raise EventHubError("Some clients are attempting to redirect the connection.") - if not all(r.hostname == redirects[0].hostname for r in redirects): - raise EventHubError("Multiple clients attempting to redirect to different hosts.") - self._process_redirect_uri(redirects[0]) - for client in self.clients: - client.open() - - def run(self): - """ - Run the EventHubClient in blocking mode. 
- Opens the connection and starts running all Sender/Receiver clients. - Returns a list of the start up results. For a succcesful client start the - result will be `None`, otherwise the exception raised. - If all clients failed to start, then run will fail, shut down the connection - and raise an exception. - If at least one client starts up successfully the run command will succeed. - - :rtype: list[~azure.eventhub.common.EventHubError] - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_run] - :end-before: [END eventhub_client_run] - :language: python - :dedent: 4 - :caption: Run the EventHubClient in blocking mode. - - """ - log.info("%r: Starting %r clients", self.container_id, len(self.clients)) - try: - self._start_clients() - redirects = [c.redirected for c in self.clients if c.redirected] - failed = [c.error for c in self.clients if c.error] - if failed and len(failed) == len(self.clients): - log.warning("%r: All clients failed to start.", self.container_id) - raise failed[0] - if failed: - log.warning("%r: %r clients failed to start.", self.container_id, len(failed)) - elif redirects: - self._handle_redirect(redirects) - except EventHubError: - self.stop() - raise - except Exception as e: - self.stop() - raise EventHubError(str(e)) - return failed - - def stop(self): - """ - Stop the EventHubClient and all its Sender/Receiver clients. - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_stop] - :end-before: [END eventhub_client_stop] - :language: python - :dedent: 4 - :caption: Stop the EventHubClient and all its Sender/Receiver clients. - - """ - log.info("%r: Stopping %r clients", self.container_id, len(self.clients)) - self.stopped = True - self._close_clients() - - def get_eventhub_info(self): + def get_eventhub_information(self): """ Get details on the specified EventHub. 
Keys in the details dictionary include: @@ -409,7 +103,7 @@ def get_eventhub_info(self): """ alt_creds = { "username": self._auth_config.get("iot_username"), - "password":self._auth_config.get("iot_password")} + "password": self._auth_config.get("iot_password")} try: mgmt_auth = self._create_auth(**alt_creds) mgmt_client = uamqp.AMQPClient(self.mgmt_target, auth=mgmt_auth, debug=self.debug) @@ -433,9 +127,12 @@ def get_eventhub_info(self): finally: mgmt_client.close() - def add_receiver( - self, consumer_group, partition, offset=None, prefetch=300, - operation=None, keep_alive=30, auto_reconnect=True): + def create_receiver( + self, consumer_group, partition, offset=None, epoch=None, operation=None, + prefetch=None, + keep_alive=None, + auto_reconnect=None, + ): """ Add a receiver to the client for a particular consumer group and partition. @@ -461,56 +158,24 @@ def add_receiver( :caption: Add a receiver to the client for a particular consumer group and partition. """ + keep_alive = self.config.keep_alive if keep_alive is None else keep_alive + auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect + prefetch = self.config.prefetch if prefetch is None else prefetch + path = self.address.path + operation if operation else self.address.path source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( self.address.hostname, path, consumer_group, partition) handler = Receiver( - self, source_url, offset=offset, prefetch=prefetch, - keep_alive=keep_alive, auto_reconnect=auto_reconnect) + self, source_url, offset=offset, epoch=epoch, prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) self.clients.append(handler) return handler - def add_epoch_receiver( + def create_epoch_receiver( self, consumer_group, partition, epoch, prefetch=300, - operation=None, keep_alive=30, auto_reconnect=True): - """ - Add a receiver to the client with an epoch value. 
Only a single epoch receiver - can connect to a partition at any given time - additional epoch receivers must have - a higher epoch value or they will be rejected. If a 2nd epoch receiver has - connected, the first will be closed. + operation=None): + return self.create_receiver(consumer_group, partition, epoch=epoch, prefetch=prefetch, operation=operation) - :param consumer_group: The name of the consumer group. - :type consumer_group: str - :param partition: The ID of the partition. - :type partition: str - :param epoch: The epoch value for the receiver. - :type epoch: int - :param prefetch: The message prefetch count of the receiver. Default is 300. - :type prefetch: int - :operation: An optional operation to be appended to the hostname in the source URL. - The value must start with `/` character. - :type operation: str - :rtype: ~azure.eventhub.receiver.Receiver - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START create_eventhub_client_epoch_receiver] - :end-before: [END create_eventhub_client_epoch_receiver] - :language: python - :dedent: 4 - :caption: Add a receiver to the client with an epoch value. - - """ - path = self.address.path + operation if operation else self.address.path - source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( - self.address.hostname, path, consumer_group, partition) - handler = Receiver( - self, source_url, prefetch=prefetch, epoch=epoch, - keep_alive=keep_alive, auto_reconnect=auto_reconnect) - self.clients.append(handler) - return handler - - def add_sender(self, partition=None, operation=None, send_timeout=60, keep_alive=30, auto_reconnect=True): + def create_sender(self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None): """ Add a sender to the client to send EventData object to an EventHub. 
@@ -544,8 +209,11 @@ def add_sender(self, partition=None, operation=None, send_timeout=60, keep_alive target = "amqps://{}{}".format(self.address.hostname, self.address.path) if operation: target = target + operation + send_timeout = self.config.send_timeout if send_timeout is None else send_timeout + keep_alive = self.config.keep_alive if keep_alive is None else keep_alive + auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect + handler = Sender( - self, target, partition=partition, send_timeout=send_timeout, - keep_alive=keep_alive, auto_reconnect=auto_reconnect) + self, target, partition=partition, send_timeout=send_timeout, keep_alive=keep_alive, auto_reconnect=auto_reconnect) self.clients.append(handler) return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py new file mode 100644 index 000000000000..944bf8d1c15e --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -0,0 +1,312 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# --------------------------------------------------------------------------------------------
+from __future__ import unicode_literals
+
+import logging
+import datetime
+import sys
+import uuid
+import time
+import functools
+from abc import abstractmethod
+try:
+    from urlparse import urlparse
+    from urllib import unquote_plus, urlencode, quote_plus
+except ImportError:
+    from urllib.parse import urlparse, unquote_plus, urlencode, quote_plus
+
+import uamqp
+from uamqp import Message
+from uamqp import authentication
+from uamqp import constants
+
+from azure.eventhub import __version__
+from azure.eventhub.sender import Sender
+from azure.eventhub.receiver import Receiver
+from azure.eventhub.common import EventHubError, parse_sas_token
+from azure.eventhub.configuration import Configuration
+
+log = logging.getLogger(__name__)
+
+
+def _parse_conn_str(conn_str):
+    endpoint = None
+    shared_access_key_name = None
+    shared_access_key = None
+    entity_path = None
+    for element in conn_str.split(';'):
+        key, _, value = element.partition('=')
+        if key.lower() == 'endpoint':
+            endpoint = value.rstrip('/')
+        elif key.lower() == 'hostname':
+            endpoint = value.rstrip('/')
+        elif key.lower() == 'sharedaccesskeyname':
+            shared_access_key_name = value
+        elif key.lower() == 'sharedaccesskey':
+            shared_access_key = value
+        elif key.lower() == 'entitypath':
+            entity_path = value
+    if not all([endpoint, shared_access_key_name, shared_access_key]):
+        raise ValueError("Invalid connection string")
+    return endpoint, shared_access_key_name, shared_access_key, entity_path
+
+
+def _generate_sas_token(uri, policy, key, expiry=None):
+    """Create a shared access signature token as a string literal.
+    :returns: SAS token as string literal.
+    :rtype: str
+    """
+    from base64 import b64encode, b64decode
+    from hashlib import sha256
+    from hmac import HMAC
+    if not expiry:
+        expiry = time.time() + 3600  # Default to 1 hour.
+ encoded_uri = quote_plus(uri) + ttl = int(expiry) + sign_key = '%s\n%d' % (encoded_uri, ttl) + signature = b64encode(HMAC(b64decode(key), sign_key.encode('utf-8'), sha256).digest()) + result = { + 'sr': uri, + 'sig': signature, + 'se': str(ttl)} + if policy: + result['skn'] = policy + return 'SharedAccessSignature ' + urlencode(result) + + +def _build_uri(address, entity): + parsed = urlparse(address) + if parsed.path: + return address + if not entity: + raise ValueError("No EventHub specified") + address += "/" + str(entity) + return address + + +class EventHubClientAbstract(object): + """ + The EventHubClient class defines a high level interface for sending + events to and receiving events from the Azure Event Hubs service. + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START create_eventhub_client] + :end-before: [END create_eventhub_client] + :language: python + :dedent: 4 + :caption: Create a new instance of the Event Hub client + + """ + + def __init__( + self, address, username=None, password=None, sas_token=None, aad_credential=None, **kwargs): + """ + Constructs a new EventHubClient with the given address URL. + + :param address: The full URI string of the Event Hub. This can optionally + include URL-encoded access name and key. + :type address: str + :param username: The name of the shared access policy. This must be supplied + if not encoded into the address. + :type username: str + :param password: The shared access key. This must be supplied if not encoded + into the address. + :type password: str + :param debug: Whether to output network trace logs to the logger. Default + is `False`. + :type debug: bool + :param http_proxy: HTTP proxy settings. This must be a dictionary with the following + keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). + Additionally the following keys may also be present: 'username', 'password'. 
+ :type http_proxy: dict[str, Any] + :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. + The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. + :type auth_timeout: int + :param sas_token: A SAS token or function that returns a SAS token. If a function is supplied, + it will be used to retrieve subsequent tokens in the case of token expiry. The function should + take no arguments. + :type sas_token: str or callable + """ + self.container_id = "eventhub.pysdk-" + str(uuid.uuid4())[:8] + self.sas_token = sas_token + self.address = urlparse(address) + self.aad_credential = aad_credential + self.eh_name = self.address.path.lstrip('/') + # self.http_proxy = kwargs.get("http_proxy") + self.keep_alive = kwargs.get("keep_alive", 30) + self.auto_reconnect = kwargs.get("auto_reconnect", True) + self.mgmt_target = "amqps://{}/{}".format(self.address.hostname, self.eh_name) + url_username = unquote_plus(self.address.username) if self.address.username else None + username = username or url_username + url_password = unquote_plus(self.address.password) if self.address.password else None + password = password or url_password + if (not username or not password) and not sas_token: + raise ValueError("Please supply either username and password, or a SAS token") + self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) + self._auth_config = {'username': username, 'password': password} + self.get_auth = functools.partial(self._create_auth) + # self.debug = kwargs.get("debug", False) # debug + #self.auth_timeout = auth_timeout + + self.clients = [] + self.stopped = False + self.config = Configuration(**kwargs) + self.debug = self.config.network_tracing + + log.info("%r: Created the Event Hub client", self.container_id) + + @classmethod + def from_sas_token(cls, address, sas_token, eventhub=None, **kwargs): + """Create an EventHubClient from an existing auth token or token generator. 
+ + :param address: The Event Hub address URL + :type address: str + :param sas_token: A SAS token or function that returns a SAS token. If a function is supplied, + it will be used to retrieve subsequent tokens in the case of token expiry. The function should + take no arguments. + :type sas_token: str or callable + :param eventhub: The name of the EventHub, if not already included in the address URL. + :type eventhub: str + :param debug: Whether to output network trace logs to the logger. Default + is `False`. + :type debug: bool + :param http_proxy: HTTP proxy settings. This must be a dictionary with the following + keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). + Additionally the following keys may also be present: 'username', 'password'. + :type http_proxy: dict[str, Any] + :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. + The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. + :type auth_timeout: int + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START create_eventhub_client_sas_token] + :end-before: [END create_eventhub_client_sas_token] + :language: python + :dedent: 4 + :caption: Create an EventHubClient from an existing auth token or token generator. + + """ + address = _build_uri(address, eventhub) + return cls(address, sas_token=sas_token, **kwargs) + + @classmethod + def from_connection_string(cls, conn_str, eventhub=None, **kwargs): + """Create an EventHubClient from a connection string. + + :param conn_str: The connection string. + :type conn_str: str + :param eventhub: The name of the EventHub, if the EntityName is + not included in the connection string. + :type eventhub: str + :param debug: Whether to output network trace logs to the logger. Default + is `False`. + :type debug: bool + :param http_proxy: HTTP proxy settings. 
This must be a dictionary with the following + keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). + Additionally the following keys may also be present: 'username', 'password'. + :type http_proxy: dict[str, Any] + :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. + The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. + :type auth_timeout: int + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START create_eventhub_client_connstr] + :end-before: [END create_eventhub_client_connstr] + :language: python + :dedent: 4 + :caption: Create an EventHubClient from a connection string. + + """ + address, policy, key, entity = _parse_conn_str(conn_str) + entity = eventhub or entity + address = _build_uri(address, entity) + return cls(address, username=policy, password=key, **kwargs) + + @classmethod + def from_iothub_connection_string(cls, conn_str, **kwargs): + """ + Create an EventHubClient from an IoTHub connection string. + + :param conn_str: The connection string. + :type conn_str: str + :param debug: Whether to output network trace logs to the logger. Default + is `False`. + :type debug: bool + :param http_proxy: HTTP proxy settings. This must be a dictionary with the following + keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). + Additionally the following keys may also be present: 'username', 'password'. + :type http_proxy: dict[str, Any] + :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. + The default value is 60 seconds. If set to 0, no timeout will be enforced from the client. + :type auth_timeout: int + + Example: + .. 
literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START create_eventhub_client_iot_connstr] + :end-before: [END create_eventhub_client_iot_connstr] + :language: python + :dedent: 4 + :caption: Create an EventHubClient from an IoTHub connection string. + + """ + address, policy, key, _ = _parse_conn_str(conn_str) + hub_name = address.split('.')[0] + username = "{}@sas.root.{}".format(policy, hub_name) + password = _generate_sas_token(address, policy, key) + client = cls("amqps://" + address, username=username, password=password, **kwargs) + client._auth_config = { # pylint: disable=protected-access + 'iot_username': policy, + 'iot_password': key, + 'username': username, + 'password': password} + return client + + @classmethod + def from_aad_credential(cls, address, aad_credential, eventhub=None, **kwargs): + address = _build_uri(address, eventhub) + return cls(address, aad_credential=aad_credential, **kwargs) + + @abstractmethod + def _create_auth(self, username=None, password=None): + pass + + def create_properties(self): # pylint: disable=no-self-use + """ + Format the properties with which to instantiate the connection. + This acts like a user agent over HTTP. 
+ + :rtype: dict + """ + properties = {} + properties["product"] = "eventhub.python" + properties["version"] = __version__ + properties["framework"] = "Python {}.{}.{}".format(*sys.version_info[0:3]) + properties["platform"] = sys.platform + return properties + + def _process_redirect_uri(self, redirect): + redirect_uri = redirect.address.decode('utf-8') + auth_uri, _, _ = redirect_uri.partition("/ConsumerGroups") + self.address = urlparse(auth_uri) + self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) + self.eh_name = self.address.path.lstrip('/') + self.mgmt_target = redirect_uri + + @abstractmethod + def get_eventhub_information(self): + pass + + @abstractmethod + def create_receiver( + self, consumer_group, partition, epoch=None, offset=None, prefetch=300, + operation=None): + pass + + @abstractmethod + def create_sender(self, partition=None, operation=None, send_timeout=60): + pass diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index 76e315d2a25e..3cb0c23bd4d3 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -4,6 +4,7 @@ # -------------------------------------------------------------------------------------------- from __future__ import unicode_literals +from enum import Enum import datetime import calendar import json @@ -83,7 +84,7 @@ class EventData(object): PROP_TIMESTAMP = b"x-opt-enqueued-time" PROP_DEVICE_ID = b"iothub-connection-device-id" - def __init__(self, body=None, batch=None, to_device=None, message=None): + def __init__(self, body=None, to_device=None, message=None): """ Initialize EventData. 
@@ -102,9 +103,7 @@ def __init__(self, body=None, batch=None, to_device=None, message=None): self.msg_properties = MessageProperties() if to_device: self.msg_properties.to = '/devices/{}/messages/devicebound'.format(to_device) - if batch: - self.message = BatchMessage(data=batch, multi_messages=True, properties=self.msg_properties) - elif message: + if message: self.message = message self.msg_properties = message.properties self._annotations = message.annotations @@ -258,6 +257,16 @@ def body_as_json(self, encoding='UTF-8'): except Exception as e: raise TypeError("Event data is not compatible with JSON type: {}".format(e)) + def encode_message(self): + return self.message.encode_message() + + +class BatchSendEventData(EventData): + def __init__(self, batch_event_data): + # TODO: rethink if to_device should be included in + self.message = BatchMessage(data=batch_event_data, multi_messages=True, properties=None) + + class Offset(object): """ diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py new file mode 100644 index 000000000000..b68df67a5362 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py @@ -0,0 +1,21 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +from uamqp.constants import TransportType + + +class Configuration(object): + def __init__(self, **kwargs): + self.user_agent = kwargs.get("user_agent") + self.max_retries = kwargs.get("max_retries", 3) + self.network_tracing = kwargs.get("debug", False) + self.http_proxy = kwargs.get("http_proxy") + self.auto_reconnect = kwargs.get("auto_reconnect", False) + self.keep_alive = kwargs.get("keep_alive", 1) + self.transport_type = TransportType.AmqpOverWebsocket if self.http_proxy \ + else kwargs.get("transport_type", TransportType.Amqp) + self.auth_timeout = kwargs.get("auth_timeout", 60) + self.prefetch = kwargs.get("prefetch") + self.send_timeout = kwargs.get("send_timeout", 60) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py new file mode 100644 index 000000000000..788d8f4cf244 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py @@ -0,0 +1,23 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- +from .policies import ( + RetryPolicy, + ProxyPolicy, + AutoReconnectPolicy, + KeepAlivePolicy, + UserAgentPolicy, + NetworkTraceLoggingPolicy, + RedirectPolicy, +) + +__all__ = [ + "RetryPolicy", + "ProxyPolicy", + "AutoReconnectPolicy", + "KeepAlivePolicy", + "UserAgentPolicy", + "NetworkTraceLoggingPolicy", + "RedirectPolicy", +] diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py new file mode 100644 index 000000000000..1d470c998e90 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- +from uamqp import AMQPClient +from abc import abstractmethod + + +class AMQPClientPolicy(object): + + def __init__(self, **kwargs): + pass + + @abstractmethod + def apply(self, amqp_client): + # type: (AMQPClient) -> None + pass diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py new file mode 100644 index 000000000000..f22c6a7fb600 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py @@ -0,0 +1,63 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- +from .base import AMQPClientPolicy +from uamqp import AMQPClient + + +class RetryPolicy(AMQPClientPolicy): + + def __init__(self, **kwargs): + self.max_retries = kwargs.get("max_retries", 3) + + def apply(self, amqp_client): # type: (AMQPClient) -> None + raise NotImplementedError("Placeholder for future implementation") + + +class ProxyPolicy(AMQPClientPolicy): + def __init__(self, **kwargs): + self.http_proxy = kwargs.get("http_proxy") + + def apply(self, amqp_client): # type: (AMQPClient) -> None + raise NotImplementedError("Placeholder for future implementation") + + +class AutoReconnectPolicy(AMQPClientPolicy): + def __init__(self, **kwargs): + self.auto_reconnect = kwargs.get("auto_reconnect", True) + + def apply(self, amqp_client): # type: (AMQPClient) -> None + raise NotImplementedError("Placeholder for future implementation") + + +class KeepAlivePolicy(AMQPClientPolicy): + def __init__(self, **kwargs): + self.keep_alive = kwargs.get("keep_alive", 30) + + def apply(self, amqp_client): # type: (AMQPClient) -> None + raise NotImplementedError("Placeholder for future implementation") + + +class UserAgentPolicy(AMQPClientPolicy): + def __init__(self, **kwargs): + self.user_agent = kwargs.get("user_agent") + + def apply(self, amqp_client): # type: (AMQPClient) -> None + raise NotImplementedError("Placeholder for future implementation") + + +class NetworkTraceLoggingPolicy(AMQPClientPolicy): + def __init__(self, **kwargs): + self.network_trace_logging = kwargs.get("debug", False) + + def apply(self, amqp_client): # type: (AMQPClient) -> None + raise NotImplementedError("Placeholder for future implementation") + + +class RedirectPolicy(AMQPClientPolicy): + def __init__(self, **kwargs): + self.redirect = kwargs.get("redirect") + + def apply(self, amqp_client): # type: (AMQPClient) -> None + raise NotImplementedError("Placeholder for future implementation") diff --git 
a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 486c75b3c682..8d38cc758a07 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -55,6 +55,7 @@ def __init__(self, client, source, offset=None, prefetch=300, epoch=None, keep_a self.epoch = epoch self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect + # max_retries = client.config.retry_policy.max_retries self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) self.reconnect_backoff = 1 self.properties = None @@ -288,7 +289,7 @@ def receive(self, max_batch_size=None, timeout=None): if self.error: raise self.error if not self.running: - raise ValueError("Unable to receive until client has been started.") + self.open() data_batch = [] try: timeout_ms = 1000 * timeout if timeout else 0 @@ -327,3 +328,47 @@ def receive(self, max_batch_size=None, timeout=None): error = EventHubError("Receive failed: {}".format(e)) self.close(exception=error) raise error + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close(exc_val) + + def __iter__(self): + if not self.running: + self.open() + self.messages_iter = self._handler.receive_messages_iter() + return self + + def __next__(self): + while True: + try: + message = next(self.messages_iter) + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") + self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + log.info("Receiver detached. 
Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + self.close(exception=error) + raise error diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 0a7334050a5f..43a3a74f0cca 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -10,8 +10,9 @@ from uamqp import constants, errors from uamqp import SendClient +from uamqp.constants import MessageSendResult -from azure.eventhub.common import EventHubError, _error_handler +from azure.eventhub.common import EventHubError, EventData, BatchSendEventData, _error_handler log = logging.getLogger(__name__) @@ -30,7 +31,7 @@ class Sender(object): """ - def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=None, auto_reconnect=True): + def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=30, auto_reconnect=True): """ Instantiate an EventHub event Sender handler. 
@@ -60,6 +61,7 @@ def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=N self.error = None self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect + # max_retries = client.config.retry_policy.max_retries self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) self.reconnect_backoff = 1 self.name = "EHSender-{}".format(uuid.uuid4()) @@ -235,37 +237,10 @@ def close(self, exception=None): self.error = EventHubError("This send handler is now closed.") self._handler.close() - def send(self, event_data): - """ - Sends an event data and blocks until acknowledgement is - received or operation times out. - - :param event_data: The event to be sent. - :type event_data: ~azure.eventhub.common.EventData - :raises: ~azure.eventhub.common.EventHubError if the message fails to - send. - :return: The outcome of the message send. - :rtype: ~uamqp.constants.MessageSendResult - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_sync_send] - :end-before: [END eventhub_client_sync_send] - :language: python - :dedent: 4 - :caption: Sends an event data and blocks until acknowledgement is received or operation times out. 
- - """ - if self.error: - raise self.error - if not self.running: - raise ValueError("Unable to send until client has been started.") - if event_data.partition_key and self.partition: - raise ValueError("EventData partition key cannot be used with a partition sender.") - event_data.message.on_send_complete = self._on_outcome + def _send_event_data(self, event_data): try: self._handler.send_message(event_data.message) - if self._outcome != constants.MessageSendResult.Ok: + if self._outcome != MessageSendResult.Ok: raise Sender._error(self._outcome, self._condition) except errors.MessageException as failed: error = EventHubError(str(failed), failed) @@ -300,6 +275,71 @@ def send(self, event_data): else: return self._outcome + def send(self, event_data): + """ + Sends an event data and blocks until acknowledgement is + received or operation times out. + + :param event_data: The event to be sent. + :type event_data: ~azure.eventhub.common.EventData + :raises: ~azure.eventhub.common.EventHubError if the message fails to + send. + :return: The outcome of the message send. + :rtype: ~uamqp.constants.MessageSendResult + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START eventhub_client_sync_send] + :end-before: [END eventhub_client_sync_send] + :language: python + :dedent: 4 + :caption: Sends an event data and blocks until acknowledgement is received or operation times out. + + """ + if self.error: + raise self.error + if not self.running: + self.open() + if event_data.partition_key and self.partition: + raise ValueError("EventData partition key cannot be used with a partition sender.") + event_data.message.on_send_complete = self._on_outcome + return self._send_event_data(event_data) + + def send_batch(self, batch_event_data): + """ + Sends an event data and blocks until acknowledgement is + received or operation times out. + + :param event_data: The event to be sent. 
+ :type event_data: ~azure.eventhub.common.EventData + :raises: ~azure.eventhub.common.EventHubError if the message fails to + send. + :return: The outcome of the message send. + :rtype: ~uamqp.constants.MessageSendResult + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START eventhub_client_sync_send] + :end-before: [END eventhub_client_sync_send] + :language: python + :dedent: 4 + :caption: Sends an event data and blocks until acknowledgement is received or operation times out. + + """ + if self.error: + raise self.error + event_data_list = list(batch_event_data) + if len(event_data_list) == 0: + raise ValueError("batch_event_data must not be empty") + for i in range(1, len(event_data_list)): + if event_data_list[i].partition_key != event_data_list[i-1].partition_key: + raise ValueError("partition key of all EventData must be the same if being sent in a batch") + if not self.running: + self.open() + wrapper_event_data = BatchSendEventData(event_data_list) + wrapper_event_data.message.on_send_complete = self._on_outcome + return self._send_event_data(wrapper_event_data) + def transfer(self, event_data, callback=None): """ Transfers an event data and notifies the callback when the operation is done. 
@@ -322,7 +362,7 @@ def transfer(self, event_data, callback=None): if self.error: raise self.error if not self.running: - raise ValueError("Unable to send until client has been started.") + self.open() if event_data.partition_key and self.partition: raise ValueError("EventData partition key cannot be used with a partition sender.") if callback: @@ -345,7 +385,7 @@ def wait(self): if self.error: raise self.error if not self.running: - raise ValueError("Unable to send until client has been started.") + self.open() try: self._handler.wait() except (errors.TokenExpired, errors.AuthenticationException): @@ -383,6 +423,12 @@ def _on_outcome(self, outcome, condition): self._outcome = outcome self._condition = condition + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close(exc_val) + @staticmethod def _error(outcome, condition): - return None if outcome == constants.MessageSendResult.Ok else EventHubError(outcome, condition) + return None if outcome == MessageSendResult.Ok else EventHubError(outcome, condition) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py b/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py index e0aa25dc2e8d..f24906ea4455 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py @@ -5,7 +5,8 @@ import logging import asyncio -from azure.eventhub import Offset, EventHubClientAsync +from azure.eventhub import Offset +from azure.eventhub.aio import EventHubClient from azure.eventprocessorhost.partition_pump import PartitionPump @@ -64,7 +65,7 @@ async def open_clients_async(self): """ await self.partition_context.get_initial_offset_async() # Create event hub client and receive handler and set options - self.eh_client = EventHubClientAsync( + self.eh_client = EventHubClient( self.host.eh_config.client_address, 
debug=self.host.eph_options.debug_trace, http_proxy=self.host.eph_options.http_proxy) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/partition_manager.py b/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/partition_manager.py index 41aaded73b56..d532846a5476 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/partition_manager.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/partition_manager.py @@ -8,7 +8,8 @@ from queue import Queue from collections import Counter -from azure.eventhub import EventHubClientAsync +from azure.eventhub.aio import EventHubClient + from azure.eventprocessorhost.eh_partition_pump import EventHubPartitionPump from azure.eventprocessorhost.cancellation_token import CancellationToken @@ -36,7 +37,7 @@ async def get_partition_ids_async(self): """ if not self.partition_ids: try: - eh_client = EventHubClientAsync( + eh_client = EventHubClient( self.host.eh_config.client_address, debug=self.host.eph_options.debug_trace, http_proxy=self.host.eph_options.http_proxy) diff --git a/sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py b/sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py new file mode 100644 index 000000000000..29e70b35631c --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py @@ -0,0 +1,10 @@ +import logging +from azure.eventhub import EventHubClient, Offset + +logging.basicConfig(level=logging.DEBUG) + +ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" +USER = "RootManageSharedAccessKey" +KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" +client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) +print(client.get_eventhub_info()) \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhubs/debugging/receive.py b/sdk/eventhub/azure-eventhubs/debugging/receive.py new file mode 100644 index 000000000000..3c0c6fe19026 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/debugging/receive.py @@ -0,0 
+1,14 @@ +import logging +from azure.eventhub import EventHubClient, Offset, EventData + +#logging.basicConfig(level=logging.DEBUG) + +ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" +USER = "RootManageSharedAccessKey" +KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" +client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) +receiver = client.create_receiver("$default", "1", prefetch=100, offset=Offset("-1", True)) +with receiver: + ed = receiver.receive() # type: list[EventData] + for item in ed: + print(item.sequence_number, item.offset.value, item.enqueued_time, item.partition_key, item.application_properties) diff --git a/sdk/eventhub/azure-eventhubs/debugging/receive_async.py b/sdk/eventhub/azure-eventhubs/debugging/receive_async.py new file mode 100644 index 000000000000..cfeec70e157b --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/debugging/receive_async.py @@ -0,0 +1,26 @@ +import logging +from azure.eventhub.aio import EventHubClient +from azure.eventhub import Offset +import asyncio + +logging.basicConfig(level=logging.DEBUG) + +ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" +USER = "RootManageSharedAccessKey" +KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" +client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) + + +async def batch_async_receiver(): + async with client.add_async_receiver("$default", "1", prefetch=1, offset = Offset("-1")) as receiver: + event_data = await receiver.receive() + print(event_data) +asyncio.run(batch_async_receiver()) + +''' +async def iter_async_receiver(): + async with client.add_async_receiver("$default", "1", prefetch=1, offset = Offset("-1")) as receiver: + async for item in receiver: + print(item) +asyncio.run(iter_async_receiver()) +''' \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhubs/debugging/send.py b/sdk/eventhub/azure-eventhubs/debugging/send.py new file mode 100644 index 000000000000..99b9fa0384bc --- 
/dev/null +++ b/sdk/eventhub/azure-eventhubs/debugging/send.py @@ -0,0 +1,34 @@ +import logging +from azure.eventhub import EventHubClient, EventData, MessageSendResult +import uuid + +#logging.basicConfig(level=logging.DEBUG) +ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" +USER = "RootManageSharedAccessKey" +KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" +client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) + +def send(): + with client.create_sender() as sender: + for i in range(1): + print("Sending message: {}".format(i)) + ed = EventData("Message with different content {}".format(i)) + ed.partition_key = "20" + ed.application_properties = {"partition_test": "same partition key 20"} + sender.send(ed) + + +def send_batch(): + with client.create_sender() as sender: + batch_event_data = [] + for i in range(5): + event_data = EventData("Message with different content {}".format(i)) + event_data.application_properties = {"batch_send_index": str(i)} + event_data.partition_key = "aaa" + batch_event_data.append(event_data) + send_result = sender.send_batch(batch_event_data) + if send_result == MessageSendResult.Ok: + print("Sent") + + +send_batch() diff --git a/sdk/eventhub/azure-eventhubs/debugging/send_async.py b/sdk/eventhub/azure-eventhubs/debugging/send_async.py new file mode 100644 index 000000000000..345a05ac834f --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/debugging/send_async.py @@ -0,0 +1,20 @@ +import logging +from azure.eventhub.aio import EventHubClient +from azure.eventhub import EventData +import asyncio + +logging.basicConfig(level=logging.DEBUG) + +ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" +USER = "RootManageSharedAccessKey" +KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" +client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) + + +async def send_async(): + async with client.add_async_sender(partition="1") as sender: + for i in range(1): + 
print("Sending message: {}".format(i)) + await sender.send(EventData("Message id{}".format(i))) + +asyncio.run(send_async()) From 0e88a67210585264aa60f98dd3309a21c2839be2 Mon Sep 17 00:00:00 2001 From: yijxie Date: Sat, 18 May 2019 17:09:22 -0700 Subject: [PATCH 02/49] Remove policies --- .../azure/eventhub/policies/__init__.py | 23 ------- .../azure/eventhub/policies/base.py | 17 ----- .../azure/eventhub/policies/policies.py | 63 ------------------- 3 files changed, 103 deletions(-) delete mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py delete mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py delete mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py deleted file mode 100644 index 788d8f4cf244..000000000000 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# -------------------------------------------------------------------------------------------- -from .policies import ( - RetryPolicy, - ProxyPolicy, - AutoReconnectPolicy, - KeepAlivePolicy, - UserAgentPolicy, - NetworkTraceLoggingPolicy, - RedirectPolicy, -) - -__all__ = [ - "RetryPolicy", - "ProxyPolicy", - "AutoReconnectPolicy", - "KeepAlivePolicy", - "UserAgentPolicy", - "NetworkTraceLoggingPolicy", - "RedirectPolicy", -] diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py deleted file mode 100644 index 1d470c998e90..000000000000 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/base.py +++ /dev/null @@ -1,17 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# -------------------------------------------------------------------------------------------- -from uamqp import AMQPClient -from abc import abstractmethod - - -class AMQPClientPolicy(object): - - def __init__(self, **kwargs): - pass - - @abstractmethod - def apply(self, amqp_client): - # type: (AMQPClient) -> None - pass diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py deleted file mode 100644 index f22c6a7fb600..000000000000 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/policies/policies.py +++ /dev/null @@ -1,63 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. 
-# -------------------------------------------------------------------------------------------- -from .base import AMQPClientPolicy -from uamqp import AMQPClient - - -class RetryPolicy(AMQPClientPolicy): - - def __init__(self, **kwargs): - self.max_retries = kwargs.get("max_retries", 3) - - def apply(self, amqp_client): # type: (AMQPClient) -> None - raise NotImplementedError("Placeholder for future implementation") - - -class ProxyPolicy(AMQPClientPolicy): - def __init__(self, **kwargs): - self.http_proxy = kwargs.get("http_proxy") - - def apply(self, amqp_client): # type: (AMQPClient) -> None - raise NotImplementedError("Placeholder for future implementation") - - -class AutoReconnectPolicy(AMQPClientPolicy): - def __init__(self, **kwargs): - self.auto_reconnect = kwargs.get("auto_reconnect", True) - - def apply(self, amqp_client): # type: (AMQPClient) -> None - raise NotImplementedError("Placeholder for future implementation") - - -class KeepAlivePolicy(AMQPClientPolicy): - def __init__(self, **kwargs): - self.keep_alive = kwargs.get("keep_alive", 30) - - def apply(self, amqp_client): # type: (AMQPClient) -> None - raise NotImplementedError("Placeholder for future implementation") - - -class UserAgentPolicy(AMQPClientPolicy): - def __init__(self, **kwargs): - self.user_agent = kwargs.get("user_agent") - - def apply(self, amqp_client): # type: (AMQPClient) -> None - raise NotImplementedError("Placeholder for future implementation") - - -class NetworkTraceLoggingPolicy(AMQPClientPolicy): - def __init__(self, **kwargs): - self.network_trace_logging = kwargs.get("debug", False) - - def apply(self, amqp_client): # type: (AMQPClient) -> None - raise NotImplementedError("Placeholder for future implementation") - - -class RedirectPolicy(AMQPClientPolicy): - def __init__(self, **kwargs): - self.redirect = kwargs.get("redirect") - - def apply(self, amqp_client): # type: (AMQPClient) -> None - raise NotImplementedError("Placeholder for future implementation") From 
f348cf27dd1c86c4b5fe084d1b4c054d402f8993 Mon Sep 17 00:00:00 2001 From: yijxie Date: Sat, 18 May 2019 17:22:00 -0700 Subject: [PATCH 03/49] Remove debugging files --- .../debugging/get_eventhub_info.py | 10 ------ .../azure-eventhubs/debugging/receive.py | 14 -------- .../debugging/receive_async.py | 26 -------------- .../azure-eventhubs/debugging/send.py | 34 ------------------- .../azure-eventhubs/debugging/send_async.py | 20 ----------- 5 files changed, 104 deletions(-) delete mode 100644 sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py delete mode 100644 sdk/eventhub/azure-eventhubs/debugging/receive.py delete mode 100644 sdk/eventhub/azure-eventhubs/debugging/receive_async.py delete mode 100644 sdk/eventhub/azure-eventhubs/debugging/send.py delete mode 100644 sdk/eventhub/azure-eventhubs/debugging/send_async.py diff --git a/sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py b/sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py deleted file mode 100644 index 29e70b35631c..000000000000 --- a/sdk/eventhub/azure-eventhubs/debugging/get_eventhub_info.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging -from azure.eventhub import EventHubClient, Offset - -logging.basicConfig(level=logging.DEBUG) - -ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" -USER = "RootManageSharedAccessKey" -KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" -client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) -print(client.get_eventhub_info()) \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhubs/debugging/receive.py b/sdk/eventhub/azure-eventhubs/debugging/receive.py deleted file mode 100644 index 3c0c6fe19026..000000000000 --- a/sdk/eventhub/azure-eventhubs/debugging/receive.py +++ /dev/null @@ -1,14 +0,0 @@ -import logging -from azure.eventhub import EventHubClient, Offset, EventData - -#logging.basicConfig(level=logging.DEBUG) - -ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" 
-USER = "RootManageSharedAccessKey" -KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" -client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) -receiver = client.create_receiver("$default", "1", prefetch=100, offset=Offset("-1", True)) -with receiver: - ed = receiver.receive() # type: list[EventData] - for item in ed: - print(item.sequence_number, item.offset.value, item.enqueued_time, item.partition_key, item.application_properties) diff --git a/sdk/eventhub/azure-eventhubs/debugging/receive_async.py b/sdk/eventhub/azure-eventhubs/debugging/receive_async.py deleted file mode 100644 index cfeec70e157b..000000000000 --- a/sdk/eventhub/azure-eventhubs/debugging/receive_async.py +++ /dev/null @@ -1,26 +0,0 @@ -import logging -from azure.eventhub.aio import EventHubClient -from azure.eventhub import Offset -import asyncio - -logging.basicConfig(level=logging.DEBUG) - -ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" -USER = "RootManageSharedAccessKey" -KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" -client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) - - -async def batch_async_receiver(): - async with client.add_async_receiver("$default", "1", prefetch=1, offset = Offset("-1")) as receiver: - event_data = await receiver.receive() - print(event_data) -asyncio.run(batch_async_receiver()) - -''' -async def iter_async_receiver(): - async with client.add_async_receiver("$default", "1", prefetch=1, offset = Offset("-1")) as receiver: - async for item in receiver: - print(item) -asyncio.run(iter_async_receiver()) -''' \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhubs/debugging/send.py b/sdk/eventhub/azure-eventhubs/debugging/send.py deleted file mode 100644 index 99b9fa0384bc..000000000000 --- a/sdk/eventhub/azure-eventhubs/debugging/send.py +++ /dev/null @@ -1,34 +0,0 @@ -import logging -from azure.eventhub import EventHubClient, EventData, MessageSendResult -import uuid - 
-#logging.basicConfig(level=logging.DEBUG) -ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" -USER = "RootManageSharedAccessKey" -KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" -client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) - -def send(): - with client.create_sender() as sender: - for i in range(1): - print("Sending message: {}".format(i)) - ed = EventData("Message with different content {}".format(i)) - ed.partition_key = "20" - ed.application_properties = {"partition_test": "same partition key 20"} - sender.send(ed) - - -def send_batch(): - with client.create_sender() as sender: - batch_event_data = [] - for i in range(5): - event_data = EventData("Message with different content {}".format(i)) - event_data.application_properties = {"batch_send_index": str(i)} - event_data.partition_key = "aaa" - batch_event_data.append(event_data) - send_result = sender.send_batch(batch_event_data) - if send_result == MessageSendResult.Ok: - print("Sent") - - -send_batch() diff --git a/sdk/eventhub/azure-eventhubs/debugging/send_async.py b/sdk/eventhub/azure-eventhubs/debugging/send_async.py deleted file mode 100644 index 345a05ac834f..000000000000 --- a/sdk/eventhub/azure-eventhubs/debugging/send_async.py +++ /dev/null @@ -1,20 +0,0 @@ -import logging -from azure.eventhub.aio import EventHubClient -from azure.eventhub import EventData -import asyncio - -logging.basicConfig(level=logging.DEBUG) - -ADDRESS = "amqp://yijun-eventh.servicebus.windows.net/test_eventhub" -USER = "RootManageSharedAccessKey" -KEY = "a4xbgNrqFT3tlN5Ak1jWvhSXmnuClOjkNMTQ81posWA=" -client = EventHubClient(ADDRESS, username=USER, password=KEY, debug=True) - - -async def send_async(): - async with client.add_async_sender(partition="1") as sender: - for i in range(1): - print("Sending message: {}".format(i)) - await sender.send(EventData("Message id{}".format(i))) - -asyncio.run(send_async()) From be129cfe7a6b134f4a62e9c05c385df2a78eab6f Mon Sep 17 00:00:00 
2001 From: yijxie Date: Sun, 19 May 2019 00:44:25 -0700 Subject: [PATCH 04/49] Rename Offset to EventPosition --- .../azure/eventhub/__init__.py | 4 +- .../azure-eventhubs/azure/eventhub/common.py | 41 ++++++++++++++----- .../azure/eventhub/receiver.py | 6 ++- 3 files changed, 36 insertions(+), 15 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py index be8547693d2b..e2bcc43ed877 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py @@ -5,7 +5,7 @@ __version__ = "1.3.1" -from azure.eventhub.common import EventData, EventHubError, Offset +from azure.eventhub.common import EventData, EventHubError, EventPosition from azure.eventhub.client import EventHubClient from azure.eventhub.sender import Sender from azure.eventhub.receiver import Receiver @@ -15,7 +15,7 @@ __all__ = [ "EventData", "EventHubError", - "Offset", + "EventPosition", "EventHubClient", "Sender", "Receiver", diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index 3cb0c23bd4d3..99da4e1d09b6 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -135,7 +135,7 @@ def offset(self): :rtype: ~azure.eventhub.common.Offset """ try: - return Offset(self._annotations[EventData.PROP_OFFSET].decode('UTF-8')) + return EventPosition(self._annotations[EventData.PROP_OFFSET].decode('UTF-8')) except (KeyError, AttributeError): return None @@ -267,23 +267,22 @@ def __init__(self, batch_event_data): self.message = BatchMessage(data=batch_event_data, multi_messages=True, properties=None) - -class Offset(object): +class EventPosition(object): """ - The offset (position or timestamp) where a receiver starts. Examples: + The position(offset, sequence or timestamp) where a receiver starts. 
Examples: Beginning of the event stream: - >>> offset = Offset("-1") + >>> event_pos = EventPosition("-1") End of the event stream: - >>> offset = Offset("@latest") + >>> event_pos = EventPosition("@latest") Events after the specified offset: - >>> offset = Offset("12345") + >>> event_pos = EventPosition("12345") Events from the specified offset: - >>> offset = Offset("12345", True) + >>> event_pos = EventPosition("12345", True) Events after a datetime: - >>> offset = Offset(datetime.datetime.utcnow()) + >>> event_pos = EventPosition(datetime.datetime.utcnow()) Events after a specific sequence number: - >>> offset = Offset(1506968696002) + >>> event_pos = EventPosition(1506968696002) """ def __init__(self, value, inclusive=False): @@ -308,10 +307,30 @@ def selector(self): if isinstance(self.value, datetime.datetime): timestamp = (calendar.timegm(self.value.utctimetuple()) * 1000) + (self.value.microsecond/1000) return ("amqp.annotation.x-opt-enqueued-time {} '{}'".format(operator, int(timestamp))).encode('utf-8') - if isinstance(self.value, six.integer_types): + elif isinstance(self.value, six.integer_types): return ("amqp.annotation.x-opt-sequence-number {} '{}'".format(operator, self.value)).encode('utf-8') return ("amqp.annotation.x-opt-offset {} '{}'".format(operator, self.value)).encode('utf-8') + @staticmethod + def from_start_of_stream(): + return EventPosition("-1") + + @staticmethod + def from_end_of_stream(): + return EventPosition("@latest") + + @staticmethod + def from_offset(offset, inclusive=False): + return EventPosition(offset, inclusive) + + @staticmethod + def from_sequence(sequence, inclusive=False): + return EventPosition(sequence, inclusive) + + @staticmethod + def from_enqueued_time(enqueued_time, inclusive=False): + return EventPosition(enqueued_time, inclusive) + class EventHubError(Exception): """ diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 
8d38cc758a07..ac593e7faeee 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -51,11 +51,11 @@ def __init__(self, client, source, offset=None, prefetch=300, epoch=None, keep_a self.client = client self.source = source self.offset = offset + self.iter_started = False self.prefetch = prefetch self.epoch = epoch self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect - # max_retries = client.config.retry_policy.max_retries self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) self.reconnect_backoff = 1 self.properties = None @@ -338,7 +338,9 @@ def __exit__(self, exc_type, exc_val, exc_tb): def __iter__(self): if not self.running: self.open() - self.messages_iter = self._handler.receive_messages_iter() + if not self.iter_started: + self.iter_started = True + self.messages_iter = self._handler.receive_messages_iter() return self def __next__(self): From 0b257ec92fda30c0a5c1e19d37c0162ef52f35e7 Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 19 May 2019 11:32:13 -0700 Subject: [PATCH 05/49] make tests a namespace package --- sdk/eventhub/azure-eventhubs/tests/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 sdk/eventhub/azure-eventhubs/tests/__init__.py diff --git a/sdk/eventhub/azure-eventhubs/tests/__init__.py b/sdk/eventhub/azure-eventhubs/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 From 74d398e995e5996482e6d9aa5be6d220bbda3cf0 Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 19 May 2019 11:35:06 -0700 Subject: [PATCH 06/49] Revised test receive for new code --- .../eventprocessorhost/eh_partition_pump.py | 4 +- sdk/eventhub/azure-eventhubs/conftest.py | 31 ++- .../azure-eventhubs/tests/test_receive.py | 182 +++++++----------- 3 files changed, 83 insertions(+), 134 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py 
b/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py index f24906ea4455..d2c649f9a0a6 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventprocessorhost/eh_partition_pump.py @@ -5,7 +5,7 @@ import logging import asyncio -from azure.eventhub import Offset +from azure.eventhub import EventPosition from azure.eventhub.aio import EventHubClient from azure.eventprocessorhost.partition_pump import PartitionPump @@ -72,7 +72,7 @@ async def open_clients_async(self): self.partition_receive_handler = self.eh_client.add_async_receiver( self.partition_context.consumer_group_name, self.partition_context.partition_id, - Offset(self.partition_context.offset), + EventPosition(self.partition_context.offset), prefetch=self.host.eph_options.prefetch_count, keep_alive=self.host.eph_options.keep_alive_interval, auto_reconnect=self.host.eph_options.auto_reconnect_on_error, diff --git a/sdk/eventhub/azure-eventhubs/conftest.py b/sdk/eventhub/azure-eventhubs/conftest.py index 237a60918f17..bf9e6ab77c3b 100644 --- a/sdk/eventhub/azure-eventhubs/conftest.py +++ b/sdk/eventhub/azure-eventhubs/conftest.py @@ -19,7 +19,7 @@ collect_ignore.append("examples/async_examples") else: sys.path.append(os.path.join(os.path.dirname(__file__), "tests")) - from asynctests import MockEventProcessor + from tests.asynctests import MockEventProcessor from azure.eventprocessorhost import EventProcessorHost from azure.eventprocessorhost import EventHubPartitionPump from azure.eventprocessorhost import AzureStorageCheckpointLeaseManager @@ -29,8 +29,7 @@ from azure.eventprocessorhost.partition_pump import PartitionPump from azure.eventprocessorhost.partition_manager import PartitionManager -from azure import eventhub -from azure.eventhub import EventHubClient, Receiver, Offset +from azure.eventhub import EventHubClient, Receiver, EventPosition def get_logger(filename, level=logging.INFO): @@ -71,7 +70,7 @@ 
def create_eventhub(eventhub_config, client=None): raise ValueError("EventHub creation failed.") -def cleanup_eventhub(servicebus_config, hub_name, client=None): +def cleanup_eventhub(eventhub_config, hub_name, client=None): from azure.servicebus.control_client import ServiceBusService client = client or ServiceBusService( service_namespace=eventhub_config['namespace'], @@ -166,36 +165,36 @@ def device_id(): @pytest.fixture() def connstr_receivers(connection_str): client = EventHubClient.from_connection_string(connection_str, debug=False) - eh_hub_info = client.get_eventhub_info() + eh_hub_info = client.get_eventhub_information() partitions = eh_hub_info["partition_ids"] - recv_offset = Offset("@latest") + recv_offset = EventPosition("@latest") receivers = [] for p in partitions: - receivers.append(client.add_receiver("$default", p, prefetch=500, offset=Offset("@latest"))) - - client.run() - + receiver = client.create_receiver("$default", p, prefetch=500, offset=EventPosition("@latest")) + receivers.append(receiver) + receiver.receive(timeout=1) for r in receivers: r.receive(timeout=1) yield connection_str, receivers - client.stop() + for r in receivers: + r.close() @pytest.fixture() def connstr_senders(connection_str): client = EventHubClient.from_connection_string(connection_str, debug=True) - eh_hub_info = client.get_eventhub_info() + eh_hub_info = client.get_eventhub_information() partitions = eh_hub_info["partition_ids"] senders = [] for p in partitions: - senders.append(client.add_sender(partition=p)) - - client.run() + sender = client.create_sender(partition=p) + senders.append(sender) yield connection_str, senders - client.stop() + for s in senders: + s.close() @pytest.fixture() diff --git a/sdk/eventhub/azure-eventhubs/tests/test_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_receive.py index 0b05bf78c842..51fbb3a6079a 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_receive.py @@ -9,14 
+9,13 @@ import time import datetime -from azure import eventhub -from azure.eventhub import EventData, EventHubClient, Offset +from azure.eventhub import EventData, EventHubClient, EventPosition # def test_receive_without_events(connstr_senders): # connection_str, senders = connstr_senders # client = EventHubClient.from_connection_string(connection_str, debug=True) -# receiver = client.add_receiver("$default", "0", offset=Offset('@latest')) +# receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) # finish = datetime.datetime.now() + datetime.timedelta(seconds=240) # count = 0 # try: @@ -38,10 +37,8 @@ def test_receive_end_of_stream(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.add_receiver("$default", "0", offset=Offset('@latest')) - try: - client.run() - + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Receiving only a single event")) @@ -50,22 +47,17 @@ def test_receive_end_of_stream(connstr_senders): assert received[0].body_as_str() == "Receiving only a single event" assert list(received[-1].body)[0] == b"Receiving only a single event" - except: - raise - finally: - client.stop() @pytest.mark.liveTest def test_receive_with_offset_sync(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - partitions = client.get_eventhub_info() + partitions = client.get_eventhub_information() assert partitions["partition_ids"] == ["0", "1"] - receiver = client.add_receiver("$default", "0", offset=Offset('@latest')) - try: - client.run() - more_partitions = client.get_eventhub_info() + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + with receiver: + more_partitions = 
client.get_eventhub_information() assert more_partitions["partition_ids"] == ["0", "1"] received = receiver.receive(timeout=5) @@ -78,27 +70,22 @@ def test_receive_with_offset_sync(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.add_receiver("$default", "0", offset=offset) - client.run() - received = offset_receiver.receive(timeout=5) - assert len(received) == 0 - senders[0].send(EventData(b"Message after offset")) - received = offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - client.stop() + offset_receiver = client.create_receiver("$default", "0", offset=offset) + with offset_receiver: + received = offset_receiver.receive(timeout=5) + assert len(received) == 0 + senders[0].send(EventData(b"Message after offset")) + received = offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest def test_receive_with_inclusive_offset(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.add_receiver("$default", "0", offset=Offset('@latest')) - try: - client.run() + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) @@ -110,26 +97,21 @@ def test_receive_with_inclusive_offset(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.add_receiver("$default", "0", offset=Offset(offset.value, inclusive=True)) - client.run() - received = offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - client.stop() + offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset.value, inclusive=True)) + with offset_receiver: + received = 
offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest def test_receive_with_datetime_sync(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - partitions = client.get_eventhub_info() + partitions = client.get_eventhub_information() assert partitions["partition_ids"] == ["0", "1"] - receiver = client.add_receiver("$default", "0", offset=Offset('@latest')) - try: - client.run() - more_partitions = client.get_eventhub_info() + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + with receiver: + more_partitions = client.get_eventhub_information() assert more_partitions["partition_ids"] == ["0", "1"] received = receiver.receive(timeout=5) assert len(received) == 0 @@ -141,17 +123,13 @@ def test_receive_with_datetime_sync(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.add_receiver("$default", "0", offset=Offset(offset)) - client.run() - received = offset_receiver.receive(timeout=5) - assert len(received) == 0 - senders[0].send(EventData(b"Message after timestamp")) - received = offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - client.stop() + offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + with offset_receiver: + received = offset_receiver.receive(timeout=5) + assert len(received) == 0 + senders[0].send(EventData(b"Message after timestamp")) + received = offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest @@ -167,9 +145,8 @@ def test_receive_with_custom_datetime_sync(connstr_senders): for i in range(5): senders[0].send(EventData(b"Message after timestamp")) - receiver = client.add_receiver("$default", "0", offset=Offset(offset)) - try: - client.run() + receiver = client.create_receiver("$default", "0", 
offset=EventPosition(offset)) + with receiver: all_received = [] received = receiver.receive(timeout=1) while received: @@ -180,20 +157,14 @@ def test_receive_with_custom_datetime_sync(connstr_senders): for received_event in all_received: assert received_event.body_as_str() == "Message after timestamp" assert received_event.enqueued_time > offset - except: - raise - finally: - client.stop() @pytest.mark.liveTest def test_receive_with_sequence_no(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.add_receiver("$default", "0", offset=Offset('@latest')) - try: - client.run() - + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) @@ -202,91 +173,73 @@ def test_receive_with_sequence_no(connstr_senders): assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.add_receiver("$default", "0", offset=Offset(offset)) - client.run() - received = offset_receiver.receive(timeout=5) - assert len(received) == 0 - senders[0].send(EventData(b"Message next in sequence")) - time.sleep(1) - received = offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - client.stop() + offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + with offset_receiver: + received = offset_receiver.receive(timeout=5) + assert len(received) == 0 + senders[0].send(EventData(b"Message next in sequence")) + time.sleep(1) + received = offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest def test_receive_with_inclusive_sequence_no(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.add_receiver("$default", "0", 
offset=Offset('@latest')) - try: - client.run() - + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) received = receiver.receive(timeout=5) assert len(received) == 1 offset = received[0].sequence_number - - offset_receiver = client.add_receiver("$default", "0", offset=Offset(offset, inclusive=True)) - client.run() - received = offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - client.stop() + offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset, inclusive=True)) + with offset_receiver: + received = offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest def test_receive_batch(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.add_receiver("$default", "0", prefetch=500, offset=Offset('@latest')) - try: - client.run() - + receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 for i in range(10): senders[0].send(EventData(b"Data")) received = receiver.receive(max_batch_size=5, timeout=5) assert len(received) == 5 - except: - raise - finally: - client.stop() @pytest.mark.liveTest def test_receive_batch_with_app_prop_sync(connstr_senders): - pytest.skip("Waiting on uAMQP release") + #pytest.skip("Waiting on uAMQP release") connection_str, senders = connstr_senders + app_prop_key = "raw_prop" + app_prop_value = "raw_value" + batch_app_prop = {app_prop_key: app_prop_value} def batched(): for i in range(10): - yield "Event Data {}".format(i) + ed = EventData("Event Data {}".format(i)) + ed.application_properties = batch_app_prop + yield ed for i in range(10, 20): - yield EventData("Event Data 
{}".format(i)) + ed = EventData("Event Data {}".format(i)) + ed.application_properties = batch_app_prop + yield ed client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.add_receiver("$default", "0", prefetch=500, offset=Offset('@latest')) - try: - client.run() - + receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 - app_prop_key = "raw_prop" - app_prop_value = "raw_value" - batch_app_prop = {app_prop_key:app_prop_value} - batch_event = EventData(batch=batched()) - batch_event.application_properties = batch_app_prop - - senders[0].send(batch_event) + senders[0].send_batch(batched()) time.sleep(1) @@ -297,7 +250,4 @@ def batched(): assert list(message.body)[0] == "Event Data {}".format(index).encode('utf-8') assert (app_prop_key.encode('utf-8') in message.application_properties) \ and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8')) - except: - raise - finally: - client.stop() + From 1856bfaed6d6fef20d368659906e1c790ab0d1cf Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 19 May 2019 15:35:44 -0700 Subject: [PATCH 07/49] Revised test send for track two --- .../azure-eventhubs/azure/eventhub/common.py | 2 +- .../azure-eventhubs/azure/eventhub/sender.py | 6 +- .../azure-eventhubs/tests/test_send.py | 138 +++++------------- 3 files changed, 43 insertions(+), 103 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index 99da4e1d09b6..ba2b910a7f1a 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -207,7 +207,7 @@ def application_properties(self, value): :type value: dict """ self._app_properties = value - properties = dict(self._app_properties) + properties = None if value is None else 
dict(self._app_properties) self.message.application_properties = properties @property diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 43a3a74f0cca..e6a33dc65d30 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -238,6 +238,8 @@ def close(self, exception=None): self._handler.close() def _send_event_data(self, event_data): + if not self.running: + self.open() try: self._handler.send_message(event_data.message) if self._outcome != MessageSendResult.Ok: @@ -298,8 +300,6 @@ def send(self, event_data): """ if self.error: raise self.error - if not self.running: - self.open() if event_data.partition_key and self.partition: raise ValueError("EventData partition key cannot be used with a partition sender.") event_data.message.on_send_complete = self._on_outcome @@ -334,8 +334,6 @@ def send_batch(self, batch_event_data): for i in range(1, len(event_data_list)): if event_data_list[i].partition_key != event_data_list[i-1].partition_key: raise ValueError("partition key of all EventData must be the same if being sent in a batch") - if not self.running: - self.open() wrapper_event_data = BatchSendEventData(event_data_list) wrapper_event_data.message.on_send_complete = self._on_outcome return self._send_event_data(wrapper_event_data) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_send.py b/sdk/eventhub/azure-eventhubs/tests/test_send.py index f7a8ccc3b158..cdf1f0ebc6d0 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_send.py @@ -5,13 +5,11 @@ # license information. 
#-------------------------------------------------------------------------- -import os import pytest import time import json import sys -from azure import eventhub from azure.eventhub import EventData, EventHubClient @@ -19,10 +17,8 @@ def test_send_with_partition_key(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender() - try: - client.run() - + sender = client.create_sender() + with sender: data_val = 0 for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]: partition_key = b"test_partition_" + partition @@ -31,10 +27,6 @@ def test_send_with_partition_key(connstr_receivers): data.partition_key = partition_key data_val += 1 sender.send(data) - except: - raise - finally: - client.stop() found_partition_keys = {} for index, partition in enumerate(receivers): @@ -53,15 +45,10 @@ def test_send_and_receive_large_body_size(connstr_receivers): pytest.skip("Skipping on OSX - open issue regarding message size") connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender() - try: - client.run() + sender = client.create_sender() + with sender: payload = 250 * 1024 sender.send(EventData("A" * payload)) - except: - raise - finally: - client.stop() received = [] for r in receivers: @@ -75,14 +62,9 @@ def test_send_and_receive_large_body_size(connstr_receivers): def test_send_and_receive_zero_length_body(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender() - try: - client.run() + sender = client.create_sender() + with sender: sender.send(EventData("")) - except: - raise - finally: - client.stop() received = [] for r in receivers: @@ -96,14 +78,9 @@ def test_send_and_receive_zero_length_body(connstr_receivers): def test_send_single_event(connstr_receivers): 
connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender() - try: - client.run() + sender = client.create_sender() + with sender: sender.send(EventData(b"A single event")) - except: - raise - finally: - client.stop() received = [] for r in receivers: @@ -118,17 +95,12 @@ def test_send_batch_sync(connstr_receivers): connection_str, receivers = connstr_receivers def batched(): for i in range(10): - yield "Event number {}".format(i) + yield EventData("Event number {}".format(i)) client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender() - try: - client.run() - sender.send(EventData(batch=batched())) - except: - raise - finally: - client.stop() + sender = client.create_sender() + with sender: + sender.send_batch(batched()) time.sleep(1) received = [] @@ -144,14 +116,9 @@ def batched(): def test_send_partition(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender(partition="1") - try: - client.run() + sender = client.create_sender(partition="1") + with sender: sender.send(EventData(b"Data")) - except: - raise - finally: - client.stop() partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 0 @@ -163,15 +130,10 @@ def test_send_partition(connstr_receivers): def test_send_non_ascii(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender(partition="0") - try: - client.run() + sender = client.create_sender(partition="0") + with sender: sender.send(EventData(u"é,è,à,ù,â,ê,î,ô,û")) sender.send(EventData(json.dumps({"foo": u"漢字"}))) - except: - raise - finally: - client.stop() partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 2 @@ -184,18 +146,13 @@ def 
test_send_partition_batch(connstr_receivers): connection_str, receivers = connstr_receivers def batched(): for i in range(10): - yield "Event number {}".format(i) + yield EventData("Event number {}".format(i)) client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender(partition="1") - try: - client.run() - sender.send(EventData(batch=batched())) + sender = client.create_sender(partition="1") + with sender: + sender.send_batch(batched()) time.sleep(1) - except: - raise - finally: - client.stop() partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 0 @@ -207,14 +164,9 @@ def batched(): def test_send_array_sync(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=True) - sender = client.add_sender() - try: - client.run() + sender = client.create_sender() + with sender: sender.send(EventData([b"A", b"B", b"C"])) - except: - raise - finally: - client.stop() received = [] for r in receivers: @@ -228,16 +180,12 @@ def test_send_array_sync(connstr_receivers): def test_send_multiple_clients(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender_0 = client.add_sender(partition="0") - sender_1 = client.add_sender(partition="1") - try: - client.run() + sender_0 = client.create_sender(partition="0") + sender_1 = client.create_sender(partition="1") + with sender_0: sender_0.send(EventData(b"Message 0")) + with sender_1: sender_1.send(EventData(b"Message 1")) - except: - raise - finally: - client.stop() partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 1 @@ -247,33 +195,27 @@ def test_send_multiple_clients(connstr_receivers): @pytest.mark.liveTest def test_send_batch_with_app_prop_sync(connstr_receivers): - pytest.skip("Waiting on uAMQP release") + #pytest.skip("Waiting on uAMQP release") connection_str, receivers = 
connstr_receivers + app_prop_key = "raw_prop" + app_prop_value = "raw_value" + app_prop = {app_prop_key: app_prop_value} + def batched(): for i in range(10): - yield "Event number {}".format(i) + ed = EventData("Event number {}".format(i)) + ed.application_properties = app_prop + yield ed for i in range(10, 20): - yield EventData("Event number {}".format(i)) + ed = EventData("Event number {}".format(i)) + ed.application_properties = app_prop + yield ed client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender() - try: - client.run() - - app_prop_key = "raw_prop" - app_prop_value = "raw_value" - batch_app_prop = {app_prop_key:app_prop_value} - batch_event = EventData(batch=batched()) - batch_event.application_properties = batch_app_prop - - sender.send(batch_event) - except: - raise - finally: - client.stop() - + sender = client.create_sender() + with sender: + sender.send_batch(batched()) time.sleep(1) - received = [] for r in receivers: received.extend(r.receive(timeout=3)) From 9d6403ae6f9274e143d255cbc87e8d9e8d6ef70b Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 19 May 2019 21:38:05 -0700 Subject: [PATCH 08/49] Update async code from sync --- .../eventhub/aio/event_hubs_client_async.py | 149 ++++----------- .../azure/eventhub/aio/receiver_async.py | 98 +++++----- .../azure/eventhub/aio/sender_async.py | 178 ++++++++++-------- .../azure-eventhubs/azure/eventhub/client.py | 4 +- .../azure/eventhub/client_abstract.py | 1 - .../azure-eventhubs/azure/eventhub/sender.py | 22 --- 6 files changed, 184 insertions(+), 268 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index d6cb0003ba18..275f76f6ee62 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -7,6 +7,7 @@ import 
asyncio import time import datetime +import functools from uamqp import authentication, constants, types, errors from uamqp import ( @@ -51,10 +52,19 @@ def _create_auth(self, username=None, password=None): :param password: The shared access key. :type password: str """ - http_proxy = self.config.http_proxy_policy.http_proxy + http_proxy = self.config.http_proxy transport_type = self.config.transport_type auth_timeout = self.config.auth_timeout - if self.sas_token: + if self.aad_credential and self.sas_token: + raise ValueError("Can't have both sas_token and aad_credential") + + elif self.aad_credential: + get_jwt_token = functools.partial(self.aad_credential.get_token, ['https://eventhubs.azure.net//.default']) + # TODO: should use async aad_credential.get_token. Check with Charles for async identity api + return authentication.JWTTokenAsync(self.auth_uri, self.auth_uri, + get_jwt_token, http_proxy=http_proxy, + transport_type=transport_type) + elif self.sas_token: token = self.sas_token() if callable(self.sas_token) else self.sas_token try: expiry = int(parse_sas_token(token)['se']) @@ -64,7 +74,8 @@ def _create_auth(self, username=None, password=None): self.auth_uri, self.auth_uri, token, expires_at=expiry, timeout=auth_timeout, - http_proxy=http_proxy, transport_type=transport_type) + http_proxy=http_proxy, + transport_type=transport_type) username = username or self._auth_config['username'] password = password or self._auth_config['password'] @@ -74,100 +85,7 @@ def _create_auth(self, username=None, password=None): return authentication.SASTokenAsync.from_shared_access_key( self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) - async def _close_clients_async(self): - """ - Close all open AsyncSender/AsyncReceiver clients. 
- """ - await asyncio.gather(*[c.close_async() for c in self.clients]) - - async def _wait_for_client(self, client): - try: - while client.get_handler_state().value == 2: - await client._handler._connection.work_async() # pylint: disable=protected-access - except Exception as exp: # pylint: disable=broad-except - await client.close_async(exception=exp) - - async def _start_client_async(self, client): - try: - if not client.running: - await client.open_async() - except Exception as exp: # pylint: disable=broad-except - log.info("Encountered error while starting handler: %r", exp) - await client.close_async(exception=exp) - log.info("Finished closing failed handler") - - async def _handle_redirect(self, redirects): - if len(redirects) != len(self.clients): - not_redirected = [c for c in self.clients if not c.redirected] - _, timeout = await asyncio.wait([self._wait_for_client(c) for c in not_redirected], timeout=5) - if timeout: - raise EventHubError("Some clients are attempting to redirect the connection.") - redirects = [c.redirected for c in self.clients if c.redirected] - if not all(r.hostname == redirects[0].hostname for r in redirects): - raise EventHubError("Multiple clients attempting to redirect to different hosts.") - self._process_redirect_uri(redirects[0]) - await asyncio.gather(*[c.open_async() for c in self.clients]) - - async def run_async(self): - """ - Run the EventHubClient asynchronously. - Opens the connection and starts running all AsyncSender/AsyncReceiver clients. - Returns a list of the start up results. For a succcesful client start the - result will be `None`, otherwise the exception raised. - If all clients failed to start, then run will fail, shut down the connection - and raise an exception. - If at least one client starts up successfully the run command will succeed. - - :rtype: list[~azure.eventhub.common.EventHubError] - - Example: - .. 
literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py - :start-after: [START eventhub_client_run_async] - :end-before: [END eventhub_client_run_async] - :language: python - :dedent: 4 - :caption: Run the EventHubClient asynchronously. - - """ - log.info("%r: Starting %r clients", self.container_id, len(self.clients)) - tasks = [self._start_client_async(c) for c in self.clients] - try: - await asyncio.gather(*tasks) - redirects = [c.redirected for c in self.clients if c.redirected] - failed = [c.error for c in self.clients if c.error] - if failed and len(failed) == len(self.clients): - log.warning("%r: All clients failed to start.", self.container_id) - raise failed[0] - if failed: - log.warning("%r: %r clients failed to start.", self.container_id, len(failed)) - elif redirects: - await self._handle_redirect(redirects) - except EventHubError: - await self.stop_async() - raise - except Exception as exp: - await self.stop_async() - raise EventHubError(str(exp)) - return failed - - async def stop_async(self): - """ - Stop the EventHubClient and all its Sender/Receiver clients. - - Example: - .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py - :start-after: [START eventhub_client_async_stop] - :end-before: [END eventhub_client_async_stop] - :language: python - :dedent: 4 - :caption: Stop the EventHubClient and all its Sender/Receiver clients. - - """ - log.info("%r: Stopping %r clients", self.container_id, len(self.clients)) - self.stopped = True - await self._close_clients_async() - - async def get_eventhub_info_async(self): + async def get_eventhub_information(self): """ Get details on the specified EventHub async. @@ -176,7 +94,6 @@ async def get_eventhub_info_async(self): alt_creds = { "username": self._auth_config.get("iot_username"), "password":self._auth_config.get("iot_password")} - # TODO: add proxy? 
try: mgmt_auth = self._create_auth(**alt_creds) mgmt_client = AMQPClientAsync(self.mgmt_target, auth=mgmt_auth, debug=self.debug) @@ -200,8 +117,9 @@ async def get_eventhub_info_async(self): finally: await mgmt_client.close_async() - def add_async_receiver( - self, consumer_group, partition, offset=None, epoch=None, prefetch=300, operation=None, loop=None): + def create_receiver( + self, consumer_group, partition, offset=None, epoch=None, operation=None, + prefetch=None, keep_alive=None, auto_reconnect=None, loop=None): """ Add an async receiver to the client for a particular consumer group and partition. @@ -227,15 +145,19 @@ def add_async_receiver( :caption: Add an async receiver to the client for a particular consumer group and partition. """ + keep_alive = self.config.keep_alive if keep_alive is None else keep_alive + auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect + prefetch = self.config.prefetch if prefetch is None else prefetch + path = self.address.path + operation if operation else self.address.path source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( self.address.hostname, path, consumer_group, partition) handler = Receiver( - self, source_url, offset=offset, epoch=epoch, prefetch=prefetch, loop=loop) - self.clients.append(handler) + self, source_url, offset=offset, epoch=epoch, prefetch=prefetch, keep_alive=keep_alive, + auto_reconnect=auto_reconnect, loop=loop) return handler - def add_async_epoch_receiver( + def create_epoch_receiver( self, consumer_group, partition, epoch, prefetch=300, operation=None, loop=None): """ Add an async receiver to the client with an epoch value. Only a single epoch receiver @@ -265,16 +187,11 @@ def add_async_epoch_receiver( :caption: Add an async receiver to the client with an epoch value. 
""" - path = self.address.path + operation if operation else self.address.path - source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( - self.address.hostname, path, consumer_group, partition) - handler = Receiver( - self, source_url, prefetch=prefetch, epoch=epoch, loop=loop) - self.clients.append(handler) - return handler + return self.create_receiver(consumer_group, partition, epoch=epoch, prefetch=prefetch, + operation=operation, loop=loop) - def add_async_sender( - self, partition=None, operation=None, send_timeout=60, loop=None): + def create_sender( + self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None, loop=None): """ Add an async sender to the client to send ~azure.eventhub.common.EventData object to an EventHub. @@ -311,7 +228,11 @@ def add_async_sender( target = "amqps://{}{}".format(self.address.hostname, self.address.path) if operation: target = target + operation + send_timeout = self.config.send_timeout if send_timeout is None else send_timeout + keep_alive = self.config.keep_alive if keep_alive is None else keep_alive + auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect + handler = Sender( - self, target, partition=partition, send_timeout=send_timeout, loop=loop) - self.clients.append(handler) + self, target, partition=partition, send_timeout=send_timeout, + keep_alive=keep_alive, auto_reconnect=auto_reconnect, loop=loop) return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index bad66bcae917..d6d17713afc7 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -11,7 +11,6 @@ from uamqp import ReceiveClientAsync, Source from azure.eventhub import EventHubError, EventData -from azure.eventhub.receiver import Receiver from azure.eventhub.common import 
_error_handler log = logging.getLogger(__name__) @@ -35,7 +34,7 @@ class Receiver(object): def __init__( # pylint: disable=super-init-not-called self, client, source, offset=None, prefetch=300, epoch=None, - keep_alive=None, auto_reconnect=True, loop=None): + keep_alive=None, auto_reconnect=False, loop=None): """ Instantiate an async receiver. @@ -57,8 +56,8 @@ def __init__( # pylint: disable=super-init-not-called self.offset = offset self.prefetch = prefetch self.epoch = epoch - self.keep_alive = client.config.keep_alive_policy.keep_alive - self.auto_reconnect = client.config.auto_reconnect_policy.auto_reconnect + self.keep_alive = keep_alive + self.auto_reconnect = auto_reconnect self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) self.reconnect_backoff = 1 self.redirected = None @@ -74,7 +73,7 @@ def __init__( # pylint: disable=super-init-not-called self._handler = ReceiveClientAsync( source, auth=self.client.get_auth(), - debug=self.client.config.network_trace_policy.network_trace_logging, + debug=self.client.config.network_tracing, prefetch=self.prefetch, link_properties=self.properties, timeout=self.timeout, @@ -84,7 +83,7 @@ def __init__( # pylint: disable=super-init-not-called properties=self.client.create_properties(), loop=self.loop) - async def open_async(self): + async def open(self): """ Open the Receiver using the supplied conneciton. 
If the handler has previously been redirected, the redirect @@ -128,7 +127,7 @@ async def open_async(self): while not await self._handler.client_ready_async(): await asyncio.sleep(0.05) - async def _reconnect_async(self): # pylint: disable=too-many-statements + async def _reconnect(self): # pylint: disable=too-many-statements # pylint: disable=protected-access alt_creds = { "username": self.client._auth_config.get("iot_username"), @@ -157,7 +156,7 @@ async def _reconnect_async(self): # pylint: disable=too-many-statements except errors.TokenExpired as shutdown: log.info("AsyncReceiver disconnected due to token expiry. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: if shutdown.action.retry and self.auto_reconnect: @@ -165,7 +164,7 @@ async def _reconnect_async(self): # pylint: disable=too-many-statements return False log.info("AsyncReceiver detached. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: if self.auto_reconnect: @@ -173,7 +172,7 @@ async def _reconnect_async(self): # pylint: disable=too-many-statements return False log.info("AsyncReceiver detached. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: @@ -181,43 +180,21 @@ async def _reconnect_async(self): # pylint: disable=too-many-statements return False log.info("AsyncReceiver connection error (%r). 
Shutting down.", shutdown) error = EventHubError(str(shutdown)) - await self.close_async(exception=error) + await self.close(exception=error) raise error except Exception as e: log.info("Unexpected error occurred (%r). Shutting down.", e) error = EventHubError("Receiver reconnect failed: {}".format(e)) - await self.close_async(exception=error) + await self.close(exception=error) raise error - async def reconnect_async(self): + async def reconnect(self): """If the Receiver was disconnected from the service with a retryable error - attempt to reconnect.""" while not await self._reconnect_async(): await asyncio.sleep(self.reconnect_backoff) - async def has_started(self): - """ - Whether the handler has completed all start up processes such as - establishing the connection, session, link and authentication, and - is not ready to process messages. - **This function is now deprecated and will be removed in v2.0+.** - - :rtype: bool - """ - # pylint: disable=protected-access - timeout = False - auth_in_progress = False - if self._handler._connection.cbs: - timeout, auth_in_progress = await self._handler._auth.handle_token_async() - if timeout: - raise EventHubError("Authorization timeout.") - if auth_in_progress: - return False - if not await self._handler._client_ready_async(): - return False - return True - - async def close_async(self, exception=None): + async def close(self, exception=None): """ Close down the handler. If the handler has already closed, this will be a no op. 
An optional exception can be passed in to @@ -276,7 +253,7 @@ async def receive(self, max_batch_size=None, timeout=None): if self.error: raise self.error if not self.running: - raise ValueError("Unable to receive until client has been started.") + await self.open() data_batch = [] try: timeout_ms = 1000 * timeout if timeout else 0 @@ -290,43 +267,70 @@ async def receive(self, max_batch_size=None, timeout=None): return data_batch except (errors.TokenExpired, errors.AuthenticationException): log.info("AsyncReceiver disconnected due to token error. Attempting reconnect.") - await self.reconnect_async() + await self.reconnect() return data_batch except (errors.LinkDetach, errors.ConnectionClose) as shutdown: if shutdown.action.retry and self.auto_reconnect: log.info("AsyncReceiver detached. Attempting reconnect.") - await self.reconnect_async() + await self.reconnect() return data_batch log.info("AsyncReceiver detached. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: if self.auto_reconnect: log.info("AsyncReceiver detached. Attempting reconnect.") - await self.reconnect_async() + await self.reconnect() return data_batch log.info("AsyncReceiver detached. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except Exception as e: log.info("Unexpected error occurred (%r). 
Shutting down.", e) error = EventHubError("Receive failed: {}".format(e)) - await self.close_async(exception=error) + await self.close(exception=error) raise error async def __aenter__(self): - await self.open_async() return self async def __aexit__(self, exc_type, exc_val, exc_tb): - self.client.clients.remove(self) - await self.close_async(exc_val) + await self.close(exc_val) def __aiter__(self): self.messages_iter = self._handler.receive_messages_iter_async() return self async def __anext__(self): - return await self.messages_iter.__anext__() + while True: + try: + message = await self.messages_iter.__anext__() + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") + await self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + await self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + await self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + await self.close(exception=error) + raise error diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index f1f8824e422a..01245b64e7d5 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -10,9 +10,9 @@ from uamqp import constants, errors from uamqp import SendClientAsync +from azure.eventhub import MessageSendResult from azure.eventhub import EventHubError -from azure.eventhub.sender import Sender -from azure.eventhub.common import _error_handler +from azure.eventhub.common import _error_handler, BatchSendEventData log = logging.getLogger(__name__) @@ -33,7 +33,7 @@ class Sender(object): def __init__( # pylint: disable=super-init-not-called self, client, target, partition=None, send_timeout=60, - keep_alive=None, auto_reconnect=True, loop=None): + keep_alive=30, auto_reconnect=False, loop=None): """ Instantiate an EventHub event SenderAsync handler. @@ -60,8 +60,8 @@ def __init__( # pylint: disable=super-init-not-called self.client = client self.target = target self.partition = partition - self.keep_alive = client.config.keep_alive_policy.keep_alive - self.auto_reconnect = client.config.auto_reconnect_policy.auto_reconnect + self.keep_alive = keep_alive + self.auto_reconnect = auto_reconnect self.timeout = send_timeout self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) self.reconnect_backoff = 1 @@ -84,7 +84,7 @@ def __init__( # pylint: disable=super-init-not-called self._outcome = None self._condition = None - async def open_async(self): + async def open(self): """ Open the Sender using the supplied conneciton. 
If the handler has previously been redirected, the redirect @@ -119,7 +119,7 @@ async def open_async(self): while not await self._handler.client_ready_async(): await asyncio.sleep(0.05) - async def _reconnect_async(self): + async def _reconnect(self): await self._handler.close_async() unsent_events = self._handler.pending_messages self._handler = SendClientAsync( @@ -140,7 +140,7 @@ async def _reconnect_async(self): except errors.TokenExpired as shutdown: log.info("AsyncSender disconnected due to token expiry. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: if shutdown.action.retry and self.auto_reconnect: @@ -148,7 +148,7 @@ async def _reconnect_async(self): return False log.info("AsyncSender reconnect failed. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: if self.auto_reconnect: @@ -156,7 +156,7 @@ async def _reconnect_async(self): return False log.info("AsyncSender reconnect failed. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: @@ -164,43 +164,21 @@ async def _reconnect_async(self): return False log.info("AsyncSender connection error (%r). Shutting down.", shutdown) error = EventHubError(str(shutdown)) - await self.close_async(exception=error) + await self.close(exception=error) raise error except Exception as e: log.info("Unexpected error occurred (%r). 
Shutting down.", e) error = EventHubError("Sender reconnect failed: {}".format(e)) - await self.close_async(exception=error) + await self.close(exception=error) raise error - async def reconnect_async(self): + async def reconnect(self): """If the Receiver was disconnected from the service with a retryable error - attempt to reconnect.""" - while not await self._reconnect_async(): + while not await self._reconnect(): await asyncio.sleep(self.reconnect_backoff) - async def has_started(self): - """ - Whether the handler has completed all start up processes such as - establishing the connection, session, link and authentication, and - is not ready to process messages. - **This function is now deprecated and will be removed in v2.0+.** - - :rtype: bool - """ - # pylint: disable=protected-access - timeout = False - auth_in_progress = False - if self._handler._connection.cbs: - timeout, auth_in_progress = await self._handler._auth.handle_token_async() - if timeout: - raise EventHubError("Authorization timeout.") - if auth_in_progress: - return False - if not await self._handler._client_ready_async(): - return False - return True - - async def close_async(self, exception=None): + async def close(self, exception=None): """ Close down the handler. If the handler has already closed, this will be a no op. 
An optional exception can be passed in to @@ -234,6 +212,46 @@ async def close_async(self, exception=None): self.error = EventHubError("This send handler is now closed.") await self._handler.close_async() + async def _send_event_data(self, event_data): + if not self.running: + await self.open() + try: + self._handler.send_message(event_data.message) + if self._outcome != MessageSendResult.Ok: + raise Sender._error(self._outcome, self._condition) + except errors.MessageException as failed: + error = EventHubError(str(failed), failed) + await self.close(exception=error) + raise error + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Sender disconnected due to token error. Attempting reconnect.") + await self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Sender detached. Attempting reconnect.") + await self.reconnect() + else: + log.info("Sender detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Sender detached. Attempting reconnect.") + await self.reconnect() + else: + log.info("Sender detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Send failed: {}".format(e)) + await self.close(exception=error) + raise error + else: + return self._outcome + async def send(self, event_data): """ Sends an event data and asynchronously waits until @@ -256,45 +274,45 @@ async def send(self, event_data): """ if self.error: raise self.error - if not self.running: - raise ValueError("Unable to send until client has been started.") if event_data.partition_key and self.partition: raise ValueError("EventData partition key cannot be used with a partition sender.") event_data.message.on_send_complete = self._on_outcome - try: - await self._handler.send_message_async(event_data.message) - if self._outcome != constants.MessageSendResult.Ok: - raise Sender._error(self._outcome, self._condition) - except (errors.TokenExpired, errors.AuthenticationException): - log.info("AsyncSender disconnected due to token error. Attempting reconnect.") - await self.reconnect_async() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("AsyncSender detached. Attempting reconnect.") - await self.reconnect_async() - else: - log.info("AsyncSender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("AsyncSender detached. Attempting reconnect.") - await self.reconnect_async() - else: - log.info("AsyncSender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) - raise error - except Exception as e: - log.info("Unexpected error occurred (%r). 
Shutting down.", e) - error = EventHubError("Send failed: {}".format(e)) - await self.close_async(exception=error) - raise error - else: - return self._outcome + await self._send_event_data(event_data) + + async def send_batch(self, batch_event_data): + """ + Sends an event data and blocks until acknowledgement is + received or operation times out. + + :param event_data: The event to be sent. + :type event_data: ~azure.eventhub.common.EventData + :raises: ~azure.eventhub.common.EventHubError if the message fails to + send. + :return: The outcome of the message send. + :rtype: ~uamqp.constants.MessageSendResult + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START eventhub_client_sync_send] + :end-before: [END eventhub_client_sync_send] + :language: python + :dedent: 4 + :caption: Sends an event data and blocks until acknowledgement is received or operation times out. - async def wait_async(self): + """ + if self.error: + raise self.error + event_data_list = list(batch_event_data) + if len(event_data_list) == 0: + raise ValueError("batch_event_data must not be empty") + for i in range(1, len(event_data_list)): + if event_data_list[i].partition_key != event_data_list[i-1].partition_key: + raise ValueError("partition key of all EventData must be the same if being sent in a batch") + wrapper_event_data = BatchSendEventData(event_data_list) + wrapper_event_data.message.on_send_complete = self._on_outcome + return await self._send_event_data(wrapper_event_data) + + async def wait(self): """ Wait until all transferred events have been sent. """ @@ -306,24 +324,24 @@ async def wait_async(self): await self._handler.wait_async() except (errors.TokenExpired, errors.AuthenticationException): log.info("AsyncSender disconnected due to token error. 
Attempting reconnect.") - await self.reconnect_async() + await self.reconnect() except (errors.LinkDetach, errors.ConnectionClose) as shutdown: if shutdown.action.retry and self.auto_reconnect: log.info("AsyncSender detached. Attempting reconnect.") - await self.reconnect_async() + await self.reconnect() else: log.info("AsyncSender detached. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: if self.auto_reconnect: log.info("AsyncSender detached. Attempting reconnect.") - await self.reconnect_async() + await self.reconnect() else: log.info("AsyncSender detached. Shutting down.") error = EventHubError(str(shutdown), shutdown) - await self.close_async(exception=error) + await self.close(exception=error) raise error except Exception as e: log.info("Unexpected error occurred (%r).", e) @@ -340,13 +358,11 @@ def _on_outcome(self, outcome, condition): self._condition = condition async def __aenter__(self): - await self.open_async() return self async def __aexit__(self, exc_type, exc_val, exc_tb): - self.client.clients.remove(self) - await self.close_async(exc_val) + await self.close(exc_val) @staticmethod def _error(outcome, condition): - return None if outcome == constants.MessageSendResult.Ok else EventHubError(outcome, condition) + return None if outcome == MessageSendResult.Ok else EventHubError(outcome, condition) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 688aac92b504..58cd975e4baf 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -60,7 +60,7 @@ def _create_auth(self, username=None, password=None): transport_type = self.config.transport_type auth_timeout = self.config.auth_timeout if self.aad_credential and self.sas_token: - raise EventHubError("Can't 
have both sas_token and aad credential") + raise ValueError("Can't have both sas_token and aad_credential") elif self.aad_credential: get_jwt_token = functools.partial(self.aad_credential.get_token, ['https://eventhubs.azure.net//.default']) @@ -167,7 +167,6 @@ def create_receiver( self.address.hostname, path, consumer_group, partition) handler = Receiver( self, source_url, offset=offset, epoch=epoch, prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) - self.clients.append(handler) return handler def create_epoch_receiver( @@ -215,5 +214,4 @@ def create_sender(self, partition=None, operation=None, send_timeout=None, keep_ handler = Sender( self, target, partition=partition, send_timeout=send_timeout, keep_alive=keep_alive, auto_reconnect=auto_reconnect) - self.clients.append(handler) return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index 944bf8d1c15e..1435d15bd2be 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -151,7 +151,6 @@ def __init__( # self.debug = kwargs.get("debug", False) # debug #self.auth_timeout = auth_timeout - self.clients = [] self.stopped = False self.config = Configuration(**kwargs) self.debug = self.config.network_tracing diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index e6a33dc65d30..bd84ace039c3 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -183,28 +183,6 @@ def get_handler_state(self): # pylint: disable=protected-access return self._handler._message_sender.get_state() - def has_started(self): - """ - Whether the handler has completed all start up processes such as - establishing the connection, session, link and authentication, and - is not ready to process 
messages. - **This function is now deprecated and will be removed in v2.0+.** - - :rtype: bool - """ - # pylint: disable=protected-access - timeout = False - auth_in_progress = False - if self._handler._connection.cbs: - timeout, auth_in_progress = self._handler._auth.handle_token() - if timeout: - raise EventHubError("Authorization timeout.") - if auth_in_progress: - return False - if not self._handler._client_ready(): - return False - return True - def close(self, exception=None): """ Close down the handler. If the handler has already closed, From e1dff6be53317eb464656e8af476e26209fcc527 Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 19 May 2019 21:39:23 -0700 Subject: [PATCH 09/49] Revise async receive and send live test for track2 --- .../tests/asynctests/test_receive_async.py | 236 +++++++----------- .../tests/asynctests/test_send_async.py | 139 ++++------- 2 files changed, 139 insertions(+), 236 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py index 6b086ff8202c..1be11107dae0 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py @@ -9,18 +9,17 @@ import pytest import time -from azure import eventhub -from azure.eventhub import EventData, Offset, EventHubError, EventHubClientAsync +from azure.eventhub import EventData, EventPosition, EventHubError +from azure.eventhub.aio import EventHubClient @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_end_of_stream_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest')) - await client.run_async() - try: + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = 
client.create_receiver("$default", "0", offset=EventPosition('@latest')) + async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Receiving only a single event")) @@ -28,20 +27,15 @@ async def test_receive_end_of_stream_async(connstr_senders): assert len(received) == 1 assert list(received[-1].body)[0] == b"Receiving only a single event" - except: - raise - finally: - await client.stop_async() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_offset_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest')) - await client.run_async() - try: + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) @@ -50,27 +44,22 @@ async def test_receive_with_offset_async(connstr_senders): assert len(received) == 1 offset = received[0].offset - offset_receiver = client.add_async_receiver("$default", "0", offset=offset) - await client.run_async() - received = await offset_receiver.receive(timeout=5) - assert len(received) == 0 - senders[0].send(EventData(b"Message after offset")) - received = await offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - await client.stop_async() + offset_receiver = client.create_receiver("$default", "0", offset=offset) + async with offset_receiver: + received = await offset_receiver.receive(timeout=5) + assert len(received) == 0 + senders[0].send(EventData(b"Message after offset")) + received = await offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest @pytest.mark.asyncio async def 
test_receive_with_inclusive_offset_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest')) - await client.run_async() - try: + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) @@ -79,24 +68,19 @@ async def test_receive_with_inclusive_offset_async(connstr_senders): assert len(received) == 1 offset = received[0].offset - offset_receiver = client.add_async_receiver("$default", "0", offset=Offset(offset.value, inclusive=True)) - await client.run_async() - received = await offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - await client.stop_async() + offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset.value, inclusive=True)) + async with offset_receiver: + received = await offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_datetime_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest')) - await client.run_async() - try: + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) @@ -104,28 +88,23 @@ async def test_receive_with_datetime_async(connstr_senders): assert len(received) == 1 offset = 
received[0].enqueued_time - offset_receiver = client.add_async_receiver("$default", "0", offset=Offset(offset)) - await client.run_async() - received = await offset_receiver.receive(timeout=5) - assert len(received) == 0 - senders[0].send(EventData(b"Message after timestamp")) - time.sleep(1) - received = await offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - await client.stop_async() + offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + async with offset_receiver: + received = await offset_receiver.receive(timeout=5) + assert len(received) == 0 + senders[0].send(EventData(b"Message after timestamp")) + time.sleep(1) + received = await offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_sequence_no_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest')) - await client.run_async() - try: + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) @@ -133,28 +112,23 @@ async def test_receive_with_sequence_no_async(connstr_senders): assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.add_async_receiver("$default", "0", offset=Offset(offset)) - await client.run_async() - received = await offset_receiver.receive(timeout=5) - assert len(received) == 0 - senders[0].send(EventData(b"Message next in sequence")) - time.sleep(1) - received = await offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - await client.stop_async() + 
offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + async with offset_receiver: + received = await offset_receiver.receive(timeout=5) + assert len(received) == 0 + senders[0].send(EventData(b"Message next in sequence")) + time.sleep(1) + received = await offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_inclusive_sequence_no_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", offset=Offset('@latest')) - await client.run_async() - try: + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Data")) @@ -162,34 +136,25 @@ async def test_receive_with_inclusive_sequence_no_async(connstr_senders): assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.add_async_receiver("$default", "0", offset=Offset(offset, inclusive=True)) - await client.run_async() - received = await offset_receiver.receive(timeout=5) - assert len(received) == 1 - except: - raise - finally: - await client.stop_async() + offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset, inclusive=True)) + async with offset_receiver: + received = await offset_receiver.receive(timeout=5) + assert len(received) == 1 @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_batch_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", prefetch=500, offset=Offset('@latest')) - await client.run_async() - 
try: + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 for i in range(10): senders[0].send(EventData(b"Data")) received = await receiver.receive(max_batch_size=5, timeout=5) assert len(received) == 5 - except: - raise - finally: - await client.stop_async() async def pump(receiver, sleep=None): @@ -213,22 +178,16 @@ async def test_epoch_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, debug=False) receivers = [] for epoch in [10, 20]: - receivers.append(client.add_async_epoch_receiver("$default", "0", epoch, prefetch=5)) - try: - await client.run_async() - outputs = await asyncio.gather( - pump(receivers[0]), - pump(receivers[1]), - return_exceptions=True) - assert isinstance(outputs[0], EventHubError) - assert outputs[1] == 1 - except: - raise - finally: - await client.stop_async() + receivers.append(client.create_epoch_receiver("$default", "0", epoch, prefetch=5)) + outputs = await asyncio.gather( + pump(receivers[0]), + pump(receivers[1]), + return_exceptions=True) + assert isinstance(outputs[0], EventHubError) + assert outputs[1] == 1 @pytest.mark.liveTest @@ -237,15 +196,14 @@ async def test_multiple_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClientAsync.from_connection_string(connection_str, debug=True) - partitions = await client.get_eventhub_info_async() + client = EventHubClient.from_connection_string(connection_str, debug=True) + partitions = await client.get_eventhub_information() assert 
partitions["partition_ids"] == ["0", "1"] receivers = [] for i in range(2): - receivers.append(client.add_async_receiver("$default", "0", prefetch=10)) + receivers.append(client.create_receiver("$default", "0", prefetch=10)) try: - await client.run_async() - more_partitions = await client.get_eventhub_info_async() + more_partitions = await client.get_eventhub_information() assert more_partitions["partition_ids"] == ["0", "1"] outputs = await asyncio.gather( pump(receivers[0]), @@ -253,10 +211,9 @@ async def test_multiple_receiver_async(connstr_senders): return_exceptions=True) assert isinstance(outputs[0], int) and outputs[0] == 1 assert isinstance(outputs[1], int) and outputs[1] == 1 - except: - raise finally: - await client.stop_async() + for r in receivers: + await r.close() @pytest.mark.liveTest @@ -265,22 +222,20 @@ async def test_epoch_receiver_after_non_epoch_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, debug=False) receivers = [] - receivers.append(client.add_async_receiver("$default", "0", prefetch=10)) - receivers.append(client.add_async_epoch_receiver("$default", "0", 15, prefetch=10)) + receivers.append(client.create_receiver("$default", "0", prefetch=10)) + receivers.append(client.create_epoch_receiver("$default", "0", 15, prefetch=10)) try: - await client.run_async() outputs = await asyncio.gather( pump(receivers[0]), pump(receivers[1], sleep=5), return_exceptions=True) assert isinstance(outputs[0], EventHubError) assert isinstance(outputs[1], int) and outputs[1] == 1 - except: - raise finally: - await client.stop_async() + for r in receivers: + await r.close() @pytest.mark.liveTest @@ -289,51 +244,48 @@ async def test_non_epoch_receiver_after_epoch_receiver_async(connstr_senders): connection_str, senders = 
connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, debug=False) receivers = [] - receivers.append(client.add_async_epoch_receiver("$default", "0", 15, prefetch=10)) - receivers.append(client.add_async_receiver("$default", "0", prefetch=10)) + receivers.append(client.create_epoch_receiver("$default", "0", 15, prefetch=10)) + receivers.append(client.create_receiver("$default", "0", prefetch=10)) try: - await client.run_async() outputs = await asyncio.gather( pump(receivers[0]), pump(receivers[1]), return_exceptions=True) assert isinstance(outputs[1], EventHubError) assert isinstance(outputs[0], int) and outputs[0] == 1 - except: - raise finally: - await client.stop_async() + for r in receivers: + await r.close() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_batch_with_app_prop_async(connstr_senders): - pytest.skip("Waiting on uAMQP release") + #pytest.skip("Waiting on uAMQP release") connection_str, senders = connstr_senders + app_prop_key = "raw_prop" + app_prop_value = "raw_value" + app_prop = {app_prop_key: app_prop_value} def batched(): for i in range(10): - yield "Event Data {}".format(i) + ed = EventData("Event Data {}".format(i)) + ed.application_properties = app_prop + yield ed for i in range(10, 20): - yield EventData("Event Data {}".format(i)) - - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - receiver = client.add_async_receiver("$default", "0", prefetch=500, offset=Offset('@latest')) - try: - await client.run_async() + ed = EventData("Event Data {}".format(i)) + ed.application_properties = app_prop + yield ed + client = EventHubClient.from_connection_string(connection_str, debug=False) + receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + async with receiver: received = await 
receiver.receive(timeout=5) assert len(received) == 0 - app_prop_key = "raw_prop" - app_prop_value = "raw_value" - batch_app_prop = {app_prop_key:app_prop_value} - batch_event = EventData(batch=batched()) - batch_event.application_properties = batch_app_prop - - senders[0].send(batch_event) + senders[0].send_batch(batched()) await asyncio.sleep(1) @@ -344,7 +296,3 @@ def batched(): assert list(message.body)[0] == "Event Data {}".format(index).encode('utf-8') assert (app_prop_key.encode('utf-8') in message.application_properties) \ and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8')) - except: - raise - finally: - await client.stop_async() diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py index 917d7cde3b63..b17dad9cae2c 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py @@ -11,16 +11,16 @@ import time import json -from azure.eventhub import EventData, EventHubClientAsync +from azure.eventhub import EventData +from azure.eventhub.aio import EventHubClient @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_with_partition_key_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() - await client.run_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() data_val = 0 for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]: @@ -30,7 +30,6 @@ async def test_send_with_partition_key_async(connstr_receivers): data.partition_key = partition_key data_val += 1 await sender.send(data) - await client.stop_async() found_partition_keys = {} for index, partition in enumerate(receivers): @@ -47,15 +46,10 @@ async def 
test_send_with_partition_key_async(connstr_receivers): @pytest.mark.asyncio async def test_send_and_receive_zero_length_body_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() - try: - await client.run_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() + async with sender: await sender.send(EventData("")) - except: - raise - finally: - await client.stop_async() received = [] for r in receivers: @@ -69,15 +63,10 @@ async def test_send_and_receive_zero_length_body_async(connstr_receivers): @pytest.mark.asyncio async def test_send_single_event_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() - try: - await client.run_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() + async with sender: await sender.send(EventData(b"A single event")) - except: - raise - finally: - await client.stop_async() received = [] for r in receivers: @@ -93,17 +82,12 @@ async def test_send_batch_async(connstr_receivers): connection_str, receivers = connstr_receivers def batched(): for i in range(10): - yield "Event number {}".format(i) + yield EventData("Event number {}".format(i)) - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() - try: - await client.run_async() - await sender.send(EventData(batch=batched())) - except: - raise - finally: - await client.stop_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() + async with sender: + await sender.send_batch(batched()) time.sleep(1) received = [] @@ -119,15 +103,10 @@ def batched(): 
@pytest.mark.asyncio async def test_send_partition_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender(partition="1") - try: - await client.run_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender(partition="1") + async with sender: await sender.send(EventData(b"Data")) - except: - raise - finally: - await client.stop_async() partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 0 @@ -139,16 +118,11 @@ async def test_send_partition_async(connstr_receivers): @pytest.mark.asyncio async def test_send_non_ascii_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender(partition="0") - try: - await client.run_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender(partition="0") + async with sender: await sender.send(EventData("é,è,à,ù,â,ê,î,ô,û")) await sender.send(EventData(json.dumps({"foo": "漢字"}))) - except: - raise - finally: - await client.stop_async() partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 2 @@ -160,19 +134,15 @@ async def test_send_non_ascii_async(connstr_receivers): @pytest.mark.asyncio async def test_send_partition_batch_async(connstr_receivers): connection_str, receivers = connstr_receivers + def batched(): for i in range(10): - yield "Event number {}".format(i) + yield EventData("Event number {}".format(i)) - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender(partition="1") - try: - await client.run_async() - await sender.send(EventData(batch=batched())) - except: - raise - finally: - await client.stop_async() + client = 
EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender(partition="1") + async with sender: + await sender.send_batch(batched()) partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 0 @@ -184,15 +154,10 @@ def batched(): @pytest.mark.asyncio async def test_send_array_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() - try: - await client.run_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() + async with sender: await sender.send(EventData([b"A", b"B", b"C"])) - except: - raise - finally: - await client.stop_async() received = [] for r in receivers: @@ -206,17 +171,12 @@ async def test_send_array_async(connstr_receivers): @pytest.mark.asyncio async def test_send_multiple_clients_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender_0 = client.add_async_sender(partition="0") - sender_1 = client.add_async_sender(partition="1") - try: - await client.run_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender_0 = client.create_sender(partition="0") + sender_1 = client.create_sender(partition="1") + async with sender_0 and sender_1: await sender_0.send(EventData(b"Message 0")) await sender_1.send(EventData(b"Message 1")) - except: - raise - finally: - await client.stop_async() partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 1 @@ -227,31 +187,26 @@ async def test_send_multiple_clients_async(connstr_receivers): @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_batch_with_app_prop_async(connstr_receivers): - pytest.skip("Waiting on uAMQP release") + # pytest.skip("Waiting on uAMQP release") connection_str, receivers = 
connstr_receivers + app_prop_key = "raw_prop" + app_prop_value = "raw_value" + app_prop = {app_prop_key: app_prop_value} def batched(): for i in range(10): + ed = EventData("Event number {}".format(i)) + ed.application_properties = app_prop yield "Event number {}".format(i) for i in range(10, 20): - yield EventData("Event number {}".format(i)) + ed = EventData("Event number {}".format(i)) + ed.application_properties = app_prop + yield "Event number {}".format(i) - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() - try: - await client.run_async() - - app_prop_key = "raw_prop" - app_prop_value = "raw_value" - batch_app_prop = {app_prop_key:app_prop_value} - batch_event = EventData(batch=batched()) - batch_event.application_properties = batch_app_prop - - await sender.send(batch_event) - except: - raise - finally: - await client.stop_async() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() + async with sender: + await sender.send_batch(batched()) time.sleep(1) From 9a671c3d1cbee79c7265c5f72788743c3ef05c4e Mon Sep 17 00:00:00 2001 From: yijxie Date: Mon, 20 May 2019 09:53:23 -0700 Subject: [PATCH 10/49] Use uamqp 1.2 --- sdk/eventhub/azure-eventhubs/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhubs/setup.py b/sdk/eventhub/azure-eventhubs/setup.py index 034bdc14c115..1fdb12ec33d8 100644 --- a/sdk/eventhub/azure-eventhubs/setup.py +++ b/sdk/eventhub/azure-eventhubs/setup.py @@ -62,7 +62,7 @@ "tests", "tests.asynctests"]), install_requires=[ - 'uamqp~=1.1.0', + 'uamqp~=1.2.0', 'msrestazure>=0.4.32,<2.0.0', 'azure-common~=1.1', 'azure-storage-blob~=1.3' From 1ab3e1d5df483ba9769f096318e267fdc7452712 Mon Sep 17 00:00:00 2001 From: yijxie Date: Mon, 20 May 2019 16:30:31 -0700 Subject: [PATCH 11/49] Resolve code review feedback --- .../azure/eventhub/aio/receiver_async.py | 44 ++++++++- 
.../azure/eventhub/aio/sender_async.py | 41 ++++---- .../azure-eventhubs/azure/eventhub/common.py | 6 +- .../azure/eventhub/configuration.py | 2 +- .../azure/eventhub/receiver.py | 94 +++++++++---------- .../azure-eventhubs/azure/eventhub/sender.py | 44 +++++---- sdk/eventhub/azure-eventhubs/conftest.py | 2 - 7 files changed, 143 insertions(+), 90 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index d6d17713afc7..aafe4c8dcd20 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -58,7 +58,7 @@ def __init__( # pylint: disable=super-init-not-called self.epoch = epoch self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect - self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) + self.retry_policy = errors.ErrorPolicy(max_retries=self.client.config.max_retries, on_error=_error_handler) self.reconnect_backoff = 1 self.redirected = None self.error = None @@ -83,6 +83,48 @@ def __init__( # pylint: disable=super-init-not-called properties=self.client.create_properties(), loop=self.loop) + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close(exc_val) + + def __aiter__(self): + self.messages_iter = self._handler.receive_messages_iter_async() + return self + + async def __anext__(self): + while True: + try: + message = await self.messages_iter.__anext__() + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") + await self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. 
Attempting reconnect.") + await self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + await self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + await self.close(exception=error) + raise error + async def open(self): """ Open the Receiver using the supplied conneciton. diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 01245b64e7d5..751106d7087e 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -12,7 +12,7 @@ from azure.eventhub import MessageSendResult from azure.eventhub import EventHubError -from azure.eventhub.common import _error_handler, BatchSendEventData +from azure.eventhub.common import _error_handler, _BatchSendEventData log = logging.getLogger(__name__) @@ -33,7 +33,7 @@ class Sender(object): def __init__( # pylint: disable=super-init-not-called self, client, target, partition=None, send_timeout=60, - keep_alive=30, auto_reconnect=False, loop=None): + keep_alive=None, auto_reconnect=False, loop=None): """ Instantiate an EventHub event SenderAsync handler. 
@@ -63,7 +63,7 @@ def __init__( # pylint: disable=super-init-not-called self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect self.timeout = send_timeout - self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) + self.retry_policy = errors.ErrorPolicy(max_retries=self.client.config.max_retries, on_error=_error_handler) self.reconnect_backoff = 1 self.name = "EHSender-{}".format(uuid.uuid4()) self.redirected = None @@ -84,6 +84,12 @@ def __init__( # pylint: disable=super-init-not-called self._outcome = None self._condition = None + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close(exc_val) + async def open(self): """ Open the Sender using the supplied conneciton. @@ -302,17 +308,24 @@ async def send_batch(self, batch_event_data): """ if self.error: raise self.error - event_data_list = list(batch_event_data) - if len(event_data_list) == 0: - raise ValueError("batch_event_data must not be empty") - for i in range(1, len(event_data_list)): - if event_data_list[i].partition_key != event_data_list[i-1].partition_key: - raise ValueError("partition key of all EventData must be the same if being sent in a batch") - wrapper_event_data = BatchSendEventData(event_data_list) + + def verify_partition(ed_iter): + try: + ed = next(ed_iter) + partition_key = ed.partition_key + yield ed + except StopIteration: + raise ValueError("batch_event_data must not be empty") + for ed in ed_iter: + if ed.partition_key != partition_key: + raise ValueError("partition key of all EventData must be the same if being sent in a batch") + yield ed + + wrapper_event_data = _BatchSendEventData(verify_partition(batch_event_data)) wrapper_event_data.message.on_send_complete = self._on_outcome return await self._send_event_data(wrapper_event_data) - async def wait(self): + async def send_pending_messages(self): """ Wait until all transferred events have been sent. 
""" @@ -357,12 +370,6 @@ def _on_outcome(self, outcome, condition): self._outcome = outcome self._condition = condition - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.close(exc_val) - @staticmethod def _error(outcome, condition): return None if outcome == MessageSendResult.Ok else EventHubError(outcome, condition) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index ba2b910a7f1a..03a602616c4d 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -261,7 +261,7 @@ def encode_message(self): return self.message.encode_message() -class BatchSendEventData(EventData): +class _BatchSendEventData(EventData): def __init__(self, batch_event_data): # TODO: rethink if to_device should be included in self.message = BatchMessage(data=batch_event_data, multi_messages=True, properties=None) @@ -312,11 +312,11 @@ def selector(self): return ("amqp.annotation.x-opt-offset {} '{}'".format(operator, self.value)).encode('utf-8') @staticmethod - def from_start_of_sream(): + def from_start_of_stream(): return EventPosition("-1") @staticmethod - def from_end_of_sream(): + def from_end_of_stream(): return EventPosition("@latest") @staticmethod diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py index b68df67a5362..2d7a7be57638 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py @@ -13,7 +13,7 @@ def __init__(self, **kwargs): self.network_tracing = kwargs.get("debug", False) self.http_proxy = kwargs.get("http_proxy") self.auto_reconnect = kwargs.get("auto_reconnect", False) - self.keep_alive = kwargs.get("keep_alive", 1) + self.keep_alive = kwargs.get("keep_alive", 0) self.transport_type = 
TransportType.AmqpOverWebsocket if self.http_proxy \ else kwargs.get("transport_type", TransportType.Amqp) self.auth_timeout = kwargs.get("auth_timeout", 60) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index ac593e7faeee..4577d0332af5 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -56,7 +56,7 @@ def __init__(self, client, source, offset=None, prefetch=300, epoch=None, keep_a self.epoch = epoch self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect - self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) + self.retry_policy = errors.ErrorPolicy(max_retries=self.client.config.max_retries, on_error=_error_handler) self.reconnect_backoff = 1 self.properties = None self.redirected = None @@ -80,6 +80,52 @@ def __init__(self, client, source, offset=None, prefetch=300, epoch=None, keep_a client_name=self.name, properties=self.client.create_properties()) + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close(exc_val) + + def __iter__(self): + if not self.running: + self.open() + if not self.iter_started: + self.iter_started = True + self.messages_iter = self._handler.receive_messages_iter() + return self + + def __next__(self): + while True: + try: + message = next(self.messages_iter) + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") + self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + log.info("Receiver detached. 
Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + self.close(exception=error) + raise error + def open(self): """ Open the Receiver using the supplied conneciton. @@ -328,49 +374,3 @@ def receive(self, max_batch_size=None, timeout=None): error = EventHubError("Receive failed: {}".format(e)) self.close(exception=error) raise error - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close(exc_val) - - def __iter__(self): - if not self.running: - self.open() - if not self.iter_started: - self.iter_started = True - self.messages_iter = self._handler.receive_messages_iter() - return self - - def __next__(self): - while True: - try: - message = next(self.messages_iter) - event_data = EventData(message=message) - self.offset = event_data.offset - return event_data - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Receiver disconnected due to token error. Attempting reconnect.") - self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - self.reconnect() - log.info("Receiver detached. 
Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except Exception as e: - log.info("Unexpected error occurred (%r). Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - self.close(exception=error) - raise error diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index bd84ace039c3..7c82a728adf9 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -12,7 +12,7 @@ from uamqp import SendClient from uamqp.constants import MessageSendResult -from azure.eventhub.common import EventHubError, EventData, BatchSendEventData, _error_handler +from azure.eventhub.common import EventHubError, EventData, _BatchSendEventData, _error_handler log = logging.getLogger(__name__) @@ -31,7 +31,7 @@ class Sender(object): """ - def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=30, auto_reconnect=True): + def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=None, auto_reconnect=True): """ Instantiate an EventHub event Sender handler. 
@@ -61,8 +61,7 @@ def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=3 self.error = None self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect - # max_retries = client.config.retry_policy.max_retries - self.retry_policy = errors.ErrorPolicy(max_retries=3, on_error=_error_handler) + self.retry_policy = errors.ErrorPolicy(max_retries=self.client.config.max_retries, on_error=_error_handler) self.reconnect_backoff = 1 self.name = "EHSender-{}".format(uuid.uuid4()) if partition: @@ -80,6 +79,12 @@ def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=3 self._outcome = None self._condition = None + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close(exc_val) + def open(self): """ Open the Sender using the supplied conneciton. @@ -306,17 +311,24 @@ def send_batch(self, batch_event_data): """ if self.error: raise self.error - event_data_list = list(batch_event_data) - if len(event_data_list) == 0: - raise ValueError("batch_event_data must not be empty") - for i in range(1, len(event_data_list)): - if event_data_list[i].partition_key != event_data_list[i-1].partition_key: - raise ValueError("partition key of all EventData must be the same if being sent in a batch") - wrapper_event_data = BatchSendEventData(event_data_list) + + def verify_partition(ed_iter): + try: + ed = next(ed_iter) + partition_key = ed.partition_key + yield ed + except StopIteration: + raise ValueError("batch_event_data must not be empty") + for ed in ed_iter: + if ed.partition_key != partition_key: + raise ValueError("partition key of all EventData must be the same if being sent in a batch") + yield ed + + wrapper_event_data = _BatchSendEventData(verify_partition(batch_event_data)) wrapper_event_data.message.on_send_complete = self._on_outcome return self._send_event_data(wrapper_event_data) - def transfer(self, event_data, callback=None): + def queue_message(self, event_data, callback=None): 
""" Transfers an event data and notifies the callback when the operation is done. @@ -345,7 +357,7 @@ def transfer(self, event_data, callback=None): event_data.message.on_send_complete = lambda o, c: callback(o, Sender._error(o, c)) self._handler.queue_message(event_data.message) - def wait(self): + def send_pending_messages(self): """ Wait until all transferred events have been sent. @@ -399,12 +411,6 @@ def _on_outcome(self, outcome, condition): self._outcome = outcome self._condition = condition - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self.close(exc_val) - @staticmethod def _error(outcome, condition): return None if outcome == MessageSendResult.Ok else EventHubError(outcome, condition) diff --git a/sdk/eventhub/azure-eventhubs/conftest.py b/sdk/eventhub/azure-eventhubs/conftest.py index bf9e6ab77c3b..ce6f83adc6af 100644 --- a/sdk/eventhub/azure-eventhubs/conftest.py +++ b/sdk/eventhub/azure-eventhubs/conftest.py @@ -174,8 +174,6 @@ def connstr_receivers(connection_str): receiver = client.create_receiver("$default", p, prefetch=500, offset=EventPosition("@latest")) receivers.append(receiver) receiver.receive(timeout=1) - for r in receivers: - r.receive(timeout=1) yield connection_str, receivers for r in receivers: From 39320415678870ef1d283a243e58c96ea9ddd1c9 Mon Sep 17 00:00:00 2001 From: yijxie Date: Mon, 20 May 2019 16:36:08 -0700 Subject: [PATCH 12/49] add queue_message to async sender --- .../azure/eventhub/aio/sender_async.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 751106d7087e..2d9c741c5f29 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -325,6 +325,35 @@ def verify_partition(ed_iter): wrapper_event_data.message.on_send_complete = 
self._on_outcome return await self._send_event_data(wrapper_event_data) + def queue_message(self, event_data, callback=None): + """ + Transfers an event data and notifies the callback when the operation is done. + + :param event_data: The event to be sent. + :type event_data: ~azure.eventhub.common.EventData + :param callback: Callback to be run once the message has been send. + This must be a function that accepts two arguments. + :type callback: callable[~uamqp.constants.MessageSendResult, ~azure.eventhub.common.EventHubError] + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START eventhub_client_transfer] + :end-before: [END eventhub_client_transfer] + :language: python + :dedent: 4 + :caption: Transfers an event data and notifies the callback when the operation is done. + + """ + if self.error: + raise self.error + if not self.running: + self.open() + if event_data.partition_key and self.partition: + raise ValueError("EventData partition key cannot be used with a partition sender.") + if callback: + event_data.message.on_send_complete = lambda o, c: callback(o, Sender._error(o, c)) + self._handler.queue_message(event_data.message) + async def send_pending_messages(self): """ Wait until all transferred events have been sent. 
From 36423f4829e0ff9f8b76b7d3fea93bb7ad790413 Mon Sep 17 00:00:00 2001 From: yijxie Date: Mon, 20 May 2019 17:19:49 -0700 Subject: [PATCH 13/49] send_batch receives both list and iterator --- .../azure-eventhubs/azure/eventhub/aio/sender_async.py | 3 ++- sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 2d9c741c5f29..0ef46d519579 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -309,7 +309,8 @@ async def send_batch(self, batch_event_data): if self.error: raise self.error - def verify_partition(ed_iter): + def verify_partition(event_datas): + ed_iter = iter(event_datas) try: ed = next(ed_iter) partition_key = ed.partition_key diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 7c82a728adf9..ab113eac1c28 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -312,7 +312,8 @@ def send_batch(self, batch_event_data): if self.error: raise self.error - def verify_partition(ed_iter): + def verify_partition(event_datas): + ed_iter = iter(event_datas) try: ed = next(ed_iter) partition_key = ed.partition_key From c680b6aa563d0e6434e947dc7a39b3857457c1cb Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 23 May 2019 15:59:10 -0700 Subject: [PATCH 14/49] Update after adp review --- .../azure/eventhub/aio/receiver_async.py | 8 +++ .../azure-eventhubs/azure/eventhub/client.py | 69 ++++++++++++++++--- .../azure/eventhub/client_abstract.py | 6 +- .../azure-eventhubs/azure/eventhub/common.py | 2 +- .../azure/eventhub/configuration.py | 6 +- .../azure/eventhub/receiver.py | 38 ++-------- .../azure-eventhubs/azure/eventhub/sender.py 
| 21 ++---- 7 files changed, 87 insertions(+), 63 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index aafe4c8dcd20..2cb60893e1e6 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -54,6 +54,7 @@ def __init__( # pylint: disable=super-init-not-called self.client = client self.source = source self.offset = offset + self.iter_started = False self.prefetch = prefetch self.epoch = epoch self.keep_alive = keep_alive @@ -346,6 +347,8 @@ def __aiter__(self): return self async def __anext__(self): + if not self.running: + await self.open() while True: try: message = await self.messages_iter.__anext__() @@ -371,6 +374,11 @@ async def __anext__(self): error = EventHubError(str(shutdown), shutdown) await self.close(exception=error) raise error + except StopAsyncIteration: + raise + except asyncio.CancelledError: + # TODO: stop self.message_iter + raise except Exception as e: log.info("Unexpected error occurred (%r). 
Shutting down.", e) error = EventHubError("Receive failed: {}".format(e)) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 58cd975e4baf..9c231b351677 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -17,7 +17,7 @@ from urllib.parse import urlparse, unquote_plus, urlencode, quote_plus import uamqp -from uamqp import Message +from uamqp import Message, AMQPClient from uamqp import authentication from uamqp import constants @@ -88,7 +88,7 @@ def _create_auth(self, username=None, password=None): return authentication.SASTokenAuth.from_shared_access_key( self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) - def get_eventhub_information(self): + def get_properties(self): """ Get details on the specified EventHub. Keys in the details dictionary include: @@ -118,8 +118,8 @@ def get_eventhub_information(self): eh_info = response.get_data() output = {} if eh_info: - output['name'] = eh_info[b'name'].decode('utf-8') - output['type'] = eh_info[b'type'].decode('utf-8') + output['path'] = eh_info[b'name'].decode('utf-8') + # output['type'] = eh_info[b'type'].decode('utf-8') output['created_at'] = datetime.datetime.fromtimestamp(float(eh_info[b'created_at'])/1000) output['partition_count'] = eh_info[b'partition_count'] output['partition_ids'] = [p.decode('utf-8') for p in eh_info[b'partition_ids']] @@ -127,8 +127,57 @@ def get_eventhub_information(self): finally: mgmt_client.close() + def get_partition_properties(self, partition): + """ + Get information on the specified partition async. + Keys in the details dictionary include: + + -'name' + -'type' + -'partition' + -'begin_sequence_number' + -'last_enqueued_sequence_number' + -'last_enqueued_offset' + -'last_enqueued_time_utc' + -'is_partition_empty' + + :param partition: The target partition id. 
+ :type partition: str + :rtype: dict + """ + alt_creds = { + "username": self._auth_config.get("iot_username"), + "password": self._auth_config.get("iot_password")} + try: + mgmt_auth = self._create_auth(**alt_creds) + mgmt_client = AMQPClient(self.mgmt_target, auth=mgmt_auth, debug=self.debug) + mgmt_client.open() + mgmt_msg = Message(application_properties={'name': self.eh_name, + 'partition': partition}) + response = mgmt_client.mgmt_request( + mgmt_msg, + constants.READ_OPERATION, + op_type=b'com.microsoft:partition', + status_code_field=b'status-code', + description_fields=b'status-description') + partition_info = response.get_data() + output = {} + if partition_info: + output['event_hub_path'] = partition_info[b'name'].decode('utf-8') + # output['type'] = partition_info[b'type'].decode('utf-8') + output['id'] = partition_info[b'partition'].decode('utf-8') + output['beginning_sequence_number'] = partition_info[b'begin_sequence_number'] + output['last_enqueued_sequence_number'] = partition_info[b'last_enqueued_sequence_number'] + output['last_enqueued_offset'] = partition_info[b'last_enqueued_offset'].decode('utf-8') + output['last_enqueued_time_utc'] = datetime.datetime.utcfromtimestamp( + float(partition_info[b'last_enqueued_time_utc'] / 1000)) + output['is_empty'] = partition_info[b'is_partition_empty'] + return output + finally: + mgmt_client.close() + def create_receiver( - self, consumer_group, partition, offset=None, epoch=None, operation=None, + self, consumer_group, partition, event_position=None, epoch=None, operation=None, prefetch=None, keep_alive=None, auto_reconnect=None, @@ -166,13 +215,17 @@ def create_receiver( source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( self.address.hostname, path, consumer_group, partition) handler = Receiver( - self, source_url, offset=offset, epoch=epoch, prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) + self, source_url, event_position=event_position, epoch=epoch, 
prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) return handler def create_epoch_receiver( self, consumer_group, partition, epoch, prefetch=300, - operation=None): - return self.create_receiver(consumer_group, partition, epoch=epoch, prefetch=prefetch, operation=operation) + keep_alive=None, + auto_reconnect=None, + operation=None + ): + return self.create_receiver(consumer_group, partition, epoch=epoch, prefetch=prefetch, + keep_alive=keep_alive, auto_reconnect=auto_reconnect, operation=operation) def create_sender(self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None): """ diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index 1435d15bd2be..a8aca2f70cf2 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -143,8 +143,8 @@ def __init__( username = username or url_username url_password = unquote_plus(self.address.password) if self.address.password else None password = password or url_password - if (not username or not password) and not sas_token: - raise ValueError("Please supply either username and password, or a SAS token") + if (not username or not password) and not sas_token and not aad_credential: + raise ValueError("Please supply any of username and password, or a SAS token, or an AAD credential") self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) self._auth_config = {'username': username, 'password': password} self.get_auth = functools.partial(self._create_auth) @@ -266,7 +266,7 @@ def from_iothub_connection_string(cls, conn_str, **kwargs): return client @classmethod - def from_aad_credential(cls, address, aad_credential, eventhub=None, **kwargs): + def from_azure_identity(cls, address, aad_credential, eventhub=None, **kwargs): address = _build_uri(address, eventhub) return cls(address, 
aad_credential=aad_credential, **kwargs) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index 03a602616c4d..fc70107f33cc 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -294,7 +294,7 @@ def __init__(self, value, inclusive=False): :param inclusive: Whether to include the supplied value as the start point. :type inclusive: bool """ - self.value = value + self.value = value if value else "-1" self.inclusive = inclusive def selector(self): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py index 2d7a7be57638..15b1d69550c3 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py @@ -13,9 +13,11 @@ def __init__(self, **kwargs): self.network_tracing = kwargs.get("debug", False) self.http_proxy = kwargs.get("http_proxy") self.auto_reconnect = kwargs.get("auto_reconnect", False) - self.keep_alive = kwargs.get("keep_alive", 0) + self.keep_alive = kwargs.get("keep_alive", 0) # 0 or None means to not keep alive self.transport_type = TransportType.AmqpOverWebsocket if self.http_proxy \ else kwargs.get("transport_type", TransportType.Amqp) self.auth_timeout = kwargs.get("auth_timeout", 60) - self.prefetch = kwargs.get("prefetch") + self.prefetch = kwargs.get("prefetch", 300) + self.max_batch_size = kwargs.get("max_batch_size") + self.receive_timeout = kwargs.get("receive_timeout", 0) self.send_timeout = kwargs.get("send_timeout", 60) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 4577d0332af5..f5faafdb9f95 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -33,7 +33,7 @@ class 
Receiver(object): timeout = 0 _epoch = b'com.microsoft:epoch' - def __init__(self, client, source, offset=None, prefetch=300, epoch=None, keep_alive=None, auto_reconnect=True): + def __init__(self, client, source, event_position=None, prefetch=300, epoch=None, keep_alive=None, auto_reconnect=False): """ Instantiate a receiver. @@ -50,7 +50,7 @@ def __init__(self, client, source, offset=None, prefetch=300, epoch=None, keep_a self.running = False self.client = client self.source = source - self.offset = offset + self.offset = event_position self.iter_started = False self.prefetch = prefetch self.epoch = epoch @@ -120,6 +120,8 @@ def __next__(self): error = EventHubError(str(shutdown), shutdown) self.close(exception=error) raise error + except StopIteration: + raise except Exception as e: log.info("Unexpected error occurred (%r). Shutting down.", e) error = EventHubError("Receive failed: {}".format(e)) @@ -235,38 +237,6 @@ def reconnect(self): while not self._reconnect(): time.sleep(self.reconnect_backoff) - def get_handler_state(self): - """ - Get the state of the underlying handler with regards to start - up processes. - - :rtype: ~uamqp.constants.MessageReceiverState - """ - # pylint: disable=protected-access - return self._handler._message_receiver.get_state() - - def has_started(self): - """ - Whether the handler has completed all start up processes such as - establishing the connection, session, link and authentication, and - is not ready to process messages. - **This function is now deprecated and will be removed in v2.0+.** - - :rtype: bool - """ - # pylint: disable=protected-access - timeout = False - auth_in_progress = False - if self._handler._connection.cbs: - timeout, auth_in_progress = self._handler._auth.handle_token() - if timeout: - raise EventHubError("Authorization timeout.") - if auth_in_progress: - return False - if not self._handler._client_ready(): - return False - return True - def close(self, exception=None): """ Close down the handler. 
If the handler has already closed, diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index ab113eac1c28..a6bc3fe8b128 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -31,7 +31,7 @@ class Sender(object): """ - def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=None, auto_reconnect=True): + def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=None, auto_reconnect=False): """ Instantiate an EventHub event Sender handler. @@ -178,16 +178,6 @@ def reconnect(self): while not self._reconnect(): time.sleep(self.reconnect_backoff) - def get_handler_state(self): - """ - Get the state of the underlying handler with regards to start - up processes. - - :rtype: ~uamqp.constants.MessageSenderState - """ - # pylint: disable=protected-access - return self._handler._message_sender.get_state() - def close(self, exception=None): """ Close down the handler. If the handler has already closed, @@ -259,7 +249,7 @@ def _send_event_data(self, event_data): raise error else: return self._outcome - + ''' def send(self, event_data): """ Sends an event data and blocks until acknowledgement is @@ -286,9 +276,10 @@ def send(self, event_data): if event_data.partition_key and self.partition: raise ValueError("EventData partition key cannot be used with a partition sender.") event_data.message.on_send_complete = self._on_outcome - return self._send_event_data(event_data) + self._send_event_data(event_data) + ''' - def send_batch(self, batch_event_data): + def send(self, batch_event_data): """ Sends an event data and blocks until acknowledgement is received or operation times out. 
@@ -327,7 +318,7 @@ def verify_partition(event_datas): wrapper_event_data = _BatchSendEventData(verify_partition(batch_event_data)) wrapper_event_data.message.on_send_complete = self._on_outcome - return self._send_event_data(wrapper_event_data) + self._send_event_data(wrapper_event_data) def queue_message(self, event_data, callback=None): """ From 805b8360f1a723d54dd56adb7bc67079140f12c9 Mon Sep 17 00:00:00 2001 From: yijxie Date: Fri, 24 May 2019 15:07:37 -0700 Subject: [PATCH 15/49] send accepts EventData, list, iteratable --- .../azure/eventhub/__init__.py | 1 + .../azure-eventhubs/azure/eventhub/client.py | 14 +++----- .../azure/eventhub/client_abstract.py | 8 ----- .../azure-eventhubs/azure/eventhub/sender.py | 35 ++++++++++--------- 4 files changed, 24 insertions(+), 34 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py index e2bcc43ed877..8f6a9e97cfa9 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py @@ -13,6 +13,7 @@ from uamqp.constants import TransportType __all__ = [ + __version__, "EventData", "EventHubError", "EventPosition", diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 9c231b351677..11a8d73154ec 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -127,6 +127,9 @@ def get_properties(self): finally: mgmt_client.close() + def get_partition_ids(self): + return self.get_properties()['partition_ids'] + def get_partition_properties(self, partition): """ Get information on the specified partition async. 
@@ -177,7 +180,7 @@ def get_partition_properties(self, partition): mgmt_client.close() def create_receiver( - self, consumer_group, partition, event_position=None, epoch=None, operation=None, + self, consumer_group, partition, event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, keep_alive=None, auto_reconnect=None, @@ -218,15 +221,6 @@ def create_receiver( self, source_url, event_position=event_position, epoch=epoch, prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) return handler - def create_epoch_receiver( - self, consumer_group, partition, epoch, prefetch=300, - keep_alive=None, - auto_reconnect=None, - operation=None - ): - return self.create_receiver(consumer_group, partition, epoch=epoch, prefetch=prefetch, - keep_alive=keep_alive, auto_reconnect=auto_reconnect, operation=operation) - def create_sender(self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None): """ Add a sender to the client to send EventData object to an EventHub. 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index a8aca2f70cf2..90c9553d34a3 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -5,7 +5,6 @@ from __future__ import unicode_literals import logging -import datetime import sys import uuid import time @@ -17,15 +16,8 @@ except ImportError: from urllib.parse import urlparse, unquote_plus, urlencode, quote_plus -import uamqp -from uamqp import Message -from uamqp import authentication -from uamqp import constants from azure.eventhub import __version__ -from azure.eventhub.sender import Sender -from azure.eventhub.receiver import Receiver -from azure.eventhub.common import EventHubError, parse_sas_token from azure.eventhub.configuration import Configuration log = logging.getLogger(__name__) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index a6bc3fe8b128..80464354f366 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -279,7 +279,21 @@ def send(self, event_data): self._send_event_data(event_data) ''' - def send(self, batch_event_data): + @staticmethod + def _verify_partition(event_datas): + ed_iter = iter(event_datas) + try: + ed = next(ed_iter) + partition_key = ed.partition_key + yield ed + except StopIteration: + raise ValueError("batch_event_data must not be empty") + for ed in ed_iter: + if ed.partition_key != partition_key: + raise ValueError("partition key of all EventData must be the same if being sent in a batch") + yield ed + + def send(self, event_data): """ Sends an event data and blocks until acknowledgement is received or operation times out. 
@@ -302,21 +316,10 @@ def send(self, batch_event_data): """ if self.error: raise self.error - - def verify_partition(event_datas): - ed_iter = iter(event_datas) - try: - ed = next(ed_iter) - partition_key = ed.partition_key - yield ed - except StopIteration: - raise ValueError("batch_event_data must not be empty") - for ed in ed_iter: - if ed.partition_key != partition_key: - raise ValueError("partition key of all EventData must be the same if being sent in a batch") - yield ed - - wrapper_event_data = _BatchSendEventData(verify_partition(batch_event_data)) + if isinstance(event_data, EventData): + wrapper_event_data = event_data + else: + wrapper_event_data = _BatchSendEventData(self._verify_partition(event_data)) wrapper_event_data.message.on_send_complete = self._on_outcome self._send_event_data(wrapper_event_data) From 2140eec4babf6e087a8354947a8fdf6eb201547f Mon Sep 17 00:00:00 2001 From: Yunhao Ling <47871814+yunhaoling@users.noreply.github.com> Date: Fri, 24 May 2019 15:48:21 -0700 Subject: [PATCH 16/49] Event Hub Track 2 (#5) * Initial commit * Initial commit * Initial commit * event hub client * Update README.md * Update README.md Fix typos * Memory leak * Support timestamp filter * Support timestamp filter * Update README.md * Add sender and refactor * Added abstract classes Todo - Migrate Base Class Wireframes - Migrate Azure Classes * First draft of class wires directly ported from .net (might be some minor gaps) * send example * Set allowed sasl mechs * Remove client.py * Receiver update * Add dummy send api * logging updates * Error handling, reconnect and logging * Add app properties to event data * unbind transport on connection close * timestamp filter on py2 * module version * Reconnect once when link/session/connection close * Add SessionPolicy * Add client info * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub 
partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * run client in non-blocking mode * Added unit testing * Implemented the following functionality - Azure_storage_checkpoint_manager - AzureBlobLease isExpired Todo Implement partition manager Implement partition context Test full implementation * Implemented Processing of First Epoh Todo - Fix lease bug that is breaking subsequent epochs * Changes - Completed End to End EPH Flow - Removed storage dependancy on downloading full blob to check lease state Todo - Add thread and queue for checking lease state and other storage operations - Ensure eventhub client shuts down properly - Find way to update partition pumps without restarting them - Other optimizations * Move examples out * Changes - Added thread pool executor to enable conncurent execution of partitions - Removed partition pump dependency on max_batch Todo - Ensure eventhub client shuts down properly (This is causing errors) - Add thread pool for making checkpoint code conccurent - Add thread and queue for checking lease state and other storage operations to enable async - Find way to reassign active partition pumps without restarting them - Other optimizations * Add async receive * Changes - Added logs - Fixed error causing client to prematurely shutdown * Manual link flow control for async receive * Workaround for stuck async receiver * Local variable names * Changes - Optimized logging and comments Todo - Add concurecny mechanim for azure storage - Depricate partition pump event queue and update to latest version of the client * Create Dockerfile * Stuck async receiver * credit keeps increasing in async receiver * Changes - Added asnyc event 
hub client support - Optimized logging and comments Todo - Add concurecny mechanim for azure storage * Updated docker file as requested * Added EPH example * Fix hardcoded HTTP header * Made suggested changes * Bug fix - Fixed event loop bugs. In windows eventloop is thread dependent but in ubuntu the eventloop is threadsafe so you need to differentiate the thread specific eventloop from the host one. * Updated loop naming convention to be consistent * Added option to pass asyncio event_loop to eph * Updated docker file * Fixed critical bug with partition manager and aquirec mechanisiims Todo : Identitfy and fix remaining bug that is causing all pumps to shut down when a second host starts * Bug fixes - Fixed bug where closing a pump closed a host - Fixed bug where error partitioned were not removed - Fixed bug where leases were renewed at an incorrect interval * Updated file headers Removed author reference * - Fixed bug in eph example that caused host to terminate prematurely - Made the lease renewal and checkpoint creation "multithreaded" * Increase the size of the connection pool The default connection pool size was too small for scenarios where multiple partitions were handled by one EventProcessorHost. If the amount of partitions handled is large, we might end up doing very many connections at the same time due to the multi-threaded blob-handling. For this reason, you might hit the OS limits that restrict the number of open files per process that in MacOS is not very big. This can be worked around with something like: `ulimit -n 2560` * Decrease info logging verbosity * added ability to toggle pump shutdown when all messages on a pump are processed. * Install also eventhubsprocessor * Default to keeping the pumps It is more optimal to keep the pumps alive even if there are no messages so that it is faster to pickup when messages start to arrive. * Pipe and event injector for Windows * Event injector updates * EHClient refactoring. EHClient leaks. 
Sender part 1. * Send support * ren eventhubsprocessor eventprocessorhost * Changes - Added event hub config to simplify installation story * Changes - Added optional eventprocessor_params for passing context to the event processor - Made the storage manager mandatatory * Fix memory leaks * logging * Fix: 1. process crash due to race in client stop and connection remote close. 2. handle client close in async receiver. 3. fail pending sends when sender is closed. 4. some debug logging. * tests * test: recv from multiple partitions * test utility * logging update * Support callback based send for high throughput * Workaroud memory issue in proton.reactor.ApplicationEvent * renamed eventprocessor to eventprocessorhost for consistency * updated docker file * fixed typo in url * Added amqp port to address * Updated sample documentation since url is auto encoded by config * Updated docs * Implement timeout for send * Async sender and example * Close injector pipe * Use send timer to also check queued messages * Add partition pump loop to partition_context This gives the EventProcessor access to the partition_pump loop object. This way if One desires to run synchronous code inside process_events_async one can utilize the loop object to run the synchronous code using await context.pump_loop.run_in_executor(None, bla) * Include details in send error * Release deliveries when sender is closed * added validation to unquoted sas key * added support for custom eventhub client prefetch size * Update README.md * Update README.md * Added Docker instructions and fixed Dockerfile (#18) * Removed Dockerfile from the main folder and fixed Dockerfile example * Added build and run Dockerfile documentation * Update Readme * Removed rm qpid-proton folder * Removed /usr/share copy * Disallow a sender/receiver to be registered more than once * Make everything async in EPH I have removed all usage of threads thoroughout the code. Using threads to run pumps etc. 
Causes async code written into the event-processor to become caotic (you need to follow which loop is currently being used in the call to prevent loops not being found or using the wrong loop (There is the main loop and then loops that are created inside threads) Things become caotic when the event processor is being called by objects that run under different loops. So, no Threading except usage of asyncio run_in_executor. This is done mostly for azure blob api calls. Also changed the bla_async methods to not block. this way, when calling open_async for the the event-processor-host, the command will exit once the EPH is started. Due to the above, see the edited example/eph.py where I added a monitor that makes sure the EPH is still running (Could be replaced by loop.run_forever()) in the example file I have also incorporated a test class for gracefully killing the EPH after 30 seconds. this works, nevertheless takes a while to close as we are waiting for timeouts on the eventhubs connections. 
* Started removing proton code * Removed most of proton _impl * Removed more code * Working sender * Updates to sender * Added some tests/samples * Some progress on clients * Fixed samples * Added azure namespace * #25 Partition key cannot be set for events * Updated version * Updated README * Renamed package to eventhub * Started EPH modifications * Updated imports * Fixed target urls * Updated logging * Updated async message receive * updated test imports * Added mgmt call to get eh info * Updated samples * Updated receive test * Added send and receive test clients * Updated uamqp dependency * Merged updates from dev * Fixed typos * Updated EPH sample * Started docstrings * Converted tests to pytest * Updates to batch receive * Started adding docstrings * More docstrings * bumped version * Started porting test suite * More tests and improvements * Moved eph tests * Some sample cleanup * Some test updates * Some test restructure * Docstring cleanup * Fixed some merge artifacts * Fixed formatting error * Removed delivery count * Nested package directory * Support custom URL suffix * Support custom URL suffix * Support for EventData device ID * Reverted nested directory * Updated release notes * Workaround for partitionkey * Finished partition key workaround * beta2 fixes * pylint fixes * Trigger CI * Test fixes * Added package manifest * Added warning for Python 2.7 support Support for issues #36 and #38 * Started adding scenario tests * More test scenarios * Better docstring formatting * Started iothub support * Fixed long running test * Fixed typo and memory leak * Restructure * IoThub support * Updates for RC1 release * Fix long running test * Docstring and sample cleanups * Working on error retry * Improved error processing * Fixed partition manager * Progress on IotHub error * Some test updates * Updated uamqp dependency * Restructure for independent connections * Added HTTP proxy support Fix for issue #41 * Fixed some tests + samples * pylint fixes * bumped 
version * Added keepalive config and some eph fixes * Made reconnect configurable * Added more EPH options * Bumped version * Pylint fix * Pylint fix * Added send and auth timeouts * Changed log formatting. Retry on reconnect * Pylint fixes * Renamed internal async module * Updated send example to match recv Fix for issue #56 * Added build badge to readme * Fix for repeat startup * Added more storage connect options to EPH * Bumped version * Handler blocked until client started * Added event data methods * Fix pylint * Fix 3.7 CI * Fix 3.7 CI * Updated pylint version * Pylint fixes * Updated README * Fixed readme badge refresh * Fixed bug in Azure namespace package * Updated manifest * Parse enqueued time as UTC Fixes #72. * Updates for release 1.2.0 (#81) * Made setup 2.7 compatible * Separated async tests * Support 2.7 types * Bumped version * Added non-ascii tests * Fix CI * Fix Py27 pylint * Added iot sample * Updated sender/receiver client opening * bumped version * Updated tests * Fixed test name * Fixed test env settings * Skip eph test * Updates for v1.3.0 (#91) * Added support for storing the state of the Event Processor along the Checkpoint. Both Checkpoint and the EP state are stored as pickled objects. * Fixing pylint complaints. * Switched from pickle back to JSON for lease persistence. * Fixes bug when accessing leases that don't contain EP context. Also, minor renaming. * Better SAS token support * Fixed pylint * Improved auth error handling * Test stabilization * Improved stored EPH context * Updated EPH context storing * Skip test on OSX * Skip tests on OSX Fail due to large message body bug. * Some cleanup * Fixed error handling * Improved SAS token parsing * Fixed datetime offset (#99) * Fixed datetime offset * Updated pylint * Removed 3.4 pylint pass * Fixed bug in error handling (#100) * Migrate event hub sdk to central repo 1. add verifiable code snippets into docstring 2. update readme according to the template 3. 
add livetest mark and config 4. optimize code layout/structure * 1. document formatting 2. separate async/sync example tests * Fix build error: 1. uamqp dependency mismatch 2. rename test_examples in eventhub to avoid mismatch * This should fix build error * remove tests import and add sys path to solve build error * add live test for sending BatchEvent with application_properties, new live test passed with new uamqp wheel locally installed * Add get_partition_info in Event Hub * add get_partition_info * Add telemetry information to the connection properties * Disable smart split in batch message --- .../eventhub/aio/event_hubs_client_async.py | 49 +++++++++++++++++++ .../azure/eventhub/aio/receiver_async.py | 6 +-- .../azure/eventhub/aio/sender_async.py | 6 +-- .../azure/eventhub/client_abstract.py | 15 +++++- .../azure-eventhubs/azure/eventhub/common.py | 2 +- .../azure/eventhub/constants.py | 8 +++ .../azure/eventhub/receiver.py | 6 +-- .../azure-eventhubs/azure/eventhub/sender.py | 6 +-- 8 files changed, 84 insertions(+), 14 deletions(-) create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index 275f76f6ee62..4f8760b02575 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -117,6 +117,55 @@ async def get_eventhub_information(self): finally: await mgmt_client.close_async() + async def get_partition_information(self, partition): + """ + Get information on the specified partition async. + Keys in the details dictionary include: + + -'name' + -'type' + -'partition' + -'begin_sequence_number' + -'last_enqueued_sequence_number' + -'last_enqueued_offset' + -'last_enqueued_time_utc' + -'is_partition_empty' + + :param partition: The target partition id. 
+ :type partition: str + :rtype: dict + """ + alt_creds = { + "username": self._auth_config.get("iot_username"), + "password": self._auth_config.get("iot_password")} + try: + mgmt_auth = self._create_auth(**alt_creds) + mgmt_client = AMQPClientAsync(self.mgmt_target, auth=mgmt_auth, debug=self.debug) + await mgmt_client.open_async() + mgmt_msg = Message(application_properties={'name': self.eh_name, + 'partition': partition}) + response = await mgmt_client.mgmt_request_async( + mgmt_msg, + constants.READ_OPERATION, + op_type=b'com.microsoft:partition', + status_code_field=b'status-code', + description_fields=b'status-description') + partition_info = response.get_data() + output = {} + if partition_info: + output['name'] = partition_info[b'name'].decode('utf-8') + output['type'] = partition_info[b'type'].decode('utf-8') + output['partition'] = partition_info[b'partition'].decode('utf-8') + output['begin_sequence_number'] = partition_info[b'begin_sequence_number'] + output['last_enqueued_sequence_number'] = partition_info[b'last_enqueued_sequence_number'] + output['last_enqueued_offset'] = partition_info[b'last_enqueued_offset'].decode('utf-8') + output['last_enqueued_time_utc'] = datetime.datetime.utcfromtimestamp( + float(partition_info[b'last_enqueued_time_utc'] / 1000)) + output['is_partition_empty'] = partition_info[b'is_partition_empty'] + return output + finally: + await mgmt_client.close_async() + def create_receiver( self, consumer_group, partition, offset=None, epoch=None, operation=None, prefetch=None, keep_alive=None, auto_reconnect=None, loop=None): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 2cb60893e1e6..3db3a703ef49 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -81,7 +81,7 @@ def __init__( # pylint: disable=super-init-not-called 
error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties(), + properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) async def __aenter__(self): @@ -164,7 +164,7 @@ async def open(self): error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties(), + properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) await self._handler.open_async() while not await self._handler.client_ready_async(): @@ -189,7 +189,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties(), + properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) try: await self._handler.open_async() diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 0ef46d519579..6553aed57b08 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -79,7 +79,7 @@ def __init__( # pylint: disable=super-init-not-called error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties(), + properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) self._outcome = None self._condition = None @@ -119,7 +119,7 @@ async def open(self): error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties(), + properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) await self._handler.open_async() while not await self._handler.client_ready_async(): @@ -136,7 
+136,7 @@ async def _reconnect(self): error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties(), + properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) try: await self._handler.open_async() diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index 90c9553d34a3..551db1853f00 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -19,6 +19,7 @@ from azure.eventhub import __version__ from azure.eventhub.configuration import Configuration +from azure.eventhub import constants log = logging.getLogger(__name__) @@ -266,7 +267,7 @@ def from_azure_identity(cls, address, aad_credential, eventhub=None, **kwargs): def _create_auth(self, username=None, password=None): pass - def create_properties(self): # pylint: disable=no-self-use + def create_properties(self, user_agent=None): # pylint: disable=no-self-use """ Format the properties with which to instantiate the connection. This acts like a user agent over HTTP. @@ -278,6 +279,18 @@ def create_properties(self): # pylint: disable=no-self-use properties["version"] = __version__ properties["framework"] = "Python {}.{}.{}".format(*sys.version_info[0:3]) properties["platform"] = sys.platform + + final_user_agent = 'azsdk-python-eventhub/{} ({}; {})'.format( + __version__, properties["framework"], sys.platform) + if user_agent: + final_user_agent = '{}, {}'.format(final_user_agent, user_agent) + + if len(final_user_agent) > constants.MAX_USER_AGENT_LENGTH: + raise ValueError("The user-agent string cannot be more than {} in length." 
+ "Current user_agent string is: {} with length: {}".format( + constants.MAX_USER_AGENT_LENGTH, final_user_agent, len(final_user_agent))) + + properties["user-agent"] = final_user_agent return properties def _process_redirect_uri(self, redirect): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index fc70107f33cc..abca83c7b4ab 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -264,7 +264,7 @@ def encode_message(self): class _BatchSendEventData(EventData): def __init__(self, batch_event_data): # TODO: rethink if to_device should be included in - self.message = BatchMessage(data=batch_event_data, multi_messages=True, properties=None) + self.message = BatchMessage(data=batch_event_data, multi_messages=False, properties=None) class EventPosition(object): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py new file mode 100644 index 000000000000..628bb3df4ddf --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py @@ -0,0 +1,8 @@ +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+#-------------------------------------------------------------------------- + + +MAX_USER_AGENT_LENGTH = 512 diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index f5faafdb9f95..5c60016fc3f0 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -78,7 +78,7 @@ def __init__(self, client, source, event_position=None, prefetch=300, epoch=None error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties()) + properties=self.client.create_properties(self.client.config.user_agent)) def __enter__(self): return self @@ -166,7 +166,7 @@ def open(self): error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties()) + properties=self.client.create_properties(self.client.config.user_agent)) self._handler.open() while not self._handler.client_ready(): time.sleep(0.05) @@ -190,7 +190,7 @@ def _reconnect(self): # pylint: disable=too-many-statements error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties()) + properties=self.client.create_properties(self.client.config.user_agent)) try: self._handler.open() while not self._handler.client_ready(): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 80464354f366..d50fe05ab861 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -75,7 +75,7 @@ def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=N error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties()) + 
properties=self.client.create_properties(self.client.config.user_agent)) self._outcome = None self._condition = None @@ -114,7 +114,7 @@ def open(self): error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties()) + properties=self.client.create_properties(self.client.config.user_agent)) self._handler.open() while not self._handler.client_ready(): time.sleep(0.05) @@ -131,7 +131,7 @@ def _reconnect(self): error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, client_name=self.name, - properties=self.client.create_properties()) + properties=self.client.create_properties(self.client.config.user_agent)) try: self._handler.open() self._handler.queue_message(*unsent_events) From 273368f4fd4a278a206f4aece0b7ee25cdd772d8 Mon Sep 17 00:00:00 2001 From: yijxie Date: Fri, 24 May 2019 15:51:59 -0700 Subject: [PATCH 17/49] change epoch to exclusive_receiver_priority --- sdk/eventhub/azure-eventhubs/azure/eventhub/common.py | 4 ++-- sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index fc70107f33cc..b395c83da86a 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -130,9 +130,9 @@ def sequence_number(self): @property def offset(self): """ - The offset of the event data object. + The position of the event data object. 
- :rtype: ~azure.eventhub.common.Offset + :rtype: ~azure.eventhub.common.EventPosition """ try: return EventPosition(self._annotations[EventData.PROP_OFFSET].decode('UTF-8')) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index f5faafdb9f95..947b49524ef6 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -33,7 +33,7 @@ class Receiver(object): timeout = 0 _epoch = b'com.microsoft:epoch' - def __init__(self, client, source, event_position=None, prefetch=300, epoch=None, keep_alive=None, auto_reconnect=False): + def __init__(self, client, source, event_position=None, prefetch=300, exclusive_receiver_priority=None, keep_alive=None, auto_reconnect=False): """ Instantiate a receiver. @@ -53,7 +53,7 @@ def __init__(self, client, source, event_position=None, prefetch=300, epoch=None self.offset = event_position self.iter_started = False self.prefetch = prefetch - self.epoch = epoch + self.exclusive_receiver_priority = exclusive_receiver_priority self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect self.retry_policy = errors.ErrorPolicy(max_retries=self.client.config.max_retries, on_error=_error_handler) @@ -66,8 +66,8 @@ def __init__(self, client, source, event_position=None, prefetch=300, epoch=None source = Source(self.source) if self.offset is not None: source.set_filter(self.offset.selector()) - if epoch: - self.properties = {types.AMQPSymbol(self._epoch): types.AMQPLong(int(epoch))} + if exclusive_receiver_priority: + self.properties = {types.AMQPSymbol(self._epoch): types.AMQPLong(int(exclusive_receiver_priority))} self._handler = ReceiveClient( source, auth=self.client.get_auth(), From cec2fdbbeec12d2087b0a2b9d2831ef0445dbee8 Mon Sep 17 00:00:00 2001 From: yijxie Date: Fri, 24 May 2019 15:59:50 -0700 Subject: [PATCH 18/49] fix small problem --- 
sdk/eventhub/azure-eventhubs/azure/eventhub/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 11a8d73154ec..0632275187cf 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -218,7 +218,8 @@ def create_receiver( source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( self.address.hostname, path, consumer_group, partition) handler = Receiver( - self, source_url, event_position=event_position, epoch=epoch, prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) + self, source_url, event_position=event_position, exclusive_receiver_priority=exclusive_receiver_priority, + prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) return handler def create_sender(self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None): From fa40e94a047efb386df602f70e82452b810c6aa6 Mon Sep 17 00:00:00 2001 From: yijxie Date: Fri, 24 May 2019 18:05:39 -0700 Subject: [PATCH 19/49] remove uamqp dependency --- .../azure/eventhub/__init__.py | 9 +++++---- .../azure-eventhubs/azure/eventhub/common.py | 20 +++++++++++++------ .../azure/eventhub/configuration.py | 2 +- .../azure/eventhub/constants.py | 3 +++ 4 files changed, 23 insertions(+), 11 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py index 8f6a9e97cfa9..38ad0e26a14d 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py @@ -9,11 +9,12 @@ from azure.eventhub.client import EventHubClient from azure.eventhub.sender import Sender from azure.eventhub.receiver import Receiver -from uamqp.constants import MessageSendResult -from uamqp.constants import TransportType +from .constants 
import MessageSendResult +from .constants import TransportType +from .common import FIRST_AVAILABLE, NEW_EVENTS_ONLY __all__ = [ - __version__, + "__version__", "EventData", "EventHubError", "EventPosition", @@ -22,5 +23,5 @@ "Receiver", "MessageSendResult", "TransportType", + "FIRST_AVAILABLE", "NEW_EVENTS_ONLY", ] - diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index c91eb690f68a..9a873d77f049 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -11,7 +11,8 @@ import six -from uamqp import Message, BatchMessage +import uamqp +from uamqp import BatchMessage from uamqp import types, constants, errors from uamqp.message import MessageHeader, MessageProperties @@ -63,6 +64,9 @@ def parse_sas_token(sas_token): return sas_data +Message = uamqp.Message + + class EventData(object): """ The EventData class is a holder of event content. @@ -312,12 +316,12 @@ def selector(self): return ("amqp.annotation.x-opt-offset {} '{}'".format(operator, self.value)).encode('utf-8') @staticmethod - def from_start_of_stream(): - return EventPosition("-1") + def first_available(): + return FIRST_AVAILABLE - @staticmethod - def from_end_of_stream(): - return EventPosition("@latest") + @classmethod + def new_events_only(cls): + return NEW_EVENTS_ONLY @staticmethod def from_offset(offset, inclusive=False): @@ -332,6 +336,10 @@ def from_enqueued_time(enqueued_time, inclusive=False): return EventPosition(enqueued_time, inclusive) +FIRST_AVAILABLE = EventPosition("-1") +NEW_EVENTS_ONLY = EventPosition("latest") + + class EventHubError(Exception): """ Represents an error happened in the client. 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py index 15b1d69550c3..a5fb46d804ac 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py @@ -3,7 +3,7 @@ # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- -from uamqp.constants import TransportType +from .constants import TransportType class Configuration(object): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py index 628bb3df4ddf..e71d3815f48f 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/constants.py @@ -4,5 +4,8 @@ # license information. #-------------------------------------------------------------------------- +from uamqp import constants MAX_USER_AGENT_LENGTH = 512 +TransportType = constants.TransportType +MessageSendResult = constants.MessageSendResult From 37938c3e1c3ca7a4797edbe5caa984a5fd99141b Mon Sep 17 00:00:00 2001 From: Yunhao Ling <47871814+yunhaoling@users.noreply.github.com> Date: Mon, 27 May 2019 15:11:59 -0700 Subject: [PATCH 20/49] Eventhub track2 (#6) * Initial commit * Initial commit * Initial commit * event hub client * Update README.md * Update README.md Fix typos * Memory leak * Support timestamp filter * Support timestamp filter * Update README.md * Add sender and refactor * Added abstract classes Todo - Migrate Base Class Wireframes - Migrate Azure Classes * First draft of class wires directly ported from .net (might be some minor gaps) * send example * Set allowed sasl mechs * Remove client.py * Receiver update * Add dummy send api * logging updates * Error handling, reconnect and logging * Add app properties to event data * 
unbind transport on connection close * timestamp filter on py2 * module version * Reconnect once when link/session/connection close * Add SessionPolicy * Add client info * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * run client in non-blocking mode * Added unit testing * Implemented the following functionality - Azure_storage_checkpoint_manager - AzureBlobLease isExpired Todo Implement partition manager Implement partition context Test full implementation * Implemented Processing of First Epoh Todo - Fix lease bug that is breaking subsequent epochs * Changes - Completed End to End EPH Flow - Removed storage dependancy on downloading full blob to check lease state Todo - Add thread and queue for checking lease state and other storage operations - Ensure eventhub client shuts down properly - Find way to update partition pumps without restarting them - Other optimizations * Move examples out * Changes - Added thread pool executor to enable conncurent execution of partitions - Removed partition pump dependency on max_batch Todo - Ensure eventhub client shuts down properly (This is causing errors) - Add thread pool for making checkpoint code conccurent - Add thread and queue for checking lease state and other storage operations to enable async - Find way to reassign active partition pumps without restarting them - Other optimizations * Add async receive * Changes - Added logs - Fixed error causing client to prematurely shutdown * Manual link flow control for async 
receive * Workaround for stuck async receiver * Local variable names * Changes - Optimized logging and comments Todo - Add concurecny mechanim for azure storage - Depricate partition pump event queue and update to latest version of the client * Create Dockerfile * Stuck async receiver * credit keeps increasing in async receiver * Changes - Added asnyc event hub client support - Optimized logging and comments Todo - Add concurecny mechanim for azure storage * Updated docker file as requested * Added EPH example * Fix hardcoded HTTP header * Made suggested changes * Bug fix - Fixed event loop bugs. In windows eventloop is thread dependent but in ubuntu the eventloop is threadsafe so you need to differentiate the thread specific eventloop from the host one. * Updated loop naming convention to be consistent * Added option to pass asyncio event_loop to eph * Updated docker file * Fixed critical bug with partition manager and aquirec mechanisiims Todo : Identitfy and fix remaining bug that is causing all pumps to shut down when a second host starts * Bug fixes - Fixed bug where closing a pump closed a host - Fixed bug where error partitioned were not removed - Fixed bug where leases were renewed at an incorrect interval * Updated file headers Removed author reference * - Fixed bug in eph example that caused host to terminate prematurely - Made the lease renewal and checkpoint creation "multithreaded" * Increase the size of the connection pool The default connection pool size was too small for scenarios where multiple partitions were handled by one EventProcessorHost. If the amount of partitions handled is large, we might end up doing very many connections at the same time due to the multi-threaded blob-handling. For this reason, you might hit the OS limits that restrict the number of open files per process that in MacOS is not very big. 
This can be worked around with something like: `ulimit -n 2560` * Decrease info logging verbosity * added ability to toggle pump shutdown when all messages on a pump are processed. * Install also eventhubsprocessor * Default to keeping the pumps It is more optimal to keep the pumps alive even if there are no messages so that it is faster to pickup when messages start to arrive. * Pipe and event injector for Windows * Event injector updates * EHClient refactoring. EHClient leaks. Sender part 1. * Send support * ren eventhubsprocessor eventprocessorhost * Changes - Added event hub config to simplify installation story * Changes - Added optional eventprocessor_params for passing context to the event processor - Made the storage manager mandatatory * Fix memory leaks * logging * Fix: 1. process crash due to race in client stop and connection remote close. 2. handle client close in async receiver. 3. fail pending sends when sender is closed. 4. some debug logging. * tests * test: recv from multiple partitions * test utility * logging update * Support callback based send for high throughput * Workaroud memory issue in proton.reactor.ApplicationEvent * renamed eventprocessor to eventprocessorhost for consistency * updated docker file * fixed typo in url * Added amqp port to address * Updated sample documentation since url is auto encoded by config * Updated docs * Implement timeout for send * Async sender and example * Close injector pipe * Use send timer to also check queued messages * Add partition pump loop to partition_context This gives the EventProcessor access to the partition_pump loop object. 
This way if One desires to run synchronous code inside process_events_async one can utilize the loop object to run the synchronous code using await context.pump_loop.run_in_executor(None, bla) * Include details in send error * Release deliveries when sender is closed * added validation to unquoted sas key * added support for custom eventhub client prefetch size * Update README.md * Update README.md * Added Docker instructions and fixed Dockerfile (#18) * Removed Dockerfile from the main folder and fixed Dockerfile example * Added build and run Dockerfile documentation * Update Readme * Removed rm qpid-proton folder * Removed /usr/share copy * Disallow a sender/receiver to be registered more than once * Make everything async in EPH I have removed all usage of threads thoroughout the code. Using threads to run pumps etc. Causes async code written into the event-processor to become caotic (you need to follow which loop is currently being used in the call to prevent loops not being found or using the wrong loop (There is the main loop and then loops that are created inside threads) Things become caotic when the event processor is being called by objects that run under different loops. So, no Threading except usage of asyncio run_in_executor. This is done mostly for azure blob api calls. Also changed the bla_async methods to not block. this way, when calling open_async for the the event-processor-host, the command will exit once the EPH is started. Due to the above, see the edited example/eph.py where I added a monitor that makes sure the EPH is still running (Could be replaced by loop.run_forever()) in the example file I have also incorporated a test class for gracefully killing the EPH after 30 seconds. this works, nevertheless takes a while to close as we are waiting for timeouts on the eventhubs connections. 
* Started removing proton code * Removed most of proton _impl * Removed more code * Working sender * Updates to sender * Added some tests/samples * Some progress on clients * Fixed samples * Added azure namespace * #25 Partition key cannot be set for events * Updated version * Updated README * Renamed package to eventhub * Started EPH modifications * Updated imports * Fixed target urls * Updated logging * Updated async message receive * updated test imports * Added mgmt call to get eh info * Updated samples * Updated receive test * Added send and receive test clients * Updated uamqp dependency * Merged updates from dev * Fixed typos * Updated EPH sample * Started docstrings * Converted tests to pytest * Updates to batch receive * Started adding docstrings * More docstrings * bumped version * Started porting test suite * More tests and improvements * Moved eph tests * Some sample cleanup * Some test updates * Some test restructure * Docstring cleanup * Fixed some merge artifacts * Fixed formatting error * Removed delivery count * Nested package directory * Support custom URL suffix * Support custom URL suffix * Support for EventData device ID * Reverted nested directory * Updated release notes * Workaround for partitionkey * Finished partition key workaround * beta2 fixes * pylint fixes * Trigger CI * Test fixes * Added package manifest * Added warning for Python 2.7 support Support for issues #36 and #38 * Started adding scenario tests * More test scenarios * Better docstring formatting * Started iothub support * Fixed long running test * Fixed typo and memory leak * Restructure * IoThub support * Updates for RC1 release * Fix long running test * Docstring and sample cleanups * Working on error retry * Improved error processing * Fixed partition manager * Progress on IotHub error * Some test updates * Updated uamqp dependency * Restructure for independent connections * Added HTTP proxy support Fix for issue #41 * Fixed some tests + samples * pylint fixes * bumped 
version * Added keepalive config and some eph fixes * Made reconnect configurable * Added more EPH options * Bumped version * Pylint fix * Pylint fix * Added send and auth timeouts * Changed log formatting. Retry on reconnect * Pylint fixes * Renamed internal async module * Updated send example to match recv Fix for issue #56 * Added build badge to readme * Fix for repeat startup * Added more storage connect options to EPH * Bumped version * Handler blocked until client started * Added event data methods * Fix pylint * Fix 3.7 CI * Fix 3.7 CI * Updated pylint version * Pylint fixes * Updated README * Fixed readme badge refresh * Fixed bug in Azure namespace package * Updated manifest * Parse enqueued time as UTC Fixes #72. * Updates for release 1.2.0 (#81) * Made setup 2.7 compatible * Separated async tests * Support 2.7 types * Bumped version * Added non-ascii tests * Fix CI * Fix Py27 pylint * Added iot sample * Updated sender/receiver client opening * bumped version * Updated tests * Fixed test name * Fixed test env settings * Skip eph test * Updates for v1.3.0 (#91) * Added support for storing the state of the Event Processor along the Checkpoint. Both Checkpoint and the EP state are stored as pickled objects. * Fixing pylint complaints. * Switched from pickle back to JSON for lease persistence. * Fixes bug when accessing leases that don't contain EP context. Also, minor renaming. * Better SAS token support * Fixed pylint * Improved auth error handling * Test stabilization * Improved stored EPH context * Updated EPH context storing * Skip test on OSX * Skip tests on OSX Fail due to large message body bug. * Some cleanup * Fixed error handling * Improved SAS token parsing * Fixed datetime offset (#99) * Fixed datetime offset * Updated pylint * Removed 3.4 pylint pass * Fixed bug in error handling (#100) * Migrate event hub sdk to central repo 1. add verifiable code snippets into docstring 2. update readme according to the template 3. 
add livetest mark and config 4. optimize code layout/structure * 1. document formatting 2. separate async/sync example tests * Fix build error: 1. uamqp dependency mismatch 2. rename test_examples in eventhub to avoid mismatch * This should fix build error * remove tests import and add sys path to solve build error * add live test for sending BatchEvent with application_properties, new live test passed with new uamqp wheel locally installed * Add get_partition_info in Event Hub * add get_partition_info * Add telemetry information to the connection properties * Disable smart split in batch message * 1. Add amqp over websocket test 2. Add proxy sample 3. Update some comment and code --- .../eventhub/aio/event_hubs_client_async.py | 8 +-- .../azure/eventhub/aio/sender_async.py | 48 +++++++++++++++- .../azure-eventhubs/azure/eventhub/client.py | 4 +- .../azure/eventhub/client_abstract.py | 4 -- sdk/eventhub/azure-eventhubs/conftest.py | 8 +-- .../azure-eventhubs/examples/proxy.py | 57 +++++++++++++++++++ .../tests/asynctests/test_receive_async.py | 26 ++++++++- .../tests/asynctests/test_send_async.py | 24 +++++++- .../azure-eventhubs/tests/test_receive.py | 54 ++++++++++++------ .../azure-eventhubs/tests/test_send.py | 23 +++++++- 10 files changed, 221 insertions(+), 35 deletions(-) create mode 100644 sdk/eventhub/azure-eventhubs/examples/proxy.py diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index 4f8760b02575..dc2ed1be5173 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -167,7 +167,7 @@ async def get_partition_information(self, partition): await mgmt_client.close_async() def create_receiver( - self, consumer_group, partition, offset=None, epoch=None, operation=None, + self, consumer_group, partition, event_position=None, 
exclusive_receiver_priority=None, operation=None, prefetch=None, keep_alive=None, auto_reconnect=None, loop=None): """ Add an async receiver to the client for a particular consumer group and partition. @@ -176,8 +176,8 @@ def create_receiver( :type consumer_group: str :param partition: The ID of the partition. :type partition: str - :param offset: The offset from which to start receiving. - :type offset: ~azure.eventhub.common.Offset + :param event_position: The position from which to start receiving. + :type event_position: ~azure.eventhub.common.EventPosition :param prefetch: The message prefetch count of the receiver. Default is 300. :type prefetch: int :operation: An optional operation to be appended to the hostname in the source URL. @@ -202,7 +202,7 @@ def create_receiver( source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( self.address.hostname, path, consumer_group, partition) handler = Receiver( - self, source_url, offset=offset, epoch=epoch, prefetch=prefetch, keep_alive=keep_alive, + self, source_url, offset=event_position, epoch=exclusive_receiver_priority, prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect, loop=loop) return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 6553aed57b08..7643ca5b59ec 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -12,7 +12,7 @@ from azure.eventhub import MessageSendResult from azure.eventhub import EventHubError -from azure.eventhub.common import _error_handler, _BatchSendEventData +from azure.eventhub.common import _error_handler, EventData, _BatchSendEventData log = logging.getLogger(__name__) @@ -258,6 +258,21 @@ async def _send_event_data(self, event_data): else: return self._outcome + @staticmethod + def _verify_partition(event_datas): + ed_iter = iter(event_datas) + try: 
+ ed = next(ed_iter) + partition_key = ed.partition_key + yield ed + except StopIteration: + raise ValueError("batch_event_data must not be empty") + for ed in ed_iter: + if ed.partition_key != partition_key: + raise ValueError("partition key of all EventData must be the same if being sent in a batch") + yield ed + + ''' async def send(self, event_data): """ Sends an event data and asynchronously waits until @@ -284,6 +299,37 @@ async def send(self, event_data): raise ValueError("EventData partition key cannot be used with a partition sender.") event_data.message.on_send_complete = self._on_outcome await self._send_event_data(event_data) + ''' + + async def send(self, event_data): + """ + Sends an event data and blocks until acknowledgement is + received or operation times out. + + :param event_data: The event to be sent. + :type event_data: ~azure.eventhub.common.EventData + :raises: ~azure.eventhub.common.EventHubError if the message fails to + send. + :return: The outcome of the message send. + :rtype: ~uamqp.constants.MessageSendResult + + Example: + .. literalinclude:: ../examples/test_examples_eventhub.py + :start-after: [START eventhub_client_sync_send] + :end-before: [END eventhub_client_sync_send] + :language: python + :dedent: 4 + :caption: Sends an event data and blocks until acknowledgement is received or operation times out. 
+ + """ + if self.error: + raise self.error + if isinstance(event_data, EventData): + wrapper_event_data = event_data + else: + wrapper_event_data = _BatchSendEventData(self._verify_partition(event_data)) + wrapper_event_data.message.on_send_complete = self._on_outcome + await self._send_event_data(wrapper_event_data) async def send_batch(self, batch_event_data): """ diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 0632275187cf..d2803b009915 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -192,8 +192,8 @@ def create_receiver( :type consumer_group: str :param partition: The ID of the partition. :type partition: str - :param offset: The offset from which to start receiving. - :type offset: ~azure.eventhub.common.Offset + :param event_position: The position from which to start receiving. + :type event_position: ~azure.eventhub.common.EventPosition :param prefetch: The message prefetch count of the receiver. Default is 300. :type prefetch: int :operation: An optional operation to be appended to the hostname in the source URL. 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index 551db1853f00..cbf7b6816ac4 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -301,10 +301,6 @@ def _process_redirect_uri(self, redirect): self.eh_name = self.address.path.lstrip('/') self.mgmt_target = redirect_uri - @abstractmethod - def get_eventhub_information(self): - pass - @abstractmethod def create_receiver( self, consumer_group, partition, epoch=None, offset=None, prefetch=300, diff --git a/sdk/eventhub/azure-eventhubs/conftest.py b/sdk/eventhub/azure-eventhubs/conftest.py index ce6f83adc6af..adfa6a635a65 100644 --- a/sdk/eventhub/azure-eventhubs/conftest.py +++ b/sdk/eventhub/azure-eventhubs/conftest.py @@ -165,13 +165,12 @@ def device_id(): @pytest.fixture() def connstr_receivers(connection_str): client = EventHubClient.from_connection_string(connection_str, debug=False) - eh_hub_info = client.get_eventhub_information() - partitions = eh_hub_info["partition_ids"] + partitions = client.get_partition_ids() recv_offset = EventPosition("@latest") receivers = [] for p in partitions: - receiver = client.create_receiver("$default", p, prefetch=500, offset=EventPosition("@latest")) + receiver = client.create_receiver("$default", p, prefetch=500, event_position=EventPosition("@latest")) receivers.append(receiver) receiver.receive(timeout=1) yield connection_str, receivers @@ -183,8 +182,7 @@ def connstr_receivers(connection_str): @pytest.fixture() def connstr_senders(connection_str): client = EventHubClient.from_connection_string(connection_str, debug=True) - eh_hub_info = client.get_eventhub_information() - partitions = eh_hub_info["partition_ids"] + partitions = client.get_partition_ids() senders = [] for p in partitions: diff --git a/sdk/eventhub/azure-eventhubs/examples/proxy.py 
b/sdk/eventhub/azure-eventhubs/examples/proxy.py new file mode 100644 index 000000000000..b4a2d51b0411 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/examples/proxy.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +""" +An example to show sending and receiving events behind a proxy +""" +import os +import logging + +from azure.eventhub import EventHubClient, EventPosition, EventData + +import examples +logger = examples.get_logger(logging.INFO) + + +# Address can be in either of these formats: +# "amqps://:@.servicebus.windows.net/myeventhub" +# "amqps://.servicebus.windows.net/myeventhub" +ADDRESS = os.environ.get('EVENT_HUB_ADDRESS') + +# SAS policy and key are not required if they are encoded in the URL +USER = os.environ.get('EVENT_HUB_SAS_POLICY') +KEY = os.environ.get('EVENT_HUB_SAS_KEY') +CONSUMER_GROUP = "$default" +EVENT_POSITION = EventPosition.first_available() +PARTITION = "0" +HTTP_PROXY = { + 'proxy_hostname': '127.0.0.1', # proxy hostname + 'proxy_port': 3128, # proxy port + 'username': 'admin', # username used for proxy authentication if needed + 'password': '123456' # password used for proxy authentication if needed +} + + +if not ADDRESS: + raise ValueError("No EventHubs URL supplied.") + +client = EventHubClient(ADDRESS, debug=False, username=USER, password=KEY, http_proxy=HTTP_PROXY) +sender = client.create_sender(partition=PARTITION) +receiver = client.create_receiver(consumer_group=CONSUMER_GROUP, partition=PARTITION, event_position=EVENT_POSITION) +try: + event_list = [] + for i in range(20): + event_list.append(EventData("Event Number {}".format(i))) + + print('Start sending events behind a proxy.') + + with sender: + sender.send(list) + + print('Start receiving events behind a proxy.') + + with receiver: + received = receiver.receive(max_batch_size=50, timeout=5) + +except KeyboardInterrupt: + pass + diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py 
b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py index 1be11107dae0..b9e8adc7045b 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py @@ -9,7 +9,7 @@ import pytest import time -from azure.eventhub import EventData, EventPosition, EventHubError +from azure.eventhub import EventData, EventPosition, EventHubError, TransportType from azure.eventhub.aio import EventHubClient @@ -296,3 +296,27 @@ def batched(): assert list(message.body)[0] == "Event Data {}".format(index).encode('utf-8') assert (app_prop_key.encode('utf-8') in message.application_properties) \ and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8')) + + +@pytest.mark.liveTest +@pytest.mark.asyncio +async def test_receive_over_websocket_async(connstr_senders): + connection_str, senders = connstr_senders + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) + receiver = client.create_receiver("$default", "0", prefetch=500, event_position=EventPosition('@latest')) + + event_list = [] + for i in range(20): + event_list.append(EventData("Event Number {}".format(i))) + + async with receiver: + received = await receiver.receive(timeout=5) + assert len(received) == 0 + + with senders[0]: + senders[0].send(event_list) + + time.sleep(1) + + received = await receiver.receive(max_batch_size=50, timeout=5) + assert len(received) == 20 diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py index b17dad9cae2c..da5177fb966d 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py @@ -11,7 +11,7 @@ import time import json -from azure.eventhub import EventData +from azure.eventhub import EventData, 
TransportType from azure.eventhub.aio import EventHubClient @@ -219,3 +219,25 @@ def batched(): assert list(message.body)[0] == "Event number {}".format(index).encode('utf-8') assert (app_prop_key.encode('utf-8') in message.application_properties) \ and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8')) + + +@pytest.mark.liveTest +@pytest.mark.asyncio +async def test_send_over_websocket_async(connstr_receivers): + connection_str, receivers = connstr_receivers + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) + sender = client.create_sender() + + event_list = [] + for i in range(20): + event_list.append(EventData("Event Number {}".format(i))) + + async with sender: + await sender.send(event_list) + + time.sleep(1) + received = [] + for r in receivers: + received.extend(r.receive(timeout=3)) + + assert len(received) == 20 diff --git a/sdk/eventhub/azure-eventhubs/tests/test_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_receive.py index 51fbb3a6079a..277d07b856b7 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_receive.py @@ -9,13 +9,13 @@ import time import datetime -from azure.eventhub import EventData, EventHubClient, EventPosition +from azure.eventhub import EventData, EventHubClient, EventPosition, TransportType # def test_receive_without_events(connstr_senders): # connection_str, senders = connstr_senders # client = EventHubClient.from_connection_string(connection_str, debug=True) -# receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) +# receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) # finish = datetime.datetime.now() + datetime.timedelta(seconds=240) # count = 0 # try: @@ -37,7 +37,7 @@ def test_receive_end_of_stream(connstr_senders): connection_str, senders = connstr_senders client = 
EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -55,7 +55,7 @@ def test_receive_with_offset_sync(connstr_senders): client = EventHubClient.from_connection_string(connection_str, debug=False) partitions = client.get_eventhub_information() assert partitions["partition_ids"] == ["0", "1"] - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) with receiver: more_partitions = client.get_eventhub_information() assert more_partitions["partition_ids"] == ["0", "1"] @@ -70,7 +70,7 @@ def test_receive_with_offset_sync(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.create_receiver("$default", "0", offset=offset) + offset_receiver = client.create_receiver("$default", "0", event_position=offset) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -83,7 +83,7 @@ def test_receive_with_offset_sync(connstr_senders): def test_receive_with_inclusive_offset(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) @@ -97,7 +97,7 @@ def test_receive_with_inclusive_offset(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.create_receiver("$default", "0", 
offset=EventPosition(offset.value, inclusive=True)) + offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset.value, inclusive=True)) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 1 @@ -109,7 +109,7 @@ def test_receive_with_datetime_sync(connstr_senders): client = EventHubClient.from_connection_string(connection_str, debug=False) partitions = client.get_eventhub_information() assert partitions["partition_ids"] == ["0", "1"] - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) with receiver: more_partitions = client.get_eventhub_information() assert more_partitions["partition_ids"] == ["0", "1"] @@ -123,7 +123,7 @@ def test_receive_with_datetime_sync(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset)) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -145,7 +145,7 @@ def test_receive_with_custom_datetime_sync(connstr_senders): for i in range(5): senders[0].send(EventData(b"Message after timestamp")) - receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset)) with receiver: all_received = [] received = receiver.receive(timeout=1) @@ -163,7 +163,7 @@ def test_receive_with_custom_datetime_sync(connstr_senders): def test_receive_with_sequence_no(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + 
receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -173,7 +173,7 @@ def test_receive_with_sequence_no(connstr_senders): assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset)) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -187,7 +187,7 @@ def test_receive_with_sequence_no(connstr_senders): def test_receive_with_inclusive_sequence_no(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -195,7 +195,7 @@ def test_receive_with_inclusive_sequence_no(connstr_senders): received = receiver.receive(timeout=5) assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset, inclusive=True)) + offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset, inclusive=True)) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 1 @@ -205,7 +205,7 @@ def test_receive_with_inclusive_sequence_no(connstr_senders): def test_receive_batch(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + receiver = client.create_receiver("$default", "0", 
prefetch=500, event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -234,7 +234,7 @@ def batched(): yield ed client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + receiver = client.create_receiver("$default", "0", prefetch=500, event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -251,3 +251,25 @@ def batched(): assert (app_prop_key.encode('utf-8') in message.application_properties) \ and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8')) + +@pytest.mark.liveTest +def test_receive_over_websocket_sync(connstr_senders): + connection_str, senders = connstr_senders + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) + receiver = client.create_receiver("$default", "0", prefetch=500, event_position=EventPosition('@latest')) + + event_list = [] + for i in range(20): + event_list.append(EventData("Event Number {}".format(i))) + + with receiver: + received = receiver.receive(timeout=5) + assert len(received) == 0 + + with senders[0] as sender: + sender.send(event_list) + + time.sleep(1) + + received = receiver.receive(max_batch_size=50, timeout=5) + assert len(received) == 20 diff --git a/sdk/eventhub/azure-eventhubs/tests/test_send.py b/sdk/eventhub/azure-eventhubs/tests/test_send.py index cdf1f0ebc6d0..728d21202aba 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_send.py @@ -10,7 +10,7 @@ import json import sys -from azure.eventhub import EventData, EventHubClient +from azure.eventhub import EventData, EventHubClient, TransportType @pytest.mark.liveTest @@ -225,3 +225,24 @@ def batched(): assert list(message.body)[0] == "Event 
number {}".format(index).encode('utf-8') assert (app_prop_key.encode('utf-8') in message.application_properties) \ and (dict(message.application_properties)[app_prop_key.encode('utf-8')] == app_prop_value.encode('utf-8')) + + +@pytest.mark.liveTest +def test_send_over_websocket_sync(connstr_receivers): + connection_str, receivers = connstr_receivers + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) + sender = client.create_sender() + + event_list = [] + for i in range(20): + event_list.append(EventData("Event Number {}".format(i))) + + with sender: + sender.send(event_list) + + time.sleep(1) + received = [] + for r in receivers: + received.extend(r.receive(timeout=3)) + + assert len(received) == 20 From 5b7b45630cd100f17821d46b5eda2dac9bebc818 Mon Sep 17 00:00:00 2001 From: yijxie Date: Mon, 27 May 2019 18:11:22 -0700 Subject: [PATCH 21/49] Changes from cross-lang --- .../eventhub/aio/event_hubs_client_async.py | 42 +----- .../azure/eventhub/aio/receiver_async.py | 121 ++++++------------ .../azure/eventhub/aio/sender_async.py | 72 +---------- .../azure/eventhub/configuration.py | 2 +- .../azure/eventhub/receiver.py | 72 +++++------ .../azure-eventhubs/azure/eventhub/sender.py | 35 +---- 6 files changed, 85 insertions(+), 259 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index dc2ed1be5173..1da7d7a8ec77 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -85,7 +85,7 @@ def _create_auth(self, username=None, password=None): return authentication.SASTokenAsync.from_shared_access_key( self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) - async def get_eventhub_information(self): + async def 
get_properties(self): """ Get details on the specified EventHub async. @@ -116,8 +116,11 @@ async def get_eventhub_information(self): return output finally: await mgmt_client.close_async() + + async def get_partition_ids(self): + return await self.get_properties()['partition_ids'] - async def get_partition_information(self, partition): + async def get_partition_properties(self, partition): """ Get information on the specified partition async. Keys in the details dictionary include: @@ -202,43 +205,10 @@ def create_receiver( source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( self.address.hostname, path, consumer_group, partition) handler = Receiver( - self, source_url, offset=event_position, epoch=exclusive_receiver_priority, prefetch=prefetch, keep_alive=keep_alive, + self, source_url, offset=event_position, exclusive_receiver_priority=exclusive_receiver_priority, prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect, loop=loop) return handler - def create_epoch_receiver( - self, consumer_group, partition, epoch, prefetch=300, operation=None, loop=None): - """ - Add an async receiver to the client with an epoch value. Only a single epoch receiver - can connect to a partition at any given time - additional epoch receivers must have - a higher epoch value or they will be rejected. If a 2nd epoch receiver has - connected, the first will be closed. - - :param consumer_group: The name of the consumer group. - :type consumer_group: str - :param partition: The ID of the partition. - :type partition: str - :param epoch: The epoch value for the receiver. - :type epoch: int - :param prefetch: The message prefetch count of the receiver. Default is 300. - :type prefetch: int - :operation: An optional operation to be appended to the hostname in the source URL. - The value must start with `/` character. - :type operation: str - :rtype: ~azure.eventhub.aio.receiver_async.ReceiverAsync - - Example: - .. 
literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py - :start-after: [START create_eventhub_client_async_epoch_receiver] - :end-before: [END create_eventhub_client_async_epoch_receiver] - :language: python - :dedent: 4 - :caption: Add an async receiver to the client with an epoch value. - - """ - return self.create_receiver(consumer_group, partition, epoch=epoch, prefetch=prefetch, - operation=operation, loop=loop) - def create_sender( self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None, loop=None): """ diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 3db3a703ef49..1d0b96259e98 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -33,7 +33,7 @@ class Receiver(object): _epoch = b'com.microsoft:epoch' def __init__( # pylint: disable=super-init-not-called - self, client, source, offset=None, prefetch=300, epoch=None, + self, client, source, offset=None, prefetch=300, exclusive_receiver_priority=None, keep_alive=None, auto_reconnect=False, loop=None): """ Instantiate an async receiver. 
@@ -56,7 +56,7 @@ def __init__( # pylint: disable=super-init-not-called self.offset = offset self.iter_started = False self.prefetch = prefetch - self.epoch = epoch + self.exclusive_receiver_priority = exclusive_receiver_priority self.keep_alive = keep_alive self.auto_reconnect = auto_reconnect self.retry_policy = errors.ErrorPolicy(max_retries=self.client.config.max_retries, on_error=_error_handler) @@ -69,8 +69,8 @@ def __init__( # pylint: disable=super-init-not-called source = Source(self.source) if self.offset is not None: source.set_filter(self.offset.selector()) - if epoch: - self.properties = {types.AMQPSymbol(self._epoch): types.AMQPLong(int(epoch))} + if exclusive_receiver_priority: + self.properties = {types.AMQPSymbol(self._epoch): types.AMQPLong(int(exclusive_receiver_priority))} self._handler = ReceiveClientAsync( source, auth=self.client.get_auth(), @@ -95,36 +95,42 @@ def __aiter__(self): return self async def __anext__(self): - while True: - try: - message = await self.messages_iter.__anext__() - event_data = EventData(message=message) - self.offset = event_data.offset - return event_data - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Receiver disconnected due to token error. Attempting reconnect.") + if not self.running: + await self.open() + try: + message = await self.messages_iter.__anext__() + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") + await self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + await self.reconnect() + log.info("Receiver detached. 
Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") await self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - await self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - await self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except Exception as e: - log.info("Unexpected error occurred (%r). Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - await self.close(exception=error) - raise error + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except StopAsyncIteration: + raise + except asyncio.CancelledError: + # TODO: stop self.message_iter + raise + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + await self.close(exception=error) + raise error async def open(self): """ @@ -335,52 +341,3 @@ async def receive(self, max_batch_size=None, timeout=None): error = EventHubError("Receive failed: {}".format(e)) await self.close(exception=error) raise error - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.close(exc_val) - - def __aiter__(self): - self.messages_iter = self._handler.receive_messages_iter_async() - return self - - async def __anext__(self): - if not self.running: - await self.open() - while True: - try: - message = await self.messages_iter.__anext__() - event_data = EventData(message=message) - self.offset = event_data.offset - return event_data - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Receiver disconnected due to token error. Attempting reconnect.") - await self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - await self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - await self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except StopAsyncIteration: - raise - except asyncio.CancelledError: - # TODO: stop self.message_iter - raise - except Exception as e: - log.info("Unexpected error occurred (%r). 
Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - await self.close(exception=error) - raise error diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 7643ca5b59ec..c39037ac1564 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -74,7 +74,7 @@ def __init__( # pylint: disable=super-init-not-called self._handler = SendClientAsync( self.target, auth=self.client.get_auth(), - debug=self.client.debug, + debug=self.client.config.network_tracing, msg_timeout=self.timeout, error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, @@ -272,35 +272,6 @@ def _verify_partition(event_datas): raise ValueError("partition key of all EventData must be the same if being sent in a batch") yield ed - ''' - async def send(self, event_data): - """ - Sends an event data and asynchronously waits until - acknowledgement is received or operation times out. - - :param event_data: The event to be sent. - :type event_data: ~azure.eventhub.common.EventData - :raises: ~azure.eventhub.common.EventHubError if the message fails to - send. - - Example: - .. literalinclude:: ../examples/async_examples/test_examples_eventhub_async.py - :start-after: [START eventhub_client_async_send] - :end-before: [END eventhub_client_async_send] - :language: python - :dedent: 4 - :caption: Sends an event data and asynchronously waits - until acknowledgement is received or operation times out. 
- - """ - if self.error: - raise self.error - if event_data.partition_key and self.partition: - raise ValueError("EventData partition key cannot be used with a partition sender.") - event_data.message.on_send_complete = self._on_outcome - await self._send_event_data(event_data) - ''' - async def send(self, event_data): """ Sends an event data and blocks until acknowledgement is @@ -331,47 +302,6 @@ async def send(self, event_data): wrapper_event_data.message.on_send_complete = self._on_outcome await self._send_event_data(wrapper_event_data) - async def send_batch(self, batch_event_data): - """ - Sends an event data and blocks until acknowledgement is - received or operation times out. - - :param event_data: The event to be sent. - :type event_data: ~azure.eventhub.common.EventData - :raises: ~azure.eventhub.common.EventHubError if the message fails to - send. - :return: The outcome of the message send. - :rtype: ~uamqp.constants.MessageSendResult - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_sync_send] - :end-before: [END eventhub_client_sync_send] - :language: python - :dedent: 4 - :caption: Sends an event data and blocks until acknowledgement is received or operation times out. 
- - """ - if self.error: - raise self.error - - def verify_partition(event_datas): - ed_iter = iter(event_datas) - try: - ed = next(ed_iter) - partition_key = ed.partition_key - yield ed - except StopIteration: - raise ValueError("batch_event_data must not be empty") - for ed in ed_iter: - if ed.partition_key != partition_key: - raise ValueError("partition key of all EventData must be the same if being sent in a batch") - yield ed - - wrapper_event_data = _BatchSendEventData(verify_partition(batch_event_data)) - wrapper_event_data.message.on_send_complete = self._on_outcome - return await self._send_event_data(wrapper_event_data) - def queue_message(self, event_data, callback=None): """ Transfers an event data and notifies the callback when the operation is done. diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py index a5fb46d804ac..198bbc75b2ec 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py @@ -10,7 +10,7 @@ class Configuration(object): def __init__(self, **kwargs): self.user_agent = kwargs.get("user_agent") self.max_retries = kwargs.get("max_retries", 3) - self.network_tracing = kwargs.get("debug", False) + self.network_tracing = kwargs.get("network_tracing", False) self.http_proxy = kwargs.get("http_proxy") self.auto_reconnect = kwargs.get("auto_reconnect", False) self.keep_alive = kwargs.get("keep_alive", 0) # 0 or None means to not keep alive diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 4d1c9154e619..e9a1c149d93b 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -51,7 +51,6 @@ def __init__(self, client, source, event_position=None, prefetch=300, exclusive_ self.client = client self.source = source self.offset = 
event_position - self.iter_started = False self.prefetch = prefetch self.exclusive_receiver_priority = exclusive_receiver_priority self.keep_alive = keep_alive @@ -71,7 +70,7 @@ def __init__(self, client, source, event_position=None, prefetch=300, exclusive_ self._handler = ReceiveClient( source, auth=self.client.get_auth(), - debug=self.client.debug, + debug=self.client.config.network_tracing, prefetch=self.prefetch, link_properties=self.properties, timeout=self.timeout, @@ -89,44 +88,41 @@ def __exit__(self, exc_type, exc_val, exc_tb): def __iter__(self): if not self.running: self.open() - if not self.iter_started: - self.iter_started = True - self.messages_iter = self._handler.receive_messages_iter() + self.messages_iter = self._handler.receive_messages_iter() return self def __next__(self): - while True: - try: - message = next(self.messages_iter) - event_data = EventData(message=message) - self.offset = event_data.offset - return event_data - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Receiver disconnected due to token error. Attempting reconnect.") + try: + message = next(self.messages_iter) + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") + self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. 
Attempting reconnect.") self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except StopIteration: - raise - except Exception as e: - log.info("Unexpected error occurred (%r). Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - self.close(exception=error) - raise error + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except StopIteration: + raise + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + self.close(exception=error) + raise error def open(self): """ @@ -159,7 +155,7 @@ def open(self): self._handler = ReceiveClient( source, auth=self.client.get_auth(**alt_creds), - debug=self.client.debug, + debug=self.client.config.network_tracing, prefetch=self.prefetch, link_properties=self.properties, timeout=self.timeout, @@ -183,7 +179,7 @@ def _reconnect(self): # pylint: disable=too-many-statements self._handler = ReceiveClient( source, auth=self.client.get_auth(**alt_creds), - debug=self.client.debug, + debug=self.client.config.network_tracing, prefetch=self.prefetch, link_properties=self.properties, timeout=self.timeout, diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index d50fe05ab861..a5f23bdfbd20 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -70,7 +70,7 @@ def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=N self._handler = SendClient( self.target, auth=self.client.get_auth(), - debug=self.client.debug, + debug=self.client.config.network_tracing, msg_timeout=self.timeout, error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, @@ -249,35 +249,6 @@ def _send_event_data(self, event_data): raise error else: return self._outcome - ''' - def send(self, event_data): - """ - Sends an event data and blocks until acknowledgement is - received or operation times out. - - :param event_data: The event to be sent. - :type event_data: ~azure.eventhub.common.EventData - :raises: ~azure.eventhub.common.EventHubError if the message fails to - send. - :return: The outcome of the message send. - :rtype: ~uamqp.constants.MessageSendResult - - Example: - .. 
literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_sync_send] - :end-before: [END eventhub_client_sync_send] - :language: python - :dedent: 4 - :caption: Sends an event data and blocks until acknowledgement is received or operation times out. - - """ - if self.error: - raise self.error - if event_data.partition_key and self.partition: - raise ValueError("EventData partition key cannot be used with a partition sender.") - event_data.message.on_send_complete = self._on_outcome - self._send_event_data(event_data) - ''' @staticmethod def _verify_partition(event_datas): @@ -346,8 +317,10 @@ def queue_message(self, event_data, callback=None): raise self.error if not self.running: self.open() + if event_data.partition_key and self.partition: - raise ValueError("EventData partition key cannot be used with a partition sender.") + # raise ValueError("EventData partition key cannot be used with a partition sender.") + log.warning("EventData partition key should not be used with a partition sender.") if callback: event_data.message.on_send_complete = lambda o, c: callback(o, Sender._error(o, c)) self._handler.queue_message(event_data.message) From 1262a2c030606249c575736fd9958f42e08b4ba6 Mon Sep 17 00:00:00 2001 From: yijxie Date: Mon, 27 May 2019 18:20:29 -0700 Subject: [PATCH 22/49] Change debug to network_tracing --- .../azure-eventhubs/azure/eventhub/aio/receiver_async.py | 4 ++-- .../azure-eventhubs/azure/eventhub/aio/sender_async.py | 4 ++-- sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 1d0b96259e98..20584ef13d4a 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -163,7 +163,7 @@ async def open(self): 
self._handler = ReceiveClientAsync( source, auth=self.client.get_auth(**alt_creds), - debug=self.client.debug, + debug=self.client.config.network_tracing, prefetch=self.prefetch, link_properties=self.properties, timeout=self.timeout, @@ -188,7 +188,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements self._handler = ReceiveClientAsync( source, auth=self.client.get_auth(**alt_creds), - debug=self.client.debug, + debug=self.client.config.network_tracing, prefetch=self.prefetch, link_properties=self.properties, timeout=self.timeout, diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index c39037ac1564..a9b24d211fc7 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -114,7 +114,7 @@ async def open(self): self._handler = SendClientAsync( self.target, auth=self.client.get_auth(), - debug=self.client.debug, + debug=self.client.config.network_tracing, msg_timeout=self.timeout, error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, @@ -131,7 +131,7 @@ async def _reconnect(self): self._handler = SendClientAsync( self.target, auth=self.client.get_auth(), - debug=self.client.debug, + debug=self.client.config.network_tracing, msg_timeout=self.timeout, error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index a5f23bdfbd20..8c5d7638e793 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -109,7 +109,7 @@ def open(self): self._handler = SendClient( self.target, auth=self.client.get_auth(), - debug=self.client.debug, + debug=self.client.config.network_tracing, msg_timeout=self.timeout, error_policy=self.retry_policy, 
keep_alive_interval=self.keep_alive, @@ -126,7 +126,7 @@ def _reconnect(self): self._handler = SendClient( self.target, auth=self.client.get_auth(), - debug=self.client.debug, + debug=self.client.config.network_tracing, msg_timeout=self.timeout, error_policy=self.retry_policy, keep_alive_interval=self.keep_alive, From f8b717e404533ddda91606f85eeb5211a5fa32f8 Mon Sep 17 00:00:00 2001 From: yijxie Date: Tue, 28 May 2019 19:51:31 -0700 Subject: [PATCH 23/49] Sync Client Constructor --- .../azure/eventhub/__init__.py | 4 +- .../eventhub/aio/event_hubs_client_async.py | 48 ++++----- .../azure-eventhubs/azure/eventhub/client.py | 39 +++---- .../azure/eventhub/client_abstract.py | 100 +++++++----------- .../azure-eventhubs/azure/eventhub/common.py | 20 +++- 5 files changed, 105 insertions(+), 106 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py index 38ad0e26a14d..56a1ff935c0c 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py @@ -11,7 +11,7 @@ from azure.eventhub.receiver import Receiver from .constants import MessageSendResult from .constants import TransportType -from .common import FIRST_AVAILABLE, NEW_EVENTS_ONLY +from .common import FIRST_AVAILABLE, NEW_EVENTS_ONLY, SharedKeyCredentials, SASTokenCredentials __all__ = [ "__version__", @@ -24,4 +24,6 @@ "MessageSendResult", "TransportType", "FIRST_AVAILABLE", "NEW_EVENTS_ONLY", + "SharedKeyCredentials", + "SASTokenCredentials", ] diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index 1da7d7a8ec77..c8c650494ecd 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -15,7 +15,7 @@ AMQPClientAsync, ) -from 
azure.eventhub.common import parse_sas_token +from azure.eventhub.common import parse_sas_token, SharedKeyCredentials, SASTokenCredentials from azure.eventhub import ( EventHubError) from ..client_abstract import EventHubClientAbstract @@ -55,16 +55,18 @@ def _create_auth(self, username=None, password=None): http_proxy = self.config.http_proxy transport_type = self.config.transport_type auth_timeout = self.config.auth_timeout - if self.aad_credential and self.sas_token: - raise ValueError("Can't have both sas_token and aad_credential") - elif self.aad_credential: - get_jwt_token = functools.partial(self.aad_credential.get_token, ['https://eventhubs.azure.net//.default']) - # TODO: should use async aad_credential.get_token. Check with Charles for async identity api - return authentication.JWTTokenAsync(self.auth_uri, self.auth_uri, - get_jwt_token, http_proxy=http_proxy, - transport_type=transport_type) - elif self.sas_token: + if isinstance(self.credentials, SharedKeyCredentials): + username = username or self._auth_config['username'] + password = password or self._auth_config['password'] + if "@sas.root" in username: + return authentication.SASLPlain( + self.host, username, password, http_proxy=http_proxy, transport_type=transport_type) + return authentication.SASTokenAsync.from_shared_access_key( + self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, + transport_type=transport_type) + + elif isinstance(self.credentials, SASTokenCredentials): token = self.sas_token() if callable(self.sas_token) else self.sas_token try: expiry = int(parse_sas_token(token)['se']) @@ -77,13 +79,12 @@ def _create_auth(self, username=None, password=None): http_proxy=http_proxy, transport_type=transport_type) - username = username or self._auth_config['username'] - password = password or self._auth_config['password'] - if "@sas.root" in username: - return authentication.SASLPlain( - self.address.hostname, username, password, http_proxy=http_proxy, 
transport_type=transport_type) - return authentication.SASTokenAsync.from_shared_access_key( - self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) + else: + get_jwt_token = functools.partial(self.aad_credential.get_token, ['https://eventhubs.azure.net//.default']) + return authentication.JWTTokenAsync(self.auth_uri, self.auth_uri, + get_jwt_token, http_proxy=http_proxy, + transport_type=transport_type) + async def get_properties(self): """ @@ -108,10 +109,8 @@ async def get_properties(self): eh_info = response.get_data() output = {} if eh_info: - output['name'] = eh_info[b'name'].decode('utf-8') - output['type'] = eh_info[b'type'].decode('utf-8') - output['created_at'] = datetime.datetime.fromtimestamp(float(eh_info[b'created_at'])/1000) - output['partition_count'] = eh_info[b'partition_count'] + output['path'] = eh_info[b'name'].decode('utf-8') + output['created_at'] = datetime.datetime.utcfromtimestamp(float(eh_info[b'created_at'])/1000) output['partition_ids'] = [p.decode('utf-8') for p in eh_info[b'partition_ids']] return output finally: @@ -156,15 +155,14 @@ async def get_partition_properties(self, partition): partition_info = response.get_data() output = {} if partition_info: - output['name'] = partition_info[b'name'].decode('utf-8') - output['type'] = partition_info[b'type'].decode('utf-8') - output['partition'] = partition_info[b'partition'].decode('utf-8') + output['event_hub_path'] = partition_info[b'name'].decode('utf-8') + output['id'] = partition_info[b'partition'].decode('utf-8') output['begin_sequence_number'] = partition_info[b'begin_sequence_number'] output['last_enqueued_sequence_number'] = partition_info[b'last_enqueued_sequence_number'] output['last_enqueued_offset'] = partition_info[b'last_enqueued_offset'].decode('utf-8') output['last_enqueued_time_utc'] = datetime.datetime.utcfromtimestamp( float(partition_info[b'last_enqueued_time_utc'] / 1000)) - output['is_partition_empty'] = 
partition_info[b'is_partition_empty'] + output['is_empty'] = partition_info[b'is_partition_empty'] return output finally: await mgmt_client.close_async() diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index d2803b009915..95e92987a623 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -26,6 +26,7 @@ from azure.eventhub.receiver import Receiver from azure.eventhub.common import EventHubError, parse_sas_token from .client_abstract import EventHubClientAbstract +from .common import SASTokenCredentials, SharedKeyCredentials log = logging.getLogger(__name__) @@ -59,15 +60,19 @@ def _create_auth(self, username=None, password=None): http_proxy = self.config.http_proxy transport_type = self.config.transport_type auth_timeout = self.config.auth_timeout - if self.aad_credential and self.sas_token: - raise ValueError("Can't have both sas_token and aad_credential") - elif self.aad_credential: - get_jwt_token = functools.partial(self.aad_credential.get_token, ['https://eventhubs.azure.net//.default']) - return authentication.JWTTokenAuth(self.auth_uri, self.auth_uri, - get_jwt_token, http_proxy=http_proxy, - transport_type=transport_type) - elif self.sas_token: + # TODO: the following code can be refactored to create auth from classes directly instead of using if-else + if isinstance(self.credentials, SharedKeyCredentials): + username = username or self._auth_config['username'] + password = password or self._auth_config['password'] + if "@sas.root" in username: + return authentication.SASLPlain( + self.host, username, password, http_proxy=http_proxy, transport_type=transport_type) + return authentication.SASTokenAuth.from_shared_access_key( + self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, + transport_type=transport_type) + + elif isinstance(self.credentials, SASTokenCredentials): token = 
self.sas_token() if callable(self.sas_token) else self.sas_token try: expiry = int(parse_sas_token(token)['se']) @@ -80,13 +85,13 @@ def _create_auth(self, username=None, password=None): http_proxy=http_proxy, transport_type=transport_type) - username = username or self._auth_config['username'] - password = password or self._auth_config['password'] - if "@sas.root" in username: - return authentication.SASLPlain( - self.address.hostname, username, password, http_proxy=http_proxy, transport_type=transport_type) - return authentication.SASTokenAuth.from_shared_access_key( - self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) + else: # Azure credential + get_jwt_token = functools.partial(self.credentials.get_token, + ['https://eventhubs.azure.net//.default']) + return authentication.JWTTokenAuth(self.auth_uri, self.auth_uri, + get_jwt_token, http_proxy=http_proxy, + transport_type=transport_type) + def get_properties(self): """ @@ -119,9 +124,7 @@ def get_properties(self): output = {} if eh_info: output['path'] = eh_info[b'name'].decode('utf-8') - # output['type'] = eh_info[b'type'].decode('utf-8') - output['created_at'] = datetime.datetime.fromtimestamp(float(eh_info[b'created_at'])/1000) - output['partition_count'] = eh_info[b'partition_count'] + output['created_at'] = datetime.datetime.utcfromtimestamp(float(eh_info[b'created_at'])/1000) output['partition_ids'] = [p.decode('utf-8') for p in eh_info[b'partition_ids']] return output finally: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index cbf7b6816ac4..6848e37ae47d 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -20,6 +20,7 @@ from azure.eventhub import __version__ from azure.eventhub.configuration import Configuration from azure.eventhub import constants +from 
.common import SASTokenCredentials, SharedKeyCredentials, Address log = logging.getLogger(__name__) @@ -94,8 +95,7 @@ class EventHubClientAbstract(object): """ - def __init__( - self, address, username=None, password=None, sas_token=None, aad_credential=None, **kwargs): + def __init__(self, host, event_hub_path, credentials, **kwargs): """ Constructs a new EventHubClient with the given address URL. @@ -124,67 +124,40 @@ def __init__( :type sas_token: str or callable """ self.container_id = "eventhub.pysdk-" + str(uuid.uuid4())[:8] - self.sas_token = sas_token - self.address = urlparse(address) - self.aad_credential = aad_credential - self.eh_name = self.address.path.lstrip('/') + self.address = Address() + self.address.hostname = host + self.address.path = "/" + event_hub_path if event_hub_path else "" + self._auth_config = {} + self.credentials = credentials + if isinstance(credentials, SASTokenCredentials): + self.sas_token = credentials.token + elif isinstance(credentials, SharedKeyCredentials): + self.username = credentials.policy + self.password = credentials.key + self._auth_config['username'] = self.username + self._auth_config['password'] = self.password + else: + self.aad_credential = credentials + + self.host = host + #self.eh_name = self.address.path.lstrip('/') + self.eh_name = event_hub_path # self.http_proxy = kwargs.get("http_proxy") self.keep_alive = kwargs.get("keep_alive", 30) self.auto_reconnect = kwargs.get("auto_reconnect", True) - self.mgmt_target = "amqps://{}/{}".format(self.address.hostname, self.eh_name) - url_username = unquote_plus(self.address.username) if self.address.username else None - username = username or url_username - url_password = unquote_plus(self.address.password) if self.address.password else None - password = password or url_password - if (not username or not password) and not sas_token and not aad_credential: - raise ValueError("Please supply any of username and password, or a SAS token, or an AAD credential") + # 
self.mgmt_target = "amqps://{}/{}".format(self.address.hostname, self.eh_name) + self.mgmt_target = "amqps://{}/{}".format(self.host, self.eh_name) + # url_username = unquote_plus(self.address.username) if self.address.username else None + # username = username or url_username + # url_password = unquote_plus(self.address.password) if self.address.password else None + # password = password or url_password self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) - self._auth_config = {'username': username, 'password': password} self.get_auth = functools.partial(self._create_auth) - # self.debug = kwargs.get("debug", False) # debug - #self.auth_timeout = auth_timeout - - self.stopped = False self.config = Configuration(**kwargs) self.debug = self.config.network_tracing log.info("%r: Created the Event Hub client", self.container_id) - @classmethod - def from_sas_token(cls, address, sas_token, eventhub=None, **kwargs): - """Create an EventHubClient from an existing auth token or token generator. - - :param address: The Event Hub address URL - :type address: str - :param sas_token: A SAS token or function that returns a SAS token. If a function is supplied, - it will be used to retrieve subsequent tokens in the case of token expiry. The function should - take no arguments. - :type sas_token: str or callable - :param eventhub: The name of the EventHub, if not already included in the address URL. - :type eventhub: str - :param debug: Whether to output network trace logs to the logger. Default - is `False`. - :type debug: bool - :param http_proxy: HTTP proxy settings. This must be a dictionary with the following - keys: 'proxy_hostname' (str value) and 'proxy_port' (int value). - Additionally the following keys may also be present: 'username', 'password'. - :type http_proxy: dict[str, Any] - :param auth_timeout: The time in seconds to wait for a token to be authorized by the service. - The default value is 60 seconds. 
If set to 0, no timeout will be enforced from the client. - :type auth_timeout: int - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START create_eventhub_client_sas_token] - :end-before: [END create_eventhub_client_sas_token] - :language: python - :dedent: 4 - :caption: Create an EventHubClient from an existing auth token or token generator. - - """ - address = _build_uri(address, eventhub) - return cls(address, sas_token=sas_token, **kwargs) - @classmethod def from_connection_string(cls, conn_str, eventhub=None, **kwargs): """Create an EventHubClient from a connection string. @@ -216,8 +189,12 @@ def from_connection_string(cls, conn_str, eventhub=None, **kwargs): """ address, policy, key, entity = _parse_conn_str(conn_str) entity = eventhub or entity - address = _build_uri(address, entity) - return cls(address, username=policy, password=key, **kwargs) + left_slash_pos = address.find("//") + if left_slash_pos != -1: + host = address[left_slash_pos + 2:] + else: + host = address + return cls(host, entity, SharedKeyCredentials(policy, key), **kwargs) @classmethod def from_iothub_connection_string(cls, conn_str, **kwargs): @@ -250,7 +227,12 @@ def from_iothub_connection_string(cls, conn_str, **kwargs): hub_name = address.split('.')[0] username = "{}@sas.root.{}".format(policy, hub_name) password = _generate_sas_token(address, policy, key) - client = cls("amqps://" + address, username=username, password=password, **kwargs) + left_slash_pos = address.find("//") + if left_slash_pos != -1: + host = address[left_slash_pos + 2:] + else: + host = address + client = cls(host, "", SharedKeyCredentials(username, password), **kwargs) client._auth_config = { # pylint: disable=protected-access 'iot_username': policy, 'iot_password': key, @@ -258,11 +240,6 @@ def from_iothub_connection_string(cls, conn_str, **kwargs): 'password': password} return client - @classmethod - def from_azure_identity(cls, address, aad_credential, eventhub=None, 
**kwargs): - address = _build_uri(address, eventhub) - return cls(address, aad_credential=aad_credential, **kwargs) - @abstractmethod def _create_auth(self, username=None, password=None): pass @@ -297,6 +274,7 @@ def _process_redirect_uri(self, redirect): redirect_uri = redirect.address.decode('utf-8') auth_uri, _, _ = redirect_uri.partition("/ConsumerGroups") self.address = urlparse(auth_uri) + self.host = self.address.hostname self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) self.eh_name = self.address.path.lstrip('/') self.mgmt_target = redirect_uri diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index 9a873d77f049..82be5e890ddb 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -337,7 +337,7 @@ def from_enqueued_time(enqueued_time, inclusive=False): FIRST_AVAILABLE = EventPosition("-1") -NEW_EVENTS_ONLY = EventPosition("latest") +NEW_EVENTS_ONLY = EventPosition("@latest") class EventHubError(Exception): @@ -390,3 +390,21 @@ def _parse_error(self, error_list): details.append(details_msg[system_index + 2: timestamp_index]) details.append(details_msg[timestamp_index + 2:]) self.details = details + + +# TODO: move some behaviors to these two classes. 
+class SASTokenCredentials(object): + def __init__(self, token): + self.token = token + + +class SharedKeyCredentials(object): + def __init__(self, policy, key): + self.policy = policy + self.key = key + + +class Address(object): + def __init__(self, hostname=None, path=None): + self.hostname = hostname + self.path = path From 0e53a5cd7884125577314091dd596c749a01397b Mon Sep 17 00:00:00 2001 From: yijxie Date: Tue, 28 May 2019 20:02:53 -0700 Subject: [PATCH 24/49] auto_reconnect True and keep_alive None --- .../azure/eventhub/aio/event_hubs_client_async.py | 15 +++++---------- .../azure/eventhub/aio/receiver_async.py | 2 +- .../azure/eventhub/aio/sender_async.py | 2 +- .../azure-eventhubs/azure/eventhub/client.py | 14 +++++--------- .../azure/eventhub/configuration.py | 2 -- .../azure-eventhubs/azure/eventhub/receiver.py | 2 +- .../azure-eventhubs/azure/eventhub/sender.py | 2 +- 7 files changed, 14 insertions(+), 25 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index c8c650494ecd..819bd12de7ca 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -169,7 +169,7 @@ async def get_partition_properties(self, partition): def create_receiver( self, consumer_group, partition, event_position=None, exclusive_receiver_priority=None, operation=None, - prefetch=None, keep_alive=None, auto_reconnect=None, loop=None): + prefetch=None, loop=None): """ Add an async receiver to the client for a particular consumer group and partition. @@ -195,20 +195,18 @@ def create_receiver( :caption: Add an async receiver to the client for a particular consumer group and partition. 
""" - keep_alive = self.config.keep_alive if keep_alive is None else keep_alive - auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect prefetch = self.config.prefetch if prefetch is None else prefetch path = self.address.path + operation if operation else self.address.path source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( self.address.hostname, path, consumer_group, partition) handler = Receiver( - self, source_url, offset=event_position, exclusive_receiver_priority=exclusive_receiver_priority, prefetch=prefetch, keep_alive=keep_alive, - auto_reconnect=auto_reconnect, loop=loop) + self, source_url, offset=event_position, exclusive_receiver_priority=exclusive_receiver_priority, + prefetch=prefetch, loop=loop) return handler def create_sender( - self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None, loop=None): + self, partition=None, operation=None, send_timeout=None, loop=None): """ Add an async sender to the client to send ~azure.eventhub.common.EventData object to an EventHub. 
@@ -246,10 +244,7 @@ def create_sender( if operation: target = target + operation send_timeout = self.config.send_timeout if send_timeout is None else send_timeout - keep_alive = self.config.keep_alive if keep_alive is None else keep_alive - auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect handler = Sender( - self, target, partition=partition, send_timeout=send_timeout, - keep_alive=keep_alive, auto_reconnect=auto_reconnect, loop=loop) + self, target, partition=partition, send_timeout=send_timeout, loop=loop) return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 20584ef13d4a..c1744837cf2c 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -34,7 +34,7 @@ class Receiver(object): def __init__( # pylint: disable=super-init-not-called self, client, source, offset=None, prefetch=300, exclusive_receiver_priority=None, - keep_alive=None, auto_reconnect=False, loop=None): + keep_alive=None, auto_reconnect=True, loop=None): """ Instantiate an async receiver. diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index a9b24d211fc7..6a8d6e94be07 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -33,7 +33,7 @@ class Sender(object): def __init__( # pylint: disable=super-init-not-called self, client, target, partition=None, send_timeout=60, - keep_alive=None, auto_reconnect=False, loop=None): + keep_alive=None, auto_reconnect=True, loop=None): """ Instantiate an EventHub event SenderAsync handler. 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 95e92987a623..1b06e338955e 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -185,8 +185,6 @@ def get_partition_properties(self, partition): def create_receiver( self, consumer_group, partition, event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, - keep_alive=None, - auto_reconnect=None, ): """ Add a receiver to the client for a particular consumer group and partition. @@ -213,8 +211,6 @@ def create_receiver( :caption: Add a receiver to the client for a particular consumer group and partition. """ - keep_alive = self.config.keep_alive if keep_alive is None else keep_alive - auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect prefetch = self.config.prefetch if prefetch is None else prefetch path = self.address.path + operation if operation else self.address.path @@ -222,10 +218,10 @@ def create_receiver( self.address.hostname, path, consumer_group, partition) handler = Receiver( self, source_url, event_position=event_position, exclusive_receiver_priority=exclusive_receiver_priority, - prefetch=prefetch, keep_alive=keep_alive, auto_reconnect=auto_reconnect) + prefetch=prefetch) return handler - def create_sender(self, partition=None, operation=None, send_timeout=None, keep_alive=None, auto_reconnect=None): + def create_sender(self, partition=None, operation=None, send_timeout=None): """ Add a sender to the client to send EventData object to an EventHub. 
@@ -260,9 +256,9 @@ def create_sender(self, partition=None, operation=None, send_timeout=None, keep_ if operation: target = target + operation send_timeout = self.config.send_timeout if send_timeout is None else send_timeout - keep_alive = self.config.keep_alive if keep_alive is None else keep_alive - auto_reconnect = self.config.auto_reconnect if auto_reconnect is None else auto_reconnect + keep_alive = 0 + auto_reconnect = True handler = Sender( - self, target, partition=partition, send_timeout=send_timeout, keep_alive=keep_alive, auto_reconnect=auto_reconnect) + self, target, partition=partition, send_timeout=send_timeout) return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py index 198bbc75b2ec..b6e030c9e3a6 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/configuration.py @@ -12,8 +12,6 @@ def __init__(self, **kwargs): self.max_retries = kwargs.get("max_retries", 3) self.network_tracing = kwargs.get("network_tracing", False) self.http_proxy = kwargs.get("http_proxy") - self.auto_reconnect = kwargs.get("auto_reconnect", False) - self.keep_alive = kwargs.get("keep_alive", 0) # 0 or None means to not keep alive self.transport_type = TransportType.AmqpOverWebsocket if self.http_proxy \ else kwargs.get("transport_type", TransportType.Amqp) self.auth_timeout = kwargs.get("auth_timeout", 60) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index e9a1c149d93b..11d99808f448 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -33,7 +33,7 @@ class Receiver(object): timeout = 0 _epoch = b'com.microsoft:epoch' - def __init__(self, client, source, event_position=None, prefetch=300, exclusive_receiver_priority=None, keep_alive=None, 
auto_reconnect=False): + def __init__(self, client, source, event_position=None, prefetch=300, exclusive_receiver_priority=None, keep_alive=None, auto_reconnect=True): """ Instantiate a receiver. diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 8c5d7638e793..4152ba9cdc62 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -31,7 +31,7 @@ class Sender(object): """ - def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=None, auto_reconnect=False): + def __init__(self, client, target, partition=None, send_timeout=60, keep_alive=None, auto_reconnect=True): """ Instantiate an EventHub event Sender handler. From fdd3d44d8897fbc296b367ed85445b126ecc5a4c Mon Sep 17 00:00:00 2001 From: yijxie Date: Tue, 28 May 2019 20:05:43 -0700 Subject: [PATCH 25/49] consumer_group $default --- .../azure/eventhub/aio/event_hubs_client_async.py | 2 +- sdk/eventhub/azure-eventhubs/azure/eventhub/client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index 819bd12de7ca..f6dffecd5bf6 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -168,7 +168,7 @@ async def get_partition_properties(self, partition): await mgmt_client.close_async() def create_receiver( - self, consumer_group, partition, event_position=None, exclusive_receiver_priority=None, operation=None, + self, partition, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, loop=None): """ Add an async receiver to the client for a particular consumer group and partition. 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 1b06e338955e..0e4487cf5c1f 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -183,7 +183,7 @@ def get_partition_properties(self, partition): mgmt_client.close() def create_receiver( - self, consumer_group, partition, event_position=None, exclusive_receiver_priority=None, operation=None, + self, partition, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, ): """ From 50e0163901b59aa066fa9b7e0954c1ffa663231c Mon Sep 17 00:00:00 2001 From: yijxie Date: Tue, 28 May 2019 20:15:34 -0700 Subject: [PATCH 26/49] hide open() --- .../azure-eventhubs/azure/eventhub/aio/receiver_async.py | 6 +++--- .../azure-eventhubs/azure/eventhub/aio/sender_async.py | 6 +++--- sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py | 6 +++--- sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py | 8 ++++---- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index c1744837cf2c..73c2c7146d5e 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -96,7 +96,7 @@ def __aiter__(self): async def __anext__(self): if not self.running: - await self.open() + await self.__open() try: message = await self.messages_iter.__anext__() event_data = EventData(message=message) @@ -132,7 +132,7 @@ async def __anext__(self): await self.close(exception=error) raise error - async def open(self): + async def _open(self): """ Open the Receiver using the supplied conneciton. 
If the handler has previously been redirected, the redirect @@ -302,7 +302,7 @@ async def receive(self, max_batch_size=None, timeout=None): if self.error: raise self.error if not self.running: - await self.open() + await self._open() data_batch = [] try: timeout_ms = 1000 * timeout if timeout else 0 diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 6a8d6e94be07..b1340e0dfed2 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -90,7 +90,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc_val, exc_tb): await self.close(exc_val) - async def open(self): + async def _open(self): """ Open the Sender using the supplied conneciton. If the handler has previously been redirected, the redirect @@ -220,7 +220,7 @@ async def close(self, exception=None): async def _send_event_data(self, event_data): if not self.running: - await self.open() + await self._open() try: self._handler.send_message(event_data.message) if self._outcome != MessageSendResult.Ok: @@ -324,7 +324,7 @@ def queue_message(self, event_data, callback=None): if self.error: raise self.error if not self.running: - self.open() + self._open() if event_data.partition_key and self.partition: raise ValueError("EventData partition key cannot be used with a partition sender.") if callback: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 11d99808f448..e85fd8dc764c 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -87,7 +87,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): def __iter__(self): if not self.running: - self.open() + self._open() self.messages_iter = self._handler.receive_messages_iter() return self @@ -124,7 +124,7 @@ def 
__next__(self): self.close(exception=error) raise error - def open(self): + def _open(self): """ Open the Receiver using the supplied conneciton. If the handler has previously been redirected, the redirect @@ -301,7 +301,7 @@ def receive(self, max_batch_size=None, timeout=None): if self.error: raise self.error if not self.running: - self.open() + self._open() data_batch = [] try: timeout_ms = 1000 * timeout if timeout else 0 diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 4152ba9cdc62..b33f036500ba 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -85,7 +85,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_val, exc_tb): self.close(exc_val) - def open(self): + def _open(self): """ Open the Sender using the supplied conneciton. If the handler has previously been redirected, the redirect @@ -212,7 +212,7 @@ def close(self, exception=None): def _send_event_data(self, event_data): if not self.running: - self.open() + self._open() try: self._handler.send_message(event_data.message) if self._outcome != MessageSendResult.Ok: @@ -316,7 +316,7 @@ def queue_message(self, event_data, callback=None): if self.error: raise self.error if not self.running: - self.open() + self._open() if event_data.partition_key and self.partition: # raise ValueError("EventData partition key cannot be used with a partition sender.") @@ -341,7 +341,7 @@ def send_pending_messages(self): if self.error: raise self.error if not self.running: - self.open() + self._open() try: self._handler.wait() except (errors.TokenExpired, errors.AuthenticationException): From 094ae4e1df2d338f7011c2c17e760a822fbd5ccd Mon Sep 17 00:00:00 2001 From: yijxie Date: Tue, 28 May 2019 20:16:45 -0700 Subject: [PATCH 27/49] partition -> partition_id --- .../azure/eventhub/aio/event_hubs_client_async.py | 8 ++++---- 
sdk/eventhub/azure-eventhubs/azure/eventhub/client.py | 10 ++++------ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index f6dffecd5bf6..2b51599f21bd 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -168,7 +168,7 @@ async def get_partition_properties(self, partition): await mgmt_client.close_async() def create_receiver( - self, partition, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, + self, partition_id, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, loop=None): """ Add an async receiver to the client for a particular consumer group and partition. @@ -199,14 +199,14 @@ def create_receiver( path = self.address.path + operation if operation else self.address.path source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( - self.address.hostname, path, consumer_group, partition) + self.address.hostname, path, consumer_group, partition_id) handler = Receiver( self, source_url, offset=event_position, exclusive_receiver_priority=exclusive_receiver_priority, prefetch=prefetch, loop=loop) return handler def create_sender( - self, partition=None, operation=None, send_timeout=None, loop=None): + self, partition_id=None, operation=None, send_timeout=None, loop=None): """ Add an async sender to the client to send ~azure.eventhub.common.EventData object to an EventHub. 
@@ -246,5 +246,5 @@ def create_sender( send_timeout = self.config.send_timeout if send_timeout is None else send_timeout handler = Sender( - self, target, partition=partition, send_timeout=send_timeout, loop=loop) + self, target, partition=partition_id, send_timeout=send_timeout, loop=loop) return handler diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 0e4487cf5c1f..5f5fd5923308 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -183,7 +183,7 @@ def get_partition_properties(self, partition): mgmt_client.close() def create_receiver( - self, partition, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, + self, partition_id, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, ): """ @@ -215,13 +215,13 @@ def create_receiver( path = self.address.path + operation if operation else self.address.path source_url = "amqps://{}{}/ConsumerGroups/{}/Partitions/{}".format( - self.address.hostname, path, consumer_group, partition) + self.address.hostname, path, consumer_group, partition_id) handler = Receiver( self, source_url, event_position=event_position, exclusive_receiver_priority=exclusive_receiver_priority, prefetch=prefetch) return handler - def create_sender(self, partition=None, operation=None, send_timeout=None): + def create_sender(self, partition_id=None, operation=None, send_timeout=None): """ Add a sender to the client to send EventData object to an EventHub. 
@@ -256,9 +256,7 @@ def create_sender(self, partition=None, operation=None, send_timeout=None): if operation: target = target + operation send_timeout = self.config.send_timeout if send_timeout is None else send_timeout - keep_alive = 0 - auto_reconnect = True handler = Sender( - self, target, partition=partition, send_timeout=send_timeout) + self, target, partition=partition_id, send_timeout=send_timeout) return handler From 1c3df2fdb1d829b5f3d2e433063052d3de4c8845 Mon Sep 17 00:00:00 2001 From: yijxie Date: Wed, 29 May 2019 12:55:03 -0700 Subject: [PATCH 28/49] credentials -> credential in init --- .../eventhub/aio/event_hubs_client_async.py | 4 ++-- .../azure-eventhubs/azure/eventhub/client.py | 6 ++--- .../azure/eventhub/client_abstract.py | 23 +++++++------------ 3 files changed, 13 insertions(+), 20 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index 2b51599f21bd..c43cb6d4e511 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -56,7 +56,7 @@ def _create_auth(self, username=None, password=None): transport_type = self.config.transport_type auth_timeout = self.config.auth_timeout - if isinstance(self.credentials, SharedKeyCredentials): + if isinstance(self.credential, SharedKeyCredentials): username = username or self._auth_config['username'] password = password or self._auth_config['password'] if "@sas.root" in username: @@ -66,7 +66,7 @@ def _create_auth(self, username=None, password=None): self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) - elif isinstance(self.credentials, SASTokenCredentials): + elif isinstance(self.credential, SASTokenCredentials): token = self.sas_token() if callable(self.sas_token) else self.sas_token try: expiry = 
int(parse_sas_token(token)['se']) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 5f5fd5923308..245c0331d891 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -62,7 +62,7 @@ def _create_auth(self, username=None, password=None): auth_timeout = self.config.auth_timeout # TODO: the following code can be refactored to create auth from classes directly instead of using if-else - if isinstance(self.credentials, SharedKeyCredentials): + if isinstance(self.credential, SharedKeyCredentials): username = username or self._auth_config['username'] password = password or self._auth_config['password'] if "@sas.root" in username: @@ -72,7 +72,7 @@ def _create_auth(self, username=None, password=None): self.auth_uri, username, password, timeout=auth_timeout, http_proxy=http_proxy, transport_type=transport_type) - elif isinstance(self.credentials, SASTokenCredentials): + elif isinstance(self.credential, SASTokenCredentials): token = self.sas_token() if callable(self.sas_token) else self.sas_token try: expiry = int(parse_sas_token(token)['se']) @@ -86,7 +86,7 @@ def _create_auth(self, username=None, password=None): transport_type=transport_type) else: # Azure credential - get_jwt_token = functools.partial(self.credentials.get_token, + get_jwt_token = functools.partial(self.credential.get_token, ['https://eventhubs.azure.net//.default']) return authentication.JWTTokenAuth(self.auth_uri, self.auth_uri, get_jwt_token, http_proxy=http_proxy, diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index 6848e37ae47d..48f36758fb50 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -95,7 +95,7 @@ class EventHubClientAbstract(object): """ - def 
__init__(self, host, event_hub_path, credentials, **kwargs): + def __init__(self, host, event_hub_path, credential, **kwargs): """ Constructs a new EventHubClient with the given address URL. @@ -128,29 +128,22 @@ def __init__(self, host, event_hub_path, credentials, **kwargs): self.address.hostname = host self.address.path = "/" + event_hub_path if event_hub_path else "" self._auth_config = {} - self.credentials = credentials - if isinstance(credentials, SASTokenCredentials): - self.sas_token = credentials.token - elif isinstance(credentials, SharedKeyCredentials): - self.username = credentials.policy - self.password = credentials.key + self.credential = credential + if isinstance(credential, SASTokenCredentials): + self.sas_token = credential.token + elif isinstance(credential, SharedKeyCredentials): + self.username = credential.policy + self.password = credential.key self._auth_config['username'] = self.username self._auth_config['password'] = self.password else: - self.aad_credential = credentials + self.aad_credential = credential self.host = host - #self.eh_name = self.address.path.lstrip('/') self.eh_name = event_hub_path - # self.http_proxy = kwargs.get("http_proxy") self.keep_alive = kwargs.get("keep_alive", 30) self.auto_reconnect = kwargs.get("auto_reconnect", True) - # self.mgmt_target = "amqps://{}/{}".format(self.address.hostname, self.eh_name) self.mgmt_target = "amqps://{}/{}".format(self.host, self.eh_name) - # url_username = unquote_plus(self.address.username) if self.address.username else None - # username = username or url_username - # url_password = unquote_plus(self.address.password) if self.address.password else None - # password = password or url_password self.auth_uri = "sb://{}{}".format(self.address.hostname, self.address.path) self.get_auth = functools.partial(self._create_auth) self.config = Configuration(**kwargs) From 9863a603d0d0434b58728d4cda95ac7744719a84 Mon Sep 17 00:00:00 2001 From: yijxie Date: Wed, 29 May 2019 15:49:02 -0700 
Subject: [PATCH 29/49] set running=true after opened --- .../azure-eventhubs/azure/eventhub/aio/receiver_async.py | 2 +- sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py | 2 +- sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py | 2 +- sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 73c2c7146d5e..2400bec24e06 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -151,7 +151,6 @@ async def _open(self): """ # pylint: disable=protected-access - self.running = True if self.redirected: self.source = self.redirected.address source = Source(self.source) @@ -173,6 +172,7 @@ async def _open(self): properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) await self._handler.open_async() + self.running = True while not await self._handler.client_ready_async(): await asyncio.sleep(0.05) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index b1340e0dfed2..7effcaf63b13 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -108,7 +108,6 @@ async def _open(self): :caption: Open the Sender using the supplied conneciton. 
""" - self.running = True if self.redirected: self.target = self.redirected.address self._handler = SendClientAsync( @@ -122,6 +121,7 @@ async def _open(self): properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) await self._handler.open_async() + self.running = True while not await self._handler.client_ready_async(): await asyncio.sleep(0.05) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index e85fd8dc764c..4a06432e0fa0 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -143,7 +143,6 @@ def _open(self): """ # pylint: disable=protected-access - self.running = True if self.redirected: self.source = self.redirected.address source = Source(self.source) @@ -164,6 +163,7 @@ def _open(self): client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent)) self._handler.open() + self.running = True while not self._handler.client_ready(): time.sleep(0.05) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index b33f036500ba..8728abb8afa1 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -103,7 +103,6 @@ def _open(self): :caption: Open the Sender using the supplied conneciton. 
""" - self.running = True if self.redirected: self.target = self.redirected.address self._handler = SendClient( @@ -116,6 +115,7 @@ def _open(self): client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent)) self._handler.open() + self.running = True while not self._handler.client_ready(): time.sleep(0.05) From 25848c488c78d32d44aa4cde7fa4553214247e7b Mon Sep 17 00:00:00 2001 From: Yunhao Ling <47871814+yunhaoling@users.noreply.github.com> Date: Thu, 30 May 2019 10:14:05 -0700 Subject: [PATCH 30/49] Eventhub track2 - Update livetest (#7) * Update README.md * Update README.md Fix typos * Memory leak * Support timestamp filter * Support timestamp filter * Update README.md * Add sender and refactor * Added abstract classes Todo - Migrate Base Class Wireframes - Migrate Azure Classes * First draft of class wires directly ported from .net (might be some minor gaps) * send example * Set allowed sasl mechs * Remove client.py * Receiver update * Add dummy send api * logging updates * Error handling, reconnect and logging * Add app properties to event data * unbind transport on connection close * timestamp filter on py2 * module version * Reconnect once when link/session/connection close * Add SessionPolicy * Add client info * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * run client in non-blocking mode * Added unit testing * Implemented the following functionality - Azure_storage_checkpoint_manager - AzureBlobLease isExpired Todo 
Implement partition manager Implement partition context Test full implementation * Implemented Processing of First Epoh Todo - Fix lease bug that is breaking subsequent epochs * Changes - Completed End to End EPH Flow - Removed storage dependancy on downloading full blob to check lease state Todo - Add thread and queue for checking lease state and other storage operations - Ensure eventhub client shuts down properly - Find way to update partition pumps without restarting them - Other optimizations * Move examples out * Changes - Added thread pool executor to enable conncurent execution of partitions - Removed partition pump dependency on max_batch Todo - Ensure eventhub client shuts down properly (This is causing errors) - Add thread pool for making checkpoint code conccurent - Add thread and queue for checking lease state and other storage operations to enable async - Find way to reassign active partition pumps without restarting them - Other optimizations * Add async receive * Changes - Added logs - Fixed error causing client to prematurely shutdown * Manual link flow control for async receive * Workaround for stuck async receiver * Local variable names * Changes - Optimized logging and comments Todo - Add concurecny mechanim for azure storage - Depricate partition pump event queue and update to latest version of the client * Create Dockerfile * Stuck async receiver * credit keeps increasing in async receiver * Changes - Added asnyc event hub client support - Optimized logging and comments Todo - Add concurecny mechanim for azure storage * Updated docker file as requested * Added EPH example * Fix hardcoded HTTP header * Made suggested changes * Bug fix - Fixed event loop bugs. In windows eventloop is thread dependent but in ubuntu the eventloop is threadsafe so you need to differentiate the thread specific eventloop from the host one. 
* Updated loop naming convention to be consistent * Added option to pass asyncio event_loop to eph * Updated docker file * Fixed critical bug with partition manager and aquirec mechanisiims Todo : Identitfy and fix remaining bug that is causing all pumps to shut down when a second host starts * Bug fixes - Fixed bug where closing a pump closed a host - Fixed bug where error partitioned were not removed - Fixed bug where leases were renewed at an incorrect interval * Updated file headers Removed author reference * - Fixed bug in eph example that caused host to terminate prematurely - Made the lease renewal and checkpoint creation "multithreaded" * Increase the size of the connection pool The default connection pool size was too small for scenarios where multiple partitions were handled by one EventProcessorHost. If the amount of partitions handled is large, we might end up doing very many connections at the same time due to the multi-threaded blob-handling. For this reason, you might hit the OS limits that restrict the number of open files per process that in MacOS is not very big. This can be worked around with something like: `ulimit -n 2560` * Decrease info logging verbosity * added ability to toggle pump shutdown when all messages on a pump are processed. * Install also eventhubsprocessor * Default to keeping the pumps It is more optimal to keep the pumps alive even if there are no messages so that it is faster to pickup when messages start to arrive. * Pipe and event injector for Windows * Event injector updates * EHClient refactoring. EHClient leaks. Sender part 1. * Send support * ren eventhubsprocessor eventprocessorhost * Changes - Added event hub config to simplify installation story * Changes - Added optional eventprocessor_params for passing context to the event processor - Made the storage manager mandatatory * Fix memory leaks * logging * Fix: 1. process crash due to race in client stop and connection remote close. 2. 
handle client close in async receiver. 3. fail pending sends when sender is closed. 4. some debug logging. * tests * test: recv from multiple partitions * test utility * logging update * Support callback based send for high throughput * Workaroud memory issue in proton.reactor.ApplicationEvent * renamed eventprocessor to eventprocessorhost for consistency * updated docker file * fixed typo in url * Added amqp port to address * Updated sample documentation since url is auto encoded by config * Updated docs * Implement timeout for send * Async sender and example * Close injector pipe * Use send timer to also check queued messages * Add partition pump loop to partition_context This gives the EventProcessor access to the partition_pump loop object. This way if One desires to run synchronous code inside process_events_async one can utilize the loop object to run the synchronous code using await context.pump_loop.run_in_executor(None, bla) * Include details in send error * Release deliveries when sender is closed * added validation to unquoted sas key * added support for custom eventhub client prefetch size * Update README.md * Update README.md * Added Docker instructions and fixed Dockerfile (#18) * Removed Dockerfile from the main folder and fixed Dockerfile example * Added build and run Dockerfile documentation * Update Readme * Removed rm qpid-proton folder * Removed /usr/share copy * Disallow a sender/receiver to be registered more than once * Make everything async in EPH I have removed all usage of threads thoroughout the code. Using threads to run pumps etc. Causes async code written into the event-processor to become caotic (you need to follow which loop is currently being used in the call to prevent loops not being found or using the wrong loop (There is the main loop and then loops that are created inside threads) Things become caotic when the event processor is being called by objects that run under different loops. 
So, no Threading except usage of asyncio run_in_executor. This is done mostly for azure blob api calls. Also changed the bla_async methods to not block. this way, when calling open_async for the the event-processor-host, the command will exit once the EPH is started. Due to the above, see the edited example/eph.py where I added a monitor that makes sure the EPH is still running (Could be replaced by loop.run_forever()) in the example file I have also incorporated a test class for gracefully killing the EPH after 30 seconds. this works, nevertheless takes a while to close as we are waiting for timeouts on the eventhubs connections. * Started removing proton code * Removed most of proton _impl * Removed more code * Working sender * Updates to sender * Added some tests/samples * Some progress on clients * Fixed samples * Added azure namespace * #25 Partition key cannot be set for events * Updated version * Updated README * Renamed package to eventhub * Started EPH modifications * Updated imports * Fixed target urls * Updated logging * Updated async message receive * updated test imports * Added mgmt call to get eh info * Updated samples * Updated receive test * Added send and receive test clients * Updated uamqp dependency * Merged updates from dev * Fixed typos * Updated EPH sample * Started docstrings * Converted tests to pytest * Updates to batch receive * Started adding docstrings * More docstrings * bumped version * Started porting test suite * More tests and improvements * Moved eph tests * Some sample cleanup * Some test updates * Some test restructure * Docstring cleanup * Fixed some merge artifacts * Fixed formatting error * Removed delivery count * Nested package directory * Support custom URL suffix * Support custom URL suffix * Support for EventData device ID * Reverted nested directory * Updated release notes * Workaround for partitionkey * Finished partition key workaround * beta2 fixes * pylint fixes * Trigger CI * Test fixes * Added package manifest * 
Added warning for Python 2.7 support Support for issues #36 and #38 * Started adding scenario tests * More test scenarios * Better docstring formatting * Started iothub support * Fixed long running test * Fixed typo and memory leak * Restructure * IoThub support * Updates for RC1 release * Fix long running test * Docstring and sample cleanups * Working on error retry * Improved error processing * Fixed partition manager * Progress on IotHub error * Some test updates * Updated uamqp dependency * Restructure for independent connections * Added HTTP proxy support Fix for issue #41 * Fixed some tests + samples * pylint fixes * bumped version * Added keepalive config and some eph fixes * Made reconnect configurable * Added more EPH options * Bumped version * Pylint fix * Pylint fix * Added send and auth timeouts * Changed log formatting. Retry on reconnect * Pylint fixes * Renamed internal async module * Updated send example to match recv Fix for issue #56 * Added build badge to readme * Fix for repeat startup * Added more storage connect options to EPH * Bumped version * Handler blocked until client started * Added event data methods * Fix pylint * Fix 3.7 CI * Fix 3.7 CI * Updated pylint version * Pylint fixes * Updated README * Fixed readme badge refresh * Fixed bug in Azure namespace package * Updated manifest * Parse enqueued time as UTC Fixes #72. * Updates for release 1.2.0 (#81) * Made setup 2.7 compatible * Separated async tests * Support 2.7 types * Bumped version * Added non-ascii tests * Fix CI * Fix Py27 pylint * Added iot sample * Updated sender/receiver client opening * bumped version * Updated tests * Fixed test name * Fixed test env settings * Skip eph test * Updates for v1.3.0 (#91) * Added support for storing the state of the Event Processor along the Checkpoint. Both Checkpoint and the EP state are stored as pickled objects. * Fixing pylint complaints. * Switched from pickle back to JSON for lease persistence. 
* Fixes bug when accessing leases that don't contain EP context. Also, minor renaming. * Better SAS token support * Fixed pylint * Improved auth error handling * Test stabilization * Improved stored EPH context * Updated EPH context storing * Skip test on OSX * Skip tests on OSX Fail due to large message body bug. * Some cleanup * Fixed error handling * Improved SAS token parsing * Fixed datetime offset (#99) * Fixed datetime offset * Updated pylint * Removed 3.4 pylint pass * Fixed bug in error handling (#100) * Migrate event hub sdk to central repo 1. add verifiable code snippets into docstring 2. update readme according to the template 3. add livetest mark and config 4. optimize code layout/structure * 1. document formatting 2. separate async/sync example tests * Fix build error: 1. uamqp dependency mismatch 2. rename test_examples in eventhub to avoid mismatch * This should fix build error * remove tests import and add sys path to solve build error * add live test for sending BatchEvent with application_properties, new live test passed with new uamqp wheel locally installed * Add get_partition_info in Event Hub * add get_partition_info * Add telemetry information to the connection properties * Disable smart split in batch message * 1. Add amqp over websocket test 2. Add proxy sample 3. 
Update some comment and code * update some test code * Add __str__ to EventData * Update test code --- .../azure/eventhub/aio/receiver_async.py | 2 +- .../azure-eventhubs/azure/eventhub/common.py | 22 ++++ sdk/eventhub/azure-eventhubs/conftest.py | 6 +- .../asynctests/test_iothub_receive_async.py | 28 ++--- .../tests/asynctests/test_longrunning_eph.py | 27 +++-- .../test_longrunning_eph_with_context.py | 26 ++-- .../test_longrunning_receive_async.py | 62 +++++----- .../asynctests/test_longrunning_send_async.py | 37 +++--- .../tests/asynctests/test_negative_async.py | 112 +++++++++--------- .../tests/asynctests/test_receive_async.py | 58 ++++----- .../tests/asynctests/test_reconnect_async.py | 44 ++----- .../tests/asynctests/test_send_async.py | 39 +++--- .../tests/test_iothub_receive.py | 10 +- .../azure-eventhubs/tests/test_iothub_send.py | 4 +- .../tests/test_longrunning_receive.py | 15 ++- .../tests/test_longrunning_send.py | 13 +- .../azure-eventhubs/tests/test_negative.py | 67 +++++------ .../azure-eventhubs/tests/test_receive.py | 43 +++---- .../azure-eventhubs/tests/test_reconnect.py | 35 +----- .../azure-eventhubs/tests/test_send.py | 21 ++-- 20 files changed, 317 insertions(+), 354 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 73c2c7146d5e..29b434e2c841 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -240,7 +240,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements async def reconnect(self): """If the Receiver was disconnected from the service with a retryable error - attempt to reconnect.""" - while not await self._reconnect_async(): + while not await self._reconnect(): await asyncio.sleep(self.reconnect_backoff) async def close(self, exception=None): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py 
b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index 82be5e890ddb..a6f2b137c439 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -122,6 +122,25 @@ def __init__(self, body=None, to_device=None, message=None): else: self.message = Message(body, properties=self.msg_properties) + def __str__(self): + dic = { + 'body': self.body_as_str(), + 'application_properties': str(self.application_properties) + } + + if self.sequence_number: + dic['sequence_number'] = str(self.sequence_number) + if self.offset: + dic['offset'] = str(self.offset) + if self.enqueued_time: + dic['enqueued_time'] = str(self.enqueued_time) + if self.device_id: + dic['device_id'] = str(self.device_id) + if self.partition_key: + dic['partition_key'] = str(self.partition_key) + + return str(dic) + @property def sequence_number(self): """ @@ -301,6 +320,9 @@ def __init__(self, value, inclusive=False): self.value = value if value else "-1" self.inclusive = inclusive + def __str__(self): + return str(self.value) + def selector(self): """ Creates a selector expression of the offset. 
diff --git a/sdk/eventhub/azure-eventhubs/conftest.py b/sdk/eventhub/azure-eventhubs/conftest.py index adfa6a635a65..3235342620c7 100644 --- a/sdk/eventhub/azure-eventhubs/conftest.py +++ b/sdk/eventhub/azure-eventhubs/conftest.py @@ -166,11 +166,9 @@ def device_id(): def connstr_receivers(connection_str): client = EventHubClient.from_connection_string(connection_str, debug=False) partitions = client.get_partition_ids() - - recv_offset = EventPosition("@latest") receivers = [] for p in partitions: - receiver = client.create_receiver("$default", p, prefetch=500, event_position=EventPosition("@latest")) + receiver = client.create_receiver(partition_id=p, prefetch=500, event_position=EventPosition("@latest")) receivers.append(receiver) receiver.receive(timeout=1) yield connection_str, receivers @@ -186,7 +184,7 @@ def connstr_senders(connection_str): senders = [] for p in partitions: - sender = client.create_sender(partition=p) + sender = client.create_sender(partition_id=p) senders.append(sender) yield connection_str, senders for s in senders: diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py index a8bc39757d87..396b7d697c87 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py @@ -9,43 +9,43 @@ import pytest import time -from azure import eventhub -from azure.eventhub import EventData, Offset, EventHubError, EventHubClientAsync +from azure.eventhub.aio import EventHubClient +from azure.eventhub import EventData, EventPosition, EventHubError async def pump(receiver, sleep=None): messages = 0 if sleep: await asyncio.sleep(sleep) - batch = await receiver.receive(timeout=1) - messages += len(batch) + async with receiver: + batch = await receiver.receive(timeout=1) + messages += len(batch) return messages async def 
get_partitions(iot_connection_str): try: - client = EventHubClientAsync.from_iothub_connection_string(iot_connection_str, debug=True) - client.add_async_receiver("$default", "0", prefetch=1000, operation='/messages/events') - await client.run_async() - partitions = await client.get_eventhub_info_async() - return partitions["partition_ids"] + client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) + receiver = client.create_receiver(partition_id="0", prefetch=1000, operation='/messages/events') + async with receiver: + partitions = await client.get_properties() + return partitions["partition_ids"] finally: - await client.stop_async() + pass @pytest.mark.liveTest @pytest.mark.asyncio async def test_iothub_receive_multiple_async(iot_connection_str): partitions = await get_partitions(iot_connection_str) - client = EventHubClientAsync.from_iothub_connection_string(iot_connection_str, debug=True) + client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) try: receivers = [] for p in partitions: - receivers.append(client.add_async_receiver("$default", p, prefetch=10, operation='/messages/events')) - await client.run_async() + receivers.append(client.create_receiver(partition_id=p, prefetch=10, operation='/messages/events')) outputs = await asyncio.gather(*[pump(r) for r in receivers]) assert isinstance(outputs[0], int) and outputs[0] <= 10 assert isinstance(outputs[1], int) and outputs[1] <= 10 finally: - await client.stop_async() + pass diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph.py index 9a51d067e312..78611b3bf2ef 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph.py @@ -13,7 +13,8 @@ import pytest from logging.handlers import RotatingFileHandler -from azure.eventhub import EventHubClientAsync, 
EventData +from azure.eventhub.aio import EventHubClient +from azure.eventhub import EventData from azure.eventprocessorhost import ( AbstractEventProcessor, AzureStorageCheckpointLeaseManager, @@ -123,13 +124,14 @@ async def pump(pid, sender, duration): total = 0 try: - while time.time() < deadline: - data = EventData(body=b"D" * 512) - sender.transfer(data) - total += 1 - if total % 100 == 0: - await sender.wait_async() - #logger.info("{}: Send total {}".format(pid, total)) + async with sender: + while time.time() < deadline: + data = EventData(body=b"D" * 512) + sender.queue_message(data) + total += 1 + if total % 100 == 0: + await sender.send_pending_messages() + #logger.info("{}: Send total {}".format(pid, total)) except Exception as err: logger.error("{}: Send failed {}".format(pid, err)) raise @@ -164,14 +166,13 @@ def test_long_running_eph(live_eventhub): live_eventhub['key_name'], live_eventhub['access_key'], live_eventhub['event_hub']) - send_client = EventHubClientAsync.from_connection_string(conn_str) + send_client = EventHubClient.from_connection_string(conn_str) pumps = [] for pid in ["0", "1"]: - sender = send_client.add_async_sender(partition=pid, send_timeout=0, keep_alive=False) + sender = send_client.create_sender(partition_id=pid, send_timeout=0, keep_alive=False) pumps.append(pump(pid, sender, 15)) - loop.run_until_complete(send_client.run_async()) results = loop.run_until_complete(asyncio.gather(*pumps, return_exceptions=True)) - loop.run_until_complete(send_client.stop_async()) + assert not any(results) # Eventhub config and storage manager @@ -198,7 +199,7 @@ def test_long_running_eph(live_eventhub): EventProcessor, eh_config, storage_manager, - ep_params=["param1","param2"], + ep_params=["param1", "param2"], eph_options=eh_options, loop=loop) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph_with_context.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph_with_context.py index 
3c926dd77470..7b4a9021db1d 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph_with_context.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_eph_with_context.py @@ -13,7 +13,8 @@ import pytest from logging.handlers import RotatingFileHandler -from azure.eventhub import EventHubClientAsync, EventData +from azure.eventhub.aio import EventHubClient +from azure.eventhub import EventData from azure.eventprocessorhost import ( AbstractEventProcessor, AzureStorageCheckpointLeaseManager, @@ -128,13 +129,14 @@ async def pump(pid, sender, duration): total = 0 try: - while time.time() < deadline: - data = EventData(body=b"D" * 512) - sender.transfer(data) - total += 1 - if total % 100 == 0: - await sender.wait_async() - #logger.info("{}: Send total {}".format(pid, total)) + async with sender: + while time.time() < deadline: + data = EventData(body=b"D" * 512) + sender.queue_message(data) + total += 1 + if total % 100 == 0: + await sender.send_pending_messages() + #logger.info("{}: Send total {}".format(pid, total)) except Exception as err: logger.error("{}: Send failed {}".format(pid, err)) raise @@ -169,14 +171,12 @@ def test_long_running_context_eph(live_eventhub): live_eventhub['key_name'], live_eventhub['access_key'], live_eventhub['event_hub']) - send_client = EventHubClientAsync.from_connection_string(conn_str) + send_client = EventHubClient.from_connection_string(conn_str) pumps = [] for pid in ["0", "1"]: - sender = send_client.add_async_sender(partition=pid, send_timeout=0, keep_alive=False) + sender = send_client.create_sender(partition_id=pid, send_timeout=0) pumps.append(pump(pid, sender, 15)) - loop.run_until_complete(send_client.run_async()) results = loop.run_until_complete(asyncio.gather(*pumps, return_exceptions=True)) - loop.run_until_complete(send_client.stop_async()) assert not any(results) # Eventhub config and storage manager @@ -223,4 +223,4 @@
config['namespace'] = os.environ['EVENT_HUB_NAMESPACE'] config['consumer_group'] = "$Default" config['partition'] = "0" - test_long_running_eph(config) + test_long_running_context_eph(config) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py index b3e7dca8a2dc..6036db2788c5 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py @@ -18,8 +18,8 @@ import pytest from logging.handlers import RotatingFileHandler -from azure.eventhub import Offset -from azure.eventhub import EventHubClientAsync +from azure.eventhub import EventPosition +from azure.eventhub.aio import EventHubClient def get_logger(filename, level=logging.INFO): @@ -48,7 +48,7 @@ def get_logger(filename, level=logging.INFO): async def get_partitions(client): - eh_data = await client.get_eventhub_info_async() + eh_data = await client.get_properties() return eh_data["partition_ids"] @@ -56,27 +56,29 @@ async def pump(_pid, receiver, _args, _dl): total = 0 iteration = 0 deadline = time.time() + _dl + try: - while time.time() < deadline: - batch = await receiver.receive(timeout=1) - size = len(batch) - total += size - iteration += 1 - if size == 0: - print("{}: No events received, queue size {}, delivered {}".format( - _pid, - receiver.queue_size, - total)) - elif iteration >= 5: - iteration = 0 - print("{}: total received {}, last sn={}, last offset={}".format( - _pid, - total, - batch[-1].sequence_number, - batch[-1].offset.value)) - print("{}: total received {}".format( - _pid, - total)) + async with receiver: + while time.time() < deadline: + batch = await receiver.receive(timeout=1) + size = len(batch) + total += size + iteration += 1 + if size == 0: + print("{}: No events received, queue size {}, delivered {}".format( + _pid, + receiver.queue_size, + total)) + 
elif iteration >= 5: + iteration = 0 + print("{}: total received {}, last sn={}, last offset={}".format( + _pid, + total, + batch[-1].sequence_number, + batch[-1].offset.value)) + print("{}: total received {}".format( + _pid, + total)) except Exception as e: print("Partition {} receiver failed: {}".format(_pid, e)) raise @@ -98,11 +100,11 @@ def test_long_running_receive_async(connection_str): loop = asyncio.get_event_loop() args, _ = parser.parse_known_args() if args.conn_str: - client = EventHubClientAsync.from_connection_string( + client = EventHubClient.from_connection_string( args.conn_str, eventhub=args.eventhub, auth_timeout=240, debug=False) elif args.address: - client = EventHubClientAsync( + client = EventHubClient( args.address, auth_timeout=240, username=args.sas_policy, @@ -121,16 +123,14 @@ def test_long_running_receive_async(connection_str): partitions = args.partitions.split(",") pumps = [] for pid in partitions: - receiver = client.add_async_receiver( - consumer_group=args.consumer, - partition=pid, - offset=Offset(args.offset), + receiver = client.create_receiver( + partition_id=pid, + event_position=EventPosition(args.offset), prefetch=50) pumps.append(pump(pid, receiver, args, args.duration)) - loop.run_until_complete(client.run_async()) loop.run_until_complete(asyncio.gather(*pumps)) finally: - loop.run_until_complete(client.stop_async()) + pass if __name__ == '__main__': diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py index 56832f87a87d..ef4817900f27 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py @@ -13,7 +13,8 @@ import pytest from logging.handlers import RotatingFileHandler -from azure.eventhub import EventHubClientAsync, EventData +from azure.eventhub import EventData +from azure.eventhub.aio 
import EventHubClient def get_logger(filename, level=logging.INFO): @@ -47,7 +48,7 @@ def check_send_successful(outcome, condition): async def get_partitions(args): - eh_data = await args.get_eventhub_info_async() + eh_data = await args.get_properties() return eh_data["partition_ids"] @@ -65,16 +66,17 @@ def data_generator(): logger.info("{}: Sending single messages".format(pid)) try: - while time.time() < deadline: - if args.batch > 1: - data = EventData(batch=data_generator()) - else: - data = EventData(body=b"D" * args.payload) - sender.transfer(data, callback=check_send_successful) - total += args.batch - if total % 100 == 0: - await sender.wait_async() - logger.info("{}: Send total {}".format(pid, total)) + async with sender: + while time.time() < deadline: + if args.batch > 1: + data = EventData(body=data_generator()) + else: + data = EventData(body=b"D" * args.payload) + sender.queue_message(data, callback=check_send_successful) + total += args.batch + if total % 100 == 0: + await sender.send_pending_messages() + logger.info("{}: Send total {}".format(pid, total)) except Exception as err: logger.error("{}: Send failed {}".format(pid, err)) raise @@ -99,11 +101,11 @@ def test_long_running_partition_send_async(connection_str): args, _ = parser.parse_known_args() if args.conn_str: - client = EventHubClientAsync.from_connection_string( + client = EventHubClient.from_connection_string( args.conn_str, eventhub=args.eventhub, debug=True) elif args.address: - client = EventHubClientAsync( + client = EventHubClient( args.address, username=args.sas_policy, password=args.sas_key, @@ -126,16 +128,15 @@ def test_long_running_partition_send_async(connection_str): partitions = args.partitions.split(",") pumps = [] for pid in partitions: - sender = client.add_async_sender(partition=pid, send_timeout=0, keep_alive=False) + sender = client.create_sender(partition_id=pid, send_timeout=0) pumps.append(pump(pid, sender, args, args.duration)) - 
loop.run_until_complete(client.run_async()) results = loop.run_until_complete(asyncio.gather(*pumps, return_exceptions=True)) assert not results except Exception as e: logger.error("Sender failed: {}".format(e)) finally: - logger.info("Shutting down sender") - loop.run_until_complete(client.stop_async()) + pass + if __name__ == '__main__': test_long_running_partition_send_async(os.environ.get('EVENT_HUB_CONNECTION_STR')) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py index 4b2e8b0a367b..08a8bb88f7ca 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py @@ -10,102 +10,99 @@ import time import sys -from azure import eventhub from azure.eventhub import ( - EventHubClientAsync, EventData, - Offset, + EventPosition, EventHubError) - +from azure.eventhub.aio import EventHubClient @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_with_invalid_hostname_async(invalid_hostname, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(invalid_hostname, debug=True) - sender = client.add_async_sender() + client = EventHubClient.from_connection_string(invalid_hostname, debug=True) + sender = client.create_sender() with pytest.raises(EventHubError): - await client.run_async() + await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_invalid_hostname_async(invalid_hostname): - client = EventHubClientAsync.from_connection_string(invalid_hostname, debug=True) - sender = client.add_async_receiver("$default", "0") + client = EventHubClient.from_connection_string(invalid_hostname, debug=True) + sender = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - await client.run_async() + await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def 
test_send_with_invalid_key_async(invalid_key, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(invalid_key, debug=False) - sender = client.add_async_sender() + client = EventHubClient.from_connection_string(invalid_key, debug=False) + sender = client.create_sender() with pytest.raises(EventHubError): - await client.run_async() + await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_invalid_key_async(invalid_key): - client = EventHubClientAsync.from_connection_string(invalid_key, debug=True) - sender = client.add_async_receiver("$default", "0") + client = EventHubClient.from_connection_string(invalid_key, debug=True) + sender = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - await client.run_async() + await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_with_invalid_policy_async(invalid_policy, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(invalid_policy, debug=False) - sender = client.add_async_sender() + client = EventHubClient.from_connection_string(invalid_policy, debug=False) + sender = client.create_sender() with pytest.raises(EventHubError): - await client.run_async() + await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_invalid_policy_async(invalid_policy): - client = EventHubClientAsync.from_connection_string(invalid_policy, debug=True) - sender = client.add_async_receiver("$default", "0") + client = EventHubClient.from_connection_string(invalid_policy, debug=True) + sender = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - await client.run_async() + await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_partition_key_with_partition_async(connection_str): - client = EventHubClientAsync.from_connection_string(connection_str, debug=True) - sender = 
client.add_async_sender(partition="1") + client = EventHubClient.from_connection_string(connection_str, debug=True) + sender = client.create_sender(partition_id="1") try: - await client.run_async() data = EventData(b"Data") data.partition_key = b"PKey" with pytest.raises(ValueError): await sender.send(data) finally: - await client.stop_async() + await sender.close() @pytest.mark.liveTest @pytest.mark.asyncio async def test_non_existing_entity_sender_async(connection_str): - client = EventHubClientAsync.from_connection_string(connection_str, eventhub="nemo", debug=False) - sender = client.add_async_sender(partition="1") + client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) + sender = client.create_sender(partition_id="1") with pytest.raises(EventHubError): - await client.run_async() + await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_non_existing_entity_receiver_async(connection_str): - client = EventHubClientAsync.from_connection_string(connection_str, eventhub="nemo", debug=False) - receiver = client.add_async_receiver("$default", "0") + client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) + receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - await client.run_async() + await receiver._open() @pytest.mark.liveTest @@ -113,14 +110,13 @@ async def test_non_existing_entity_receiver_async(connection_str): async def test_receive_from_invalid_partitions_async(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: - client = EventHubClientAsync.from_connection_string(connection_str, debug=True) - receiver = client.add_async_receiver("$default", p) + client = EventHubClient.from_connection_string(connection_str, debug=True) + receiver = client.create_receiver(partition_id=p) try: with pytest.raises(EventHubError): - await client.run_async() await receiver.receive(timeout=10) finally: - await
client.stop_async() + await receiver.close() @pytest.mark.liveTest @@ -128,13 +124,13 @@ async def test_receive_from_invalid_partitions_async(connection_str): async def test_send_to_invalid_partitions_async(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender(partition=p) + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender(partition_id=p) try: with pytest.raises(EventHubError): - await client.run_async() + await sender._open() finally: - await client.stop_async() + await sender.close() @pytest.mark.liveTest @@ -142,52 +138,50 @@ async def test_send_to_invalid_partitions_async(connection_str): async def test_send_too_large_message_async(connection_str): if sys.platform.startswith('darwin'): pytest.skip("Skipping on OSX - open issue regarding message size") - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() try: - await client.run_async() data = EventData(b"A" * 300000) with pytest.raises(EventHubError): await sender.send(data) finally: - await client.stop_async() + await sender.close() @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_null_body_async(connection_str): - client = EventHubClientAsync.from_connection_string(connection_str, debug=False) - sender = client.add_async_sender() + client = EventHubClient.from_connection_string(connection_str, debug=False) + sender = client.create_sender() try: - await client.run_async() with pytest.raises(ValueError): data = EventData(None) await sender.send(data) finally: - await client.stop_async() + await sender.close() async def pump(receiver): - messages = 0 - count = 0 - batch = await receiver.receive(timeout=10) - while batch and 
count <= 5: - count += 1 - messages += len(batch) + async with receiver: + messages = 0 + count = 0 batch = await receiver.receive(timeout=10) - return messages + while batch and count <= 5: + count += 1 + messages += len(batch) + batch = await receiver.receive(timeout=10) + return messages @pytest.mark.liveTest @pytest.mark.asyncio async def test_max_receivers_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClientAsync.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, debug=True) receivers = [] for i in range(6): - receivers.append(client.add_async_receiver("$default", "0", prefetch=1000, offset=Offset('@latest'))) + receivers.append(client.create_receiver(partition_id="0", prefetch=1000, event_position=EventPosition('@latest'))) try: - await client.run_async() outputs = await asyncio.gather( pump(receivers[0]), pump(receivers[1]), @@ -201,4 +195,4 @@ async def test_max_receivers_async(connstr_senders): assert len(failed) == 1 print(failed[0].message) finally: - await client.stop_async() + pass diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py index b9e8adc7045b..96c4489fabc2 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py @@ -18,7 +18,7 @@ async def test_receive_end_of_stream_async(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 @@ -34,7 +34,7 @@ async def 
test_receive_end_of_stream_async(connstr_senders): async def test_receive_with_offset_async(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 @@ -44,7 +44,7 @@ async def test_receive_with_offset_async(connstr_senders): assert len(received) == 1 offset = received[0].offset - offset_receiver = client.create_receiver("$default", "0", offset=offset) + offset_receiver = client.create_receiver(partition_id="0", event_position=offset) async with offset_receiver: received = await offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -58,7 +58,7 @@ async def test_receive_with_offset_async(connstr_senders): async def test_receive_with_inclusive_offset_async(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 @@ -68,7 +68,7 @@ async def test_receive_with_inclusive_offset_async(connstr_senders): assert len(received) == 1 offset = received[0].offset - offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset.value, inclusive=True)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset.value, inclusive=True)) async with offset_receiver: received = await offset_receiver.receive(timeout=5) assert len(received) == 1 @@ -79,7 +79,7 @@ async def 
test_receive_with_inclusive_offset_async(connstr_senders): async def test_receive_with_datetime_async(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 @@ -88,7 +88,7 @@ async def test_receive_with_datetime_async(connstr_senders): assert len(received) == 1 offset = received[0].enqueued_time - offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset)) async with offset_receiver: received = await offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -101,9 +101,10 @@ async def test_receive_with_datetime_async(connstr_senders): @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_sequence_no_async(connstr_senders): + # TODO: sampe problem as the sync version connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 @@ -112,7 +113,7 @@ async def test_receive_with_sequence_no_async(connstr_senders): assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset)) async with offset_receiver: received = await offset_receiver.receive(timeout=5) assert 
len(received) == 0 @@ -127,7 +128,7 @@ async def test_receive_with_sequence_no_async(connstr_senders): async def test_receive_with_inclusive_sequence_no_async(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 @@ -136,7 +137,7 @@ async def test_receive_with_inclusive_sequence_no_async(connstr_senders): assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.create_receiver("$default", "0", offset=EventPosition(offset, inclusive=True)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset, inclusive=True)) async with offset_receiver: received = await offset_receiver.receive(timeout=5) assert len(received) == 1 @@ -147,7 +148,7 @@ async def test_receive_with_inclusive_sequence_no_async(connstr_senders): async def test_receive_batch_async(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 @@ -174,19 +175,19 @@ async def pump(receiver, sleep=None): @pytest.mark.liveTest @pytest.mark.asyncio -async def test_epoch_receiver_async(connstr_senders): +async def test_exclusive_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) client = 
EventHubClient.from_connection_string(connection_str, debug=False) receivers = [] - for epoch in [10, 20]: - receivers.append(client.create_epoch_receiver("$default", "0", epoch, prefetch=5)) + for exclusive_receiver_priority in [10, 20]: + receivers.append(client.create_receiver(partition_id="0", exclusive_receiver_priority=exclusive_receiver_priority, prefetch=5)) outputs = await asyncio.gather( pump(receivers[0]), pump(receivers[1]), return_exceptions=True) - assert isinstance(outputs[0], EventHubError) + assert isinstance(outputs[0], EventHubError) # TODO; it's LinkDetach error assert outputs[1] == 1 @@ -197,13 +198,13 @@ async def test_multiple_receiver_async(connstr_senders): senders[0].send(EventData(b"Receiving only a single event")) client = EventHubClient.from_connection_string(connection_str, debug=True) - partitions = await client.get_eventhub_information() + partitions = await client.get_properties() assert partitions["partition_ids"] == ["0", "1"] receivers = [] for i in range(2): - receivers.append(client.create_receiver("$default", "0", prefetch=10)) + receivers.append(client.create_receiver(partition_id="0", prefetch=10)) try: - more_partitions = await client.get_eventhub_information() + more_partitions = await client.get_properties() assert more_partitions["partition_ids"] == ["0", "1"] outputs = await asyncio.gather( pump(receivers[0]), @@ -218,14 +219,14 @@ async def test_multiple_receiver_async(connstr_senders): @pytest.mark.liveTest @pytest.mark.asyncio -async def test_epoch_receiver_after_non_epoch_receiver_async(connstr_senders): +async def test_exclusive_receiver_after_non_exclusive_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) client = EventHubClient.from_connection_string(connection_str, debug=False) receivers = [] - receivers.append(client.create_receiver("$default", "0", prefetch=10)) - receivers.append(client.create_epoch_receiver("$default", "0", 
15, prefetch=10)) + receivers.append(client.create_receiver(partition_id="0", prefetch=10)) + receivers.append(client.create_receiver(partition_id="0", exclusive_receiver_priority=15, prefetch=10)) try: outputs = await asyncio.gather( pump(receivers[0]), @@ -240,14 +241,14 @@ async def test_epoch_receiver_after_non_epoch_receiver_async(connstr_senders): @pytest.mark.liveTest @pytest.mark.asyncio -async def test_non_epoch_receiver_after_epoch_receiver_async(connstr_senders): +async def test_non_exclusive_receiver_after_exclusive_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) client = EventHubClient.from_connection_string(connection_str, debug=False) receivers = [] - receivers.append(client.create_epoch_receiver("$default", "0", 15, prefetch=10)) - receivers.append(client.create_receiver("$default", "0", prefetch=10)) + receivers.append(client.create_receiver(partition_id="0", exclusive_receiver_priority=15, prefetch=10)) + receivers.append(client.create_receiver(partition_id="0", prefetch=10)) try: outputs = await asyncio.gather( pump(receivers[0]), @@ -263,7 +264,6 @@ async def test_non_epoch_receiver_after_epoch_receiver_async(connstr_senders): @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_batch_with_app_prop_async(connstr_senders): - #pytest.skip("Waiting on uAMQP release") connection_str, senders = connstr_senders app_prop_key = "raw_prop" app_prop_value = "raw_value" @@ -280,12 +280,12 @@ def batched(): yield ed client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, offset=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) assert len(received) == 0 - senders[0].send_batch(batched()) + senders[0].send(batched()) await 
asyncio.sleep(1) @@ -303,7 +303,7 @@ def batched(): async def test_receive_over_websocket_async(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) event_list = [] for i in range(20): diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py index 9fafc0dc0069..db030e7981a0 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py @@ -9,28 +9,26 @@ import asyncio import pytest -from azure import eventhub from azure.eventhub import ( - EventHubClientAsync, EventData, - Offset, + EventPosition, EventHubError) +from azure.eventhub.aio import EventHubClient @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_with_long_interval_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=True) - sender = client.add_async_sender() + client = EventHubClient.from_connection_string(connection_str, debug=True) + sender = client.create_sender() try: - await client.run_async() await sender.send(EventData(b"A single event")) for _ in range(2): await asyncio.sleep(300) await sender.send(EventData(b"A single event")) finally: - await client.stop_async() + await sender.close() received = [] for r in receivers: @@ -41,11 +39,12 @@ async def test_send_with_long_interval_async(connstr_receivers): def pump(receiver): messages = [] - batch = receiver.receive(timeout=1) - messages.extend(batch) - while batch: + with receiver: batch = 
receiver.receive(timeout=1) messages.extend(batch) + while batch: + batch = receiver.receive(timeout=1) + messages.extend(batch) return messages @@ -53,10 +52,9 @@ def pump(receiver): @pytest.mark.asyncio async def test_send_with_forced_conn_close_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClientAsync.from_connection_string(connection_str, debug=True) - sender = client.add_async_sender() + client = EventHubClient.from_connection_string(connection_str, debug=True) + sender = client.create_sender() try: - await client.run_async() await sender.send(EventData(b"A single event")) sender._handler._message_sender.destroy() await asyncio.sleep(300) @@ -67,28 +65,10 @@ async def test_send_with_forced_conn_close_async(connstr_receivers): await sender.send(EventData(b"A single event")) await sender.send(EventData(b"A single event")) finally: - await client.stop_async() + await sender.close() received = [] for r in receivers: received.extend(pump(r)) assert len(received) == 5 assert list(received[0].body)[0] == b"A single event" - - -# def test_send_with_forced_link_detach(connstr_receivers): -# connection_str, receivers = connstr_receivers -# client = EventHubClient.from_connection_string(connection_str, debug=True) -# sender = client.add_sender() -# size = 20 * 1024 -# try: -# client.run() -# for i in range(1000): -# sender.transfer(EventData([b"A"*size, b"B"*size, b"C"*size, b"D"*size, b"A"*size, b"B"*size, b"C"*size, b"D"*size, b"A"*size, b"B"*size, b"C"*size, b"D"*size])) -# sender.wait() -# finally: -# client.stop() - -# received = [] -# for r in receivers: -# received.extend(r.receive(timeout=10)) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py index da5177fb966d..55096ec63c86 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py @@ 
-22,14 +22,15 @@ async def test_send_with_partition_key_async(connstr_receivers): client = EventHubClient.from_connection_string(connection_str, debug=False) sender = client.create_sender() - data_val = 0 - for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]: - partition_key = b"test_partition_" + partition - for i in range(50): - data = EventData(str(data_val)) - data.partition_key = partition_key - data_val += 1 - await sender.send(data) + async with sender: + data_val = 0 + for partition in [b"a", b"b", b"c", b"d", b"e", b"f"]: + partition_key = b"test_partition_" + partition + for i in range(50): + data = EventData(str(data_val)) + data.partition_key = partition_key + data_val += 1 + await sender.send(data) found_partition_keys = {} for index, partition in enumerate(receivers): @@ -80,6 +81,7 @@ async def test_send_single_event_async(connstr_receivers): @pytest.mark.asyncio async def test_send_batch_async(connstr_receivers): connection_str, receivers = connstr_receivers + def batched(): for i in range(10): yield EventData("Event number {}".format(i)) @@ -87,7 +89,7 @@ def batched(): client = EventHubClient.from_connection_string(connection_str, debug=False) sender = client.create_sender() async with sender: - await sender.send_batch(batched()) + await sender.send(batched()) time.sleep(1) received = [] @@ -104,7 +106,7 @@ def batched(): async def test_send_partition_async(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.create_sender(partition="1") + sender = client.create_sender(partition_id="1") async with sender: await sender.send(EventData(b"Data")) @@ -119,7 +121,7 @@ async def test_send_partition_async(connstr_receivers): async def test_send_non_ascii_async(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.create_sender(partition="0") + 
sender = client.create_sender(partition_id="0") async with sender: await sender.send(EventData("é,è,à,ù,â,ê,î,ô,û")) await sender.send(EventData(json.dumps({"foo": "漢字"}))) @@ -140,9 +142,9 @@ def batched(): yield EventData("Event number {}".format(i)) client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.create_sender(partition="1") + sender = client.create_sender(partition_id="1") async with sender: - await sender.send_batch(batched()) + await sender.send(batched()) partition_0 = receivers[0].receive(timeout=2) assert len(partition_0) == 0 @@ -172,8 +174,8 @@ async def test_send_array_async(connstr_receivers): async def test_send_multiple_clients_async(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender_0 = client.create_sender(partition="0") - sender_1 = client.create_sender(partition="1") + sender_0 = client.create_sender(partition_id="0") + sender_1 = client.create_sender(partition_id="1") async with sender_0 and sender_1: await sender_0.send(EventData(b"Message 0")) await sender_1.send(EventData(b"Message 1")) @@ -187,7 +189,6 @@ async def test_send_multiple_clients_async(connstr_receivers): @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_batch_with_app_prop_async(connstr_receivers): - # pytest.skip("Waiting on uAMQP release") connection_str, receivers = connstr_receivers app_prop_key = "raw_prop" app_prop_value = "raw_value" @@ -197,16 +198,16 @@ def batched(): for i in range(10): ed = EventData("Event number {}".format(i)) ed.application_properties = app_prop - yield "Event number {}".format(i) + yield ed for i in range(10, 20): ed = EventData("Event number {}".format(i)) ed.application_properties = app_prop - yield "Event number {}".format(i) + yield ed client = EventHubClient.from_connection_string(connection_str, debug=False) sender = client.create_sender() async with sender: - await 
sender.send_batch(batched()) + await sender.send(batched()) time.sleep(1) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py index ce3db34940e8..ce5fa973a069 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py @@ -8,19 +8,17 @@ import pytest import time -from azure import eventhub -from azure.eventhub import EventData, EventHubClient, Offset +from azure.eventhub import EventData, EventHubClient @pytest.mark.liveTest def test_iothub_receive_sync(iot_connection_str, device_id): client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) - receiver = client.add_receiver("$default", "0", operation='/messages/events') + receiver = client.create_receiver(partition_id="0", operation='/messages/events') try: - client.run() - partitions = client.get_eventhub_info() + partitions = client.get_properties() assert partitions["partition_ids"] == ["0", "1", "2", "3"] received = receiver.receive(timeout=5) assert len(received) == 0 finally: - client.stop() \ No newline at end of file + receiver.close() diff --git a/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py b/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py index 96d4adaa4cf1..b9ef0a778a33 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py @@ -11,7 +11,6 @@ from uamqp.message import MessageProperties -from azure import eventhub from azure.eventhub import EventData, EventHubClient @@ -20,10 +19,9 @@ def test_iothub_send_single_event(iot_connection_str, device_id): client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) sender = client.add_sender(operation='/messages/devicebound') try: - client.run() outcome = sender.send(EventData(b"A single event", to_device=device_id)) assert outcome.value == 0 except: raise finally: - 
client.stop() + sender.close() diff --git a/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py index 1afbd9c05103..1c4c22ed9257 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py @@ -18,7 +18,7 @@ from logging.handlers import RotatingFileHandler -from azure.eventhub import Offset +from azure.eventhub import EventPosition from azure.eventhub import EventHubClient def get_logger(filename, level=logging.INFO): @@ -47,7 +47,7 @@ def get_logger(filename, level=logging.INFO): def get_partitions(args): - eh_data = args.get_eventhub_info() + eh_data = args.get_properties() return eh_data["partition_ids"] @@ -117,15 +117,14 @@ def test_long_running_receive(connection_str): partitions = args.partitions.split(",") pumps = {} for pid in partitions: - pumps[pid] = client.add_receiver( - consumer_group=args.consumer, - partition=pid, - offset=Offset(args.offset), + pumps[pid] = client.create_receiver( + partition_id=pid, + event_position=EventPosition(args.offset), prefetch=50) - client.run() pump(pumps, args.duration) finally: - client.stop() + for pid in partitions: + pumps[pid].close() if __name__ == '__main__': diff --git a/sdk/eventhub/azure-eventhubs/tests/test_longrunning_send.py b/sdk/eventhub/azure-eventhubs/tests/test_longrunning_send.py index 31744d8550dd..90c6d0dc3cf9 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_longrunning_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_longrunning_send.py @@ -51,8 +51,7 @@ def check_send_successful(outcome, condition): def main(client, args): - sender = client.add_sender() - client.run() + sender = client.create_sender() deadline = time.time() + args.duration total = 0 @@ -70,16 +69,16 @@ def data_generator(): if args.batch > 1: data = EventData(batch=data_generator()) else: - data = EventData(body=b"D" * args.payload) - sender.transfer(data, 
callback=check_send_successful) + data = EventData(batch=b"D" * args.payload) + sender.queue_message(data, callback=check_send_successful) total += args.batch if total % 10000 == 0: - sender.wait() - print("Send total {}".format(total)) + sender.send_pending_messages() + print("Send total {}".format(total)) except Exception as err: print("Send failed {}".format(err)) finally: - client.stop() + sender.close() print("Sent total {}".format(total)) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_negative.py b/sdk/eventhub/azure-eventhubs/tests/test_negative.py index 28fd7493ef13..d0290bd88e4a 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_negative.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_negative.py @@ -9,10 +9,9 @@ import time import sys -from azure import eventhub from azure.eventhub import ( EventData, - Offset, + EventPosition, EventHubError, EventHubClient) @@ -21,81 +20,80 @@ def test_send_with_invalid_hostname(invalid_hostname, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_hostname, debug=False) - sender = client.add_sender() + sender = client.create_sender() with pytest.raises(EventHubError): - client.run() + sender._open() @pytest.mark.liveTest def test_receive_with_invalid_hostname_sync(invalid_hostname): client = EventHubClient.from_connection_string(invalid_hostname, debug=True) - receiver = client.add_receiver("$default", "0") + receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - client.run() + receiver._open() @pytest.mark.liveTest def test_send_with_invalid_key(invalid_key, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_key, debug=False) - sender = client.add_sender() + sender = client.create_sender() with pytest.raises(EventHubError): - client.run() + sender._open() @pytest.mark.liveTest def test_receive_with_invalid_key_sync(invalid_key): client = 
EventHubClient.from_connection_string(invalid_key, debug=True) - receiver = client.add_receiver("$default", "0") + receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - client.run() + receiver._open() @pytest.mark.liveTest def test_send_with_invalid_policy(invalid_policy, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_policy, debug=False) - sender = client.add_sender() + sender = client.create_sender() with pytest.raises(EventHubError): - client.run() + sender._open() @pytest.mark.liveTest def test_receive_with_invalid_policy_sync(invalid_policy): client = EventHubClient.from_connection_string(invalid_policy, debug=True) - receiver = client.add_receiver("$default", "0") + receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - client.run() + receiver._open() @pytest.mark.liveTest def test_send_partition_key_with_partition_sync(connection_str): client = EventHubClient.from_connection_string(connection_str, debug=True) - sender = client.add_sender(partition="1") + sender = client.create_sender(partition_id="1") try: - client.run() data = EventData(b"Data") data.partition_key = b"PKey" with pytest.raises(ValueError): sender.send(data) finally: - client.stop() + sender.close() @pytest.mark.liveTest def test_non_existing_entity_sender(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) - sender = client.add_sender(partition="1") + sender = client.create_sender(partition_id="1") with pytest.raises(EventHubError): - client.run() + sender._open() @pytest.mark.liveTest def test_non_existing_entity_receiver(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) - receiver = client.add_receiver("$default", "0") + receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): - client.run() + receiver._open() 
@pytest.mark.liveTest @@ -103,13 +101,12 @@ def test_receive_from_invalid_partitions_sync(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: client = EventHubClient.from_connection_string(connection_str, debug=True) - receiver = client.add_receiver("$default", p) + receiver = client.create_receiver(partition_id=p) try: with pytest.raises(EventHubError): - client.run() receiver.receive(timeout=10) finally: - client.stop() + receiver.close() @pytest.mark.liveTest @@ -117,12 +114,12 @@ def test_send_to_invalid_partitions(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender(partition=p) + sender = client.create_sender(partition_id=p) try: with pytest.raises(EventHubError): - client.run() + sender._open() finally: - client.stop() + sender.close() @pytest.mark.liveTest @@ -130,38 +127,34 @@ def test_send_too_large_message(connection_str): if sys.platform.startswith('darwin'): pytest.skip("Skipping on OSX - open issue regarding message size") client = EventHubClient.from_connection_string(connection_str, debug=True) - sender = client.add_sender() + sender = client.create_sender() try: - client.run() data = EventData(b"A" * 300000) with pytest.raises(EventHubError): sender.send(data) finally: - client.stop() + sender.close() @pytest.mark.liveTest def test_send_null_body(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.add_sender() + sender = client.create_sender() try: - client.run() with pytest.raises(ValueError): data = EventData(None) sender.send(data) finally: - client.stop() + sender.close() @pytest.mark.liveTest def test_message_body_types(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = 
client.add_receiver("$default", "0", offset=Offset('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) try: - client.run() - received = receiver.receive(timeout=5) assert len(received) == 0 senders[0].send(EventData(b"Bytes Data")) @@ -207,4 +200,4 @@ def test_message_body_types(connstr_senders): except: raise finally: - client.stop() \ No newline at end of file + receiver.close() diff --git a/sdk/eventhub/azure-eventhubs/tests/test_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_receive.py index 277d07b856b7..93bb3b01d75c 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_receive.py @@ -37,7 +37,7 @@ def test_receive_end_of_stream(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -53,11 +53,11 @@ def test_receive_end_of_stream(connstr_senders): def test_receive_with_offset_sync(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - partitions = client.get_eventhub_information() + partitions = client.get_properties() assert partitions["partition_ids"] == ["0", "1"] - receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: - more_partitions = client.get_eventhub_information() + more_partitions = client.get_properties() assert more_partitions["partition_ids"] == ["0", "1"] received = receiver.receive(timeout=5) @@ -70,7 +70,7 @@ def 
test_receive_with_offset_sync(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.create_receiver("$default", "0", event_position=offset) + offset_receiver = client.create_receiver(partition_id="0", event_position=offset) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -83,7 +83,7 @@ def test_receive_with_offset_sync(connstr_senders): def test_receive_with_inclusive_offset(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) @@ -97,7 +97,7 @@ def test_receive_with_inclusive_offset(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset.value, inclusive=True)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset.value, inclusive=True)) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 1 @@ -107,11 +107,11 @@ def test_receive_with_inclusive_offset(connstr_senders): def test_receive_with_datetime_sync(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - partitions = client.get_eventhub_information() + partitions = client.get_properties() assert partitions["partition_ids"] == ["0", "1"] - receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: - more_partitions = 
client.get_eventhub_information() + more_partitions = client.get_properties() assert more_partitions["partition_ids"] == ["0", "1"] received = receiver.receive(timeout=5) assert len(received) == 0 @@ -123,7 +123,7 @@ def test_receive_with_datetime_sync(connstr_senders): assert list(received[0].body) == [b'Data'] assert received[0].body_as_str() == "Data" - offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset)) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -145,7 +145,7 @@ def test_receive_with_custom_datetime_sync(connstr_senders): for i in range(5): senders[0].send(EventData(b"Message after timestamp")) - receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset)) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset)) with receiver: all_received = [] received = receiver.receive(timeout=1) @@ -161,9 +161,11 @@ def test_receive_with_custom_datetime_sync(connstr_senders): @pytest.mark.liveTest def test_receive_with_sequence_no(connstr_senders): + # TODO: liveTest fail when just one event data is sent connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) + with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -173,7 +175,7 @@ def test_receive_with_sequence_no(connstr_senders): assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset, False)) 
with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 0 @@ -182,12 +184,11 @@ def test_receive_with_sequence_no(connstr_senders): received = offset_receiver.receive(timeout=5) assert len(received) == 1 - @pytest.mark.liveTest def test_receive_with_inclusive_sequence_no(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -195,7 +196,7 @@ def test_receive_with_inclusive_sequence_no(connstr_senders): received = receiver.receive(timeout=5) assert len(received) == 1 offset = received[0].sequence_number - offset_receiver = client.create_receiver("$default", "0", event_position=EventPosition(offset, inclusive=True)) + offset_receiver = client.create_receiver(partition_id="0", event_position=EventPosition(offset, inclusive=True)) with offset_receiver: received = offset_receiver.receive(timeout=5) assert len(received) == 1 @@ -205,7 +206,7 @@ def test_receive_with_inclusive_sequence_no(connstr_senders): def test_receive_batch(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 @@ -234,12 +235,12 @@ def batched(): yield ed client = EventHubClient.from_connection_string(connection_str, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, event_position=EventPosition('@latest')) + receiver 
= client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) assert len(received) == 0 - senders[0].send_batch(batched()) + senders[0].send(batched()) time.sleep(1) @@ -256,7 +257,7 @@ def batched(): def test_receive_over_websocket_sync(connstr_senders): connection_str, senders = connstr_senders client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) - receiver = client.create_receiver("$default", "0", prefetch=500, event_position=EventPosition('@latest')) + receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) event_list = [] for i in range(20): diff --git a/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py b/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py index d44fb77106bb..0f57daf7aba0 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py @@ -8,10 +8,9 @@ import time import pytest -from azure import eventhub from azure.eventhub import ( EventData, - Offset, + EventPosition, EventHubError, EventHubClient) @@ -20,15 +19,12 @@ def test_send_with_long_interval_sync(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=True) - sender = client.add_sender() - try: - client.run() + sender = client.create_sender() + with sender: sender.send(EventData(b"A single event")) for _ in range(2): time.sleep(300) sender.send(EventData(b"A single event")) - finally: - client.stop() received = [] for r in receivers: @@ -42,9 +38,8 @@ def test_send_with_long_interval_sync(connstr_receivers): def test_send_with_forced_conn_close_sync(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=True) - sender = client.add_sender() - try: - 
client.run() + sender = client.create_sender() + with sender: sender.send(EventData(b"A single event")) sender._handler._message_sender.destroy() time.sleep(300) @@ -54,29 +49,9 @@ def test_send_with_forced_conn_close_sync(connstr_receivers): time.sleep(300) sender.send(EventData(b"A single event")) sender.send(EventData(b"A single event")) - finally: - client.stop() received = [] for r in receivers: received.extend(r.receive(timeout=1)) assert len(received) == 5 assert list(received[0].body)[0] == b"A single event" - - -# def test_send_with_forced_link_detach(connstr_receivers): -# connection_str, receivers = connstr_receivers -# client = EventHubClient.from_connection_string(connection_str, debug=True) -# sender = client.add_sender() -# size = 20 * 1024 -# try: -# client.run() -# for i in range(1000): -# sender.transfer(EventData([b"A"*size, b"B"*size, b"C"*size, b"D"*size, b"A"*size, b"B"*size, b"C"*size, b"D"*size, b"A"*size, b"B"*size, b"C"*size, b"D"*size])) -# sender.wait() -# finally: -# client.stop() - -# received = [] -# for r in receivers: -# received.extend(r.receive(timeout=10)) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_send.py b/sdk/eventhub/azure-eventhubs/tests/test_send.py index 728d21202aba..3831294e3c04 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_send.py @@ -93,6 +93,7 @@ def test_send_single_event(connstr_receivers): @pytest.mark.liveTest def test_send_batch_sync(connstr_receivers): connection_str, receivers = connstr_receivers + def batched(): for i in range(10): yield EventData("Event number {}".format(i)) @@ -100,7 +101,7 @@ def batched(): client = EventHubClient.from_connection_string(connection_str, debug=False) sender = client.create_sender() with sender: - sender.send_batch(batched()) + sender.send(batched()) time.sleep(1) received = [] @@ -116,7 +117,7 @@ def batched(): def test_send_partition(connstr_receivers): connection_str, receivers = connstr_receivers client 
= EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.create_sender(partition="1") + sender = client.create_sender(partition_id="1") with sender: sender.send(EventData(b"Data")) @@ -130,7 +131,7 @@ def test_send_partition(connstr_receivers): def test_send_non_ascii(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.create_sender(partition="0") + sender = client.create_sender(partition_id="0") with sender: sender.send(EventData(u"é,è,à,ù,â,ê,î,ô,û")) sender.send(EventData(json.dumps({"foo": u"漢字"}))) @@ -144,14 +145,15 @@ def test_send_non_ascii(connstr_receivers): @pytest.mark.liveTest def test_send_partition_batch(connstr_receivers): connection_str, receivers = connstr_receivers + def batched(): for i in range(10): yield EventData("Event number {}".format(i)) client = EventHubClient.from_connection_string(connection_str, debug=False) - sender = client.create_sender(partition="1") + sender = client.create_sender(partition_id="1") with sender: - sender.send_batch(batched()) + sender.send(batched()) time.sleep(1) partition_0 = receivers[0].receive(timeout=2) @@ -180,8 +182,8 @@ def test_send_array_sync(connstr_receivers): def test_send_multiple_clients(connstr_receivers): connection_str, receivers = connstr_receivers client = EventHubClient.from_connection_string(connection_str, debug=False) - sender_0 = client.create_sender(partition="0") - sender_1 = client.create_sender(partition="1") + sender_0 = client.create_sender(partition_id="0") + sender_1 = client.create_sender(partition_id="1") with sender_0: sender_0.send(EventData(b"Message 0")) with sender_1: @@ -195,7 +197,6 @@ def test_send_multiple_clients(connstr_receivers): @pytest.mark.liveTest def test_send_batch_with_app_prop_sync(connstr_receivers): - #pytest.skip("Waiting on uAMQP release") connection_str, receivers = connstr_receivers app_prop_key = "raw_prop" 
app_prop_value = "raw_value" @@ -214,8 +215,10 @@ def batched(): client = EventHubClient.from_connection_string(connection_str, debug=False) sender = client.create_sender() with sender: - sender.send_batch(batched()) + sender.send(batched()) + time.sleep(1) + received = [] for r in receivers: received.extend(r.receive(timeout=3)) From d26c967442f604c4d937e367f52cefaa050c1f17 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 11:51:06 -0700 Subject: [PATCH 31/49] Add eh error classes --- .../azure/eventhub/__init__.py | 6 +- .../azure/eventhub/aio/receiver_async.py | 115 +++++++++------ .../azure/eventhub/aio/sender_async.py | 123 ++++++---------- .../azure-eventhubs/azure/eventhub/client.py | 3 +- .../azure-eventhubs/azure/eventhub/common.py | 83 ----------- .../azure-eventhubs/azure/eventhub/error.py | 103 ++++++++++++++ .../azure/eventhub/receiver.py | 124 +++++++++++------ .../azure-eventhubs/azure/eventhub/sender.py | 131 ++++++------------ 8 files changed, 345 insertions(+), 343 deletions(-) create mode 100644 sdk/eventhub/azure-eventhubs/azure/eventhub/error.py diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py index 56a1ff935c0c..3c13e70013e5 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py @@ -5,7 +5,8 @@ __version__ = "1.3.1" -from azure.eventhub.common import EventData, EventHubError, EventPosition +from azure.eventhub.common import EventData, EventPosition +from azure.eventhub.error import EventHubError, EventHubAuthenticationError, EventHubConnectionError, EventHubMessageError from azure.eventhub.client import EventHubClient from azure.eventhub.sender import Sender from azure.eventhub.receiver import Receiver @@ -17,6 +18,9 @@ "__version__", "EventData", "EventHubError", + "EventHubConnectionError", + "EventHubMessageError", + "EventHubAuthenticationError", "EventPosition", 
"EventHubClient", "Sender", diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 72673c8658cc..2bbb6ae0aff9 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -11,7 +11,7 @@ from uamqp import ReceiveClientAsync, Source from azure.eventhub import EventHubError, EventData -from azure.eventhub.common import _error_handler +from azure.eventhub.error import EventHubError, EventHubAuthenticationError, EventHubConnectionError, _error_handler log = logging.getLogger(__name__) @@ -91,46 +91,48 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): await self.close(exc_val) def __aiter__(self): - self.messages_iter = self._handler.receive_messages_iter_async() return self async def __anext__(self): - if not self.running: - await self.__open() - try: - message = await self.messages_iter.__anext__() - event_data = EventData(message=message) - self.offset = event_data.offset - return event_data - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Receiver disconnected due to token error. Attempting reconnect.") - await self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - await self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Receiver detached. 
Attempting reconnect.") + await self._open() + while True: + try: + if not self.iter_started: + self.messages_iter = self._handler.receive_messages_iter_async() + self.iter_started = True + message = await self.messages_iter.__anext__() + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") await self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except StopAsyncIteration: - raise - except asyncio.CancelledError: - # TODO: stop self.message_iter - raise - except Exception as e: - log.info("Unexpected error occurred (%r). Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - await self.close(exception=error) - raise error + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + await self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + await self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except StopAsyncIteration: + raise + except asyncio.CancelledError: + # TODO: stop self.message_iter + raise + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + await self.close(exception=error) + raise error async def _open(self): """ @@ -171,10 +173,37 @@ async def _open(self): client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) - await self._handler.open_async() - self.running = True - while not await self._handler.client_ready_async(): - await asyncio.sleep(0.05) + + if not self.running: + try: + await self._handler.open_async() + self.running = True + while not await self._handler.client_ready_async(): + await asyncio.sleep(0.05) + except errors.AuthenticationException: + log.info("Receiver failed authentication. Retrying...") + await self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + await self.reconnect() + else: + log.info("Receiver detached. Failed to connect") + error = EventHubConnectionError(str(shutdown), shutdown) + raise error + except errors.AMQPConnectionError as shutdown: + if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: + log.info("Receiver couldn't authenticate.", shutdown) + error = EventHubAuthenticationError(str(shutdown)) + raise error + else: + log.info("Receiver connection error (%r).", shutdown) + error = EventHubConnectionError(str(shutdown)) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r)", e) + error = EventHubError("Receiver connect failed: {}".format(e)) + raise error async def _reconnect(self): # pylint: disable=too-many-statements # pylint: disable=protected-access @@ -197,6 +226,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) + self.iter_started = False try: await self._handler.open_async() while not 
await self._handler.client_ready_async(): @@ -301,8 +331,7 @@ async def receive(self, max_batch_size=None, timeout=None): """ if self.error: raise self.error - if not self.running: - await self._open() + await self._open() data_batch = [] try: timeout_ms = 1000 * timeout if timeout else 0 diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 7effcaf63b13..859014266e3c 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -12,7 +12,9 @@ from azure.eventhub import MessageSendResult from azure.eventhub import EventHubError -from azure.eventhub.common import _error_handler, EventData, _BatchSendEventData +from azure.eventhub.common import EventData, _BatchSendEventData +from azure.eventhub.error import EventHubError, EventHubConnectionError, \ + EventHubAuthenticationError, EventHubMessageError, _error_handler log = logging.getLogger(__name__) @@ -120,10 +122,36 @@ async def _open(self): client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) - await self._handler.open_async() - self.running = True - while not await self._handler.client_ready_async(): - await asyncio.sleep(0.05) + if not self.running: + try: + await self._handler.open_async() + self.running = True + while not await self._handler.client_ready_async(): + await asyncio.sleep(0.05) + except errors.AuthenticationException: + log.info("Sender failed authentication. Retrying...") + await self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Sender detached. Attempting reconnect.") + await self.reconnect() + else: + log.info("Sender detached. 
Failed to connect") + error = EventHubConnectionError(str(shutdown), shutdown) + raise error + except errors.AMQPConnectionError as shutdown: + if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: + log.info("Sender couldn't authenticate.", shutdown) + error = EventHubAuthenticationError(str(shutdown)) + raise error + else: + log.info("Sender connection error (%r).", shutdown) + error = EventHubConnectionError(str(shutdown)) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r)", e) + error = EventHubError("Sender connect failed: {}".format(e)) + raise error async def _reconnect(self): await self._handler.close_async() @@ -145,7 +173,7 @@ async def _reconnect(self): return True except errors.TokenExpired as shutdown: log.info("AsyncSender disconnected due to token expiry. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubAuthenticationError(str(shutdown), shutdown) await self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -153,7 +181,7 @@ async def _reconnect(self): log.info("AsyncSender detached. Attempting reconnect.") return False log.info("AsyncSender reconnect failed. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -161,7 +189,7 @@ async def _reconnect(self): log.info("AsyncSender detached. Attempting reconnect.") return False log.info("AsyncSender reconnect failed. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -169,7 +197,7 @@ async def _reconnect(self): log.info("AsyncSender couldn't authenticate. 
Attempting reconnect.") return False log.info("AsyncSender connection error (%r). Shutting down.", shutdown) - error = EventHubError(str(shutdown)) + error = EventHubConnectionError(str(shutdown)) await self.close(exception=error) raise error except Exception as e: @@ -219,14 +247,13 @@ async def close(self, exception=None): await self._handler.close_async() async def _send_event_data(self, event_data): - if not self.running: - await self._open() + await self._open() try: self._handler.send_message(event_data.message) if self._outcome != MessageSendResult.Ok: raise Sender._error(self._outcome, self._condition) except errors.MessageException as failed: - error = EventHubError(str(failed), failed) + error = EventHubMessageError(str(failed), failed) await self.close(exception=error) raise error except (errors.TokenExpired, errors.AuthenticationException): @@ -238,7 +265,7 @@ async def _send_event_data(self, event_data): await self.reconnect() else: log.info("Sender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -247,7 +274,7 @@ async def _send_event_data(self, event_data): await self.reconnect() else: log.info("Sender detached. 
Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) await self.close(exception=error) raise error except Exception as e: @@ -266,10 +293,10 @@ def _verify_partition(event_datas): partition_key = ed.partition_key yield ed except StopIteration: - raise ValueError("batch_event_data must not be empty") + raise ValueError("event_data must not be empty") for ed in ed_iter: if ed.partition_key != partition_key: - raise ValueError("partition key of all EventData must be the same if being sent in a batch") + log.warning("partition key of all EventData must be the same if being sent in a batch") yield ed async def send(self, event_data): @@ -302,70 +329,6 @@ async def send(self, event_data): wrapper_event_data.message.on_send_complete = self._on_outcome await self._send_event_data(wrapper_event_data) - def queue_message(self, event_data, callback=None): - """ - Transfers an event data and notifies the callback when the operation is done. - - :param event_data: The event to be sent. - :type event_data: ~azure.eventhub.common.EventData - :param callback: Callback to be run once the message has been send. - This must be a function that accepts two arguments. - :type callback: callable[~uamqp.constants.MessageSendResult, ~azure.eventhub.common.EventHubError] - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_transfer] - :end-before: [END eventhub_client_transfer] - :language: python - :dedent: 4 - :caption: Transfers an event data and notifies the callback when the operation is done. 
- - """ - if self.error: - raise self.error - if not self.running: - self._open() - if event_data.partition_key and self.partition: - raise ValueError("EventData partition key cannot be used with a partition sender.") - if callback: - event_data.message.on_send_complete = lambda o, c: callback(o, Sender._error(o, c)) - self._handler.queue_message(event_data.message) - - async def send_pending_messages(self): - """ - Wait until all transferred events have been sent. - """ - if self.error: - raise self.error - if not self.running: - raise ValueError("Unable to send until client has been started.") - try: - await self._handler.wait_async() - except (errors.TokenExpired, errors.AuthenticationException): - log.info("AsyncSender disconnected due to token error. Attempting reconnect.") - await self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("AsyncSender detached. Attempting reconnect.") - await self.reconnect() - else: - log.info("AsyncSender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("AsyncSender detached. Attempting reconnect.") - await self.reconnect() - else: - log.info("AsyncSender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except Exception as e: - log.info("Unexpected error occurred (%r).", e) - raise EventHubError("Send failed: {}".format(e)) - def _on_outcome(self, outcome, condition): """ Called when the outcome is received for a delivery. 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 245c0331d891..79a8378ddd93 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -24,7 +24,8 @@ from azure.eventhub import __version__ from azure.eventhub.sender import Sender from azure.eventhub.receiver import Receiver -from azure.eventhub.common import EventHubError, parse_sas_token +from azure.eventhub.common import parse_sas_token +from azure.eventhub.error import EventHubError from .client_abstract import EventHubClientAbstract from .common import SASTokenCredentials, SharedKeyCredentials diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index a6f2b137c439..cc8be6f45c11 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -16,37 +16,6 @@ from uamqp import types, constants, errors from uamqp.message import MessageHeader, MessageProperties -_NO_RETRY_ERRORS = ( - b"com.microsoft:argument-out-of-range", - b"com.microsoft:entity-disabled", - b"com.microsoft:auth-failed", - b"com.microsoft:precondition-failed", - b"com.microsoft:argument-error" -) - -def _error_handler(error): - """ - Called internally when an event has failed to send so we - can parse the error to determine whether we should attempt - to retry sending the event again. - Returns the action to take according to error type. - - :param error: The error received in the send attempt. 
- :type error: Exception - :rtype: ~uamqp.errors.ErrorAction - """ - if error.condition == b'com.microsoft:server-busy': - return errors.ErrorAction(retry=True, backoff=4) - if error.condition == b'com.microsoft:timeout': - return errors.ErrorAction(retry=True, backoff=2) - if error.condition == b'com.microsoft:operation-cancelled': - return errors.ErrorAction(retry=True) - if error.condition == b"com.microsoft:container-close": - return errors.ErrorAction(retry=True, backoff=4) - if error.condition in _NO_RETRY_ERRORS: - return errors.ErrorAction(retry=False) - return errors.ErrorAction(retry=True) - def parse_sas_token(sas_token): """Parse a SAS token into its components. @@ -362,58 +331,6 @@ def from_enqueued_time(enqueued_time, inclusive=False): NEW_EVENTS_ONLY = EventPosition("@latest") -class EventHubError(Exception): - """ - Represents an error happened in the client. - - :ivar message: The error message. - :vartype message: str - :ivar error: The error condition, if available. - :vartype error: str - :ivar details: The error details, if included in the - service response. 
- :vartype details: dict[str, str] - """ - - def __init__(self, message, details=None): - self.error = None - self.message = message - self.details = details - if isinstance(message, constants.MessageSendResult): - self.message = "Message send failed with result: {}".format(message) - if details and isinstance(details, Exception): - try: - condition = details.condition.value.decode('UTF-8') - except AttributeError: - condition = details.condition.decode('UTF-8') - _, _, self.error = condition.partition(':') - self.message += "\nError: {}".format(self.error) - try: - self._parse_error(details.description) - for detail in self.details: - self.message += "\n{}".format(detail) - except: # pylint: disable=bare-except - self.message += "\n{}".format(details) - super(EventHubError, self).__init__(self.message) - - def _parse_error(self, error_list): - details = [] - self.message = error_list if isinstance(error_list, six.text_type) else error_list.decode('UTF-8') - details_index = self.message.find(" Reference:") - if details_index >= 0: - details_msg = self.message[details_index + 1:] - self.message = self.message[0:details_index] - - tracking_index = details_msg.index(", TrackingId:") - system_index = details_msg.index(", SystemTracker:") - timestamp_index = details_msg.index(", Timestamp:") - details.append(details_msg[:tracking_index]) - details.append(details_msg[tracking_index + 2: system_index]) - details.append(details_msg[system_index + 2: timestamp_index]) - details.append(details_msg[timestamp_index + 2:]) - self.details = details - - # TODO: move some behaviors to these two classes. 
class SASTokenCredentials(object): def __init__(self, token): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py new file mode 100644 index 000000000000..441ebf02d3a2 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py @@ -0,0 +1,103 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +from uamqp import types, constants, errors +import six + +_NO_RETRY_ERRORS = ( + b"com.microsoft:argument-out-of-range", + b"com.microsoft:entity-disabled", + b"com.microsoft:auth-failed", + b"com.microsoft:precondition-failed", + b"com.microsoft:argument-error" +) + +def _error_handler(error): + """ + Called internally when an event has failed to send so we + can parse the error to determine whether we should attempt + to retry sending the event again. + Returns the action to take according to error type. + + :param error: The error received in the send attempt. + :type error: Exception + :rtype: ~uamqp.errors.ErrorAction + """ + if error.condition == b'com.microsoft:server-busy': + return errors.ErrorAction(retry=True, backoff=4) + if error.condition == b'com.microsoft:timeout': + return errors.ErrorAction(retry=True, backoff=2) + if error.condition == b'com.microsoft:operation-cancelled': + return errors.ErrorAction(retry=True) + if error.condition == b"com.microsoft:container-close": + return errors.ErrorAction(retry=True, backoff=4) + if error.condition in _NO_RETRY_ERRORS: + return errors.ErrorAction(retry=False) + return errors.ErrorAction(retry=True) + + +class EventHubError(Exception): + """ + Represents an error happened in the client. + + :ivar message: The error message. 
+ :vartype message: str + :ivar error: The error condition, if available. + :vartype error: str + :ivar details: The error details, if included in the + service response. + :vartype details: dict[str, str] + """ + + def __init__(self, message, details=None): + self.error = None + self.message = message + self.details = details + if isinstance(message, constants.MessageSendResult): + self.message = "Message send failed with result: {}".format(message) + if details and isinstance(details, Exception): + try: + condition = details.condition.value.decode('UTF-8') + except AttributeError: + condition = details.condition.decode('UTF-8') + _, _, self.error = condition.partition(':') + self.message += "\nError: {}".format(self.error) + try: + self._parse_error(details.description) + for detail in self.details: + self.message += "\n{}".format(detail) + except: # pylint: disable=bare-except + self.message += "\n{}".format(details) + super(EventHubError, self).__init__(self.message) + + def _parse_error(self, error_list): + details = [] + self.message = error_list if isinstance(error_list, six.text_type) else error_list.decode('UTF-8') + details_index = self.message.find(" Reference:") + if details_index >= 0: + details_msg = self.message[details_index + 1:] + self.message = self.message[0:details_index] + + tracking_index = details_msg.index(", TrackingId:") + system_index = details_msg.index(", SystemTracker:") + timestamp_index = details_msg.index(", Timestamp:") + details.append(details_msg[:tracking_index]) + details.append(details_msg[tracking_index + 2: system_index]) + details.append(details_msg[system_index + 2: timestamp_index]) + details.append(details_msg[timestamp_index + 2:]) + self.details = details + + +class EventHubAuthenticationError(EventHubError): + pass + + +class EventHubConnectionError(EventHubError): + pass + + +class EventHubMessageError(EventHubError): + pass + diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py 
b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 4a06432e0fa0..b061615f0c61 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -11,7 +11,8 @@ from uamqp import types, errors from uamqp import ReceiveClient, Source -from azure.eventhub.common import EventHubError, EventData, _error_handler +from azure.eventhub.common import EventData +from azure.eventhub.error import EventHubError, EventHubAuthenticationError, EventHubConnectionError, _error_handler log = logging.getLogger(__name__) @@ -51,6 +52,7 @@ def __init__(self, client, source, event_position=None, prefetch=300, exclusive_ self.client = client self.source = source self.offset = event_position + self.iter_started = False self.prefetch = prefetch self.exclusive_receiver_priority = exclusive_receiver_priority self.keep_alive = keep_alive @@ -86,43 +88,45 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.close(exc_val) def __iter__(self): - if not self.running: - self._open() - self.messages_iter = self._handler.receive_messages_iter() return self def __next__(self): - try: - message = next(self.messages_iter) - event_data = EventData(message=message) - self.offset = event_data.offset - return event_data - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Receiver disconnected due to token error. Attempting reconnect.") - self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") - self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Receiver detached. 
Attempting reconnect.") + self._open() + while True: + try: + if not self.iter_started: + self.messages_iter = self._handler.receive_messages_iter() + self.iter_started = True + message = next(self.messages_iter) + event_data = EventData(message=message) + self.offset = event_data.offset + return event_data + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except StopIteration: - raise - except Exception as e: - log.info("Unexpected error occurred (%r). Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - self.close(exception=error) - raise error + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except StopIteration: + raise + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + self.close(exception=error) + raise error def _open(self): """ @@ -162,10 +166,37 @@ def _open(self): keep_alive_interval=self.keep_alive, client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent)) - self._handler.open() - self.running = True - while not self._handler.client_ready(): - time.sleep(0.05) + if not self.running: + try: + self._handler.open() + self.running = True + while not self._handler.client_ready(): + time.sleep(0.05) + + except errors.AuthenticationException: + log.info("Receiver failed authentication. Retrying...") + self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + else: + log.info("Receiver detached. Failed to connect") + error = EventHubConnectionError(str(shutdown), shutdown) + raise error + except errors.AMQPConnectionError as shutdown: + if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: + log.info("Receiver couldn't authenticate.", shutdown) + error = EventHubAuthenticationError(str(shutdown)) + raise error + else: + log.info("Receiver connection error (%r).", shutdown) + error = EventHubConnectionError(str(shutdown)) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r)", e) + error = EventHubError("Receiver connect failed: {}".format(e)) + raise error def _reconnect(self): # pylint: disable=too-many-statements # pylint: disable=protected-access @@ -187,6 +218,7 @@ def _reconnect(self): # pylint: disable=too-many-statements keep_alive_interval=self.keep_alive, client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent)) + self.iter_started = False try: self._handler.open() while not self._handler.client_ready(): @@ -194,7 +226,7 @@ def _reconnect(self): # pylint: 
disable=too-many-statements return True except errors.TokenExpired as shutdown: log.info("Receiver disconnected due to token expiry. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubAuthenticationError(str(shutdown), shutdown) self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -202,7 +234,7 @@ def _reconnect(self): # pylint: disable=too-many-statements log.info("Receiver detached. Attempting reconnect.") return False log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -210,7 +242,7 @@ def _reconnect(self): # pylint: disable=too-many-statements log.info("Receiver detached. Attempting reconnect.") return False log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -218,7 +250,7 @@ def _reconnect(self): # pylint: disable=too-many-statements log.info("Receiver couldn't authenticate. Attempting reconnect.") return False log.info("Receiver connection error (%r). Shutting down.", shutdown) - error = EventHubError(str(shutdown)) + error = EventHubConnectionError(str(shutdown)) self.close(exception=error) raise error except Exception as e: @@ -300,8 +332,8 @@ def receive(self, max_batch_size=None, timeout=None): """ if self.error: raise self.error - if not self.running: - self._open() + self._open() + data_batch = [] try: timeout_ms = 1000 * timeout if timeout else 0 @@ -323,7 +355,7 @@ def receive(self, max_batch_size=None, timeout=None): self.reconnect() return data_batch log.info("Receiver detached. 
Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -332,7 +364,7 @@ def receive(self, max_batch_size=None, timeout=None): self.reconnect() return data_batch log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 8728abb8afa1..77546cb1dc1d 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -12,7 +12,9 @@ from uamqp import SendClient from uamqp.constants import MessageSendResult -from azure.eventhub.common import EventHubError, EventData, _BatchSendEventData, _error_handler +from azure.eventhub.common import EventData, _BatchSendEventData +from azure.eventhub.error import EventHubError, EventHubConnectionError, \ + EventHubAuthenticationError, EventHubMessageError, _error_handler log = logging.getLogger(__name__) @@ -114,10 +116,36 @@ def _open(self): keep_alive_interval=self.keep_alive, client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent)) - self._handler.open() - self.running = True - while not self._handler.client_ready(): - time.sleep(0.05) + if not self.running: + try: + self._handler.open() + self.running = True + while not self._handler.client_ready(): + time.sleep(0.05) + except errors.AuthenticationException: + log.info("Sender failed authentication. Retrying...") + self.reconnect() + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Sender detached. 
Attempting reconnect.") + self.reconnect() + else: + log.info("Sender detached. Failed to connect") + error = EventHubConnectionError(str(shutdown), shutdown) + raise error + except errors.AMQPConnectionError as shutdown: + if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: + log.info("Sender couldn't authenticate.", shutdown) + error = EventHubAuthenticationError(str(shutdown)) + raise error + else: + log.info("Sender connection error (%r).", shutdown) + error = EventHubConnectionError(str(shutdown)) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r)", e) + error = EventHubError("Sender connect failed: {}".format(e)) + raise error def _reconnect(self): # pylint: disable=protected-access @@ -139,7 +167,7 @@ def _reconnect(self): return True except errors.TokenExpired as shutdown: log.info("Sender disconnected due to token expiry. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubAuthenticationError(str(shutdown), shutdown) self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -147,7 +175,7 @@ def _reconnect(self): log.info("Sender detached. Attempting reconnect.") return False log.info("Sender reconnect failed. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -155,7 +183,7 @@ def _reconnect(self): log.info("Sender detached. Attempting reconnect.") return False log.info("Sender reconnect failed. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -163,7 +191,7 @@ def _reconnect(self): log.info("Sender couldn't authenticate. 
Attempting reconnect.") return False log.info("Sender connection error (%r). Shutting down.", shutdown) - error = EventHubError(str(shutdown)) + error = EventHubConnectionError(str(shutdown)) self.close(exception=error) raise error except Exception as e: @@ -211,14 +239,14 @@ def close(self, exception=None): self._handler.close() def _send_event_data(self, event_data): - if not self.running: - self._open() + self._open() + try: self._handler.send_message(event_data.message) if self._outcome != MessageSendResult.Ok: raise Sender._error(self._outcome, self._condition) except errors.MessageException as failed: - error = EventHubError(str(failed), failed) + error = EventHubMessageError(str(failed), failed) self.close(exception=error) raise error except (errors.TokenExpired, errors.AuthenticationException): @@ -230,7 +258,7 @@ def _send_event_data(self, event_data): self.reconnect() else: log.info("Sender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -239,7 +267,7 @@ def _send_event_data(self, event_data): self.reconnect() else: log.info("Sender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: @@ -294,81 +322,6 @@ def send(self, event_data): wrapper_event_data.message.on_send_complete = self._on_outcome self._send_event_data(wrapper_event_data) - def queue_message(self, event_data, callback=None): - """ - Transfers an event data and notifies the callback when the operation is done. - - :param event_data: The event to be sent. - :type event_data: ~azure.eventhub.common.EventData - :param callback: Callback to be run once the message has been send. - This must be a function that accepts two arguments. 
- :type callback: callable[~uamqp.constants.MessageSendResult, ~azure.eventhub.common.EventHubError] - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_transfer] - :end-before: [END eventhub_client_transfer] - :language: python - :dedent: 4 - :caption: Transfers an event data and notifies the callback when the operation is done. - - """ - if self.error: - raise self.error - if not self.running: - self._open() - - if event_data.partition_key and self.partition: - # raise ValueError("EventData partition key cannot be used with a partition sender.") - log.warning("EventData partition key should not be used with a partition sender.") - if callback: - event_data.message.on_send_complete = lambda o, c: callback(o, Sender._error(o, c)) - self._handler.queue_message(event_data.message) - - def send_pending_messages(self): - """ - Wait until all transferred events have been sent. - - Example: - .. literalinclude:: ../examples/test_examples_eventhub.py - :start-after: [START eventhub_client_transfer] - :end-before: [END eventhub_client_transfer] - :language: python - :dedent: 4 - :caption: Wait until all transferred events have been sent. - - """ - if self.error: - raise self.error - if not self.running: - self._open() - try: - self._handler.wait() - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Sender disconnected due to token error. Attempting reconnect.") - self.reconnect() - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Sender detached. Attempting reconnect.") - self.reconnect() - else: - log.info("Sender detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Sender detached. Attempting reconnect.") - self.reconnect() - else: - log.info("Sender detached. 
Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except Exception as e: - log.info("Unexpected error occurred (%r).", e) - raise EventHubError("Send failed: {}".format(e)) - def _on_outcome(self, outcome, condition): """ Called when the outcome is received for a delivery. From 309dff11f33d6aed7ded19d556ed9680e0f0df2b Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 14:27:51 -0700 Subject: [PATCH 32/49] EventHubError extends AzureError --- sdk/eventhub/azure-eventhubs/azure/eventhub/error.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py index 441ebf02d3a2..a017750dd57e 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py @@ -5,6 +5,7 @@ from uamqp import types, constants, errors import six +from azure.core import AzureError _NO_RETRY_ERRORS = ( b"com.microsoft:argument-out-of-range", @@ -38,7 +39,7 @@ def _error_handler(error): return errors.ErrorAction(retry=True) -class EventHubError(Exception): +class EventHubError(AzureError): """ Represents an error happened in the client. From d5ed5ccdbd6cbd8b11f4cb072663c6f3a2c3c133 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 14:29:26 -0700 Subject: [PATCH 33/49] Fix EventPosition default value issue --- sdk/eventhub/azure-eventhubs/azure/eventhub/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index cc8be6f45c11..1e9244d33e37 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -286,7 +286,7 @@ def __init__(self, value, inclusive=False): :param inclusive: Whether to include the supplied value as the start point. 
:type inclusive: bool """ - self.value = value if value else "-1" + self.value = value if value is not None else "-1" self.inclusive = inclusive def __str__(self): From ae0ce5fda1b4687a19970ec0b29b5441ee6d1ed3 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 14:30:30 -0700 Subject: [PATCH 34/49] change $default to $Default --- .../azure/eventhub/aio/event_hubs_client_async.py | 2 +- sdk/eventhub/azure-eventhubs/azure/eventhub/client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index c43cb6d4e511..32d817b1d20c 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -168,7 +168,7 @@ async def get_partition_properties(self, partition): await mgmt_client.close_async() def create_receiver( - self, partition_id, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, + self, partition_id, consumer_group="$Default", event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, loop=None): """ Add an async receiver to the client for a particular consumer group and partition. 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index 79a8378ddd93..cbc6d7ce1dd2 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -184,7 +184,7 @@ def get_partition_properties(self, partition): mgmt_client.close() def create_receiver( - self, partition_id, consumer_group="$default", event_position=None, exclusive_receiver_priority=None, operation=None, + self, partition_id, consumer_group="$Default", event_position=None, exclusive_receiver_priority=None, operation=None, prefetch=None, ): """ From e3f9281e34b1cbde4d3ab800dfd583acde0d361c Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 16:21:10 -0700 Subject: [PATCH 35/49] Handle TokenAuthError --- .../azure/eventhub/aio/receiver_async.py | 2 +- .../azure-eventhubs/azure/eventhub/aio/sender_async.py | 2 +- sdk/eventhub/azure-eventhubs/azure/eventhub/error.py | 10 +++++++--- .../azure-eventhubs/azure/eventhub/receiver.py | 8 ++++---- sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py | 10 +++++----- 5 files changed, 18 insertions(+), 14 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 2bbb6ae0aff9..e07e2d2c8211 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -232,7 +232,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements while not await self._handler.client_ready_async(): await asyncio.sleep(0.05) return True - except errors.TokenExpired as shutdown: + except errors.AuthenticationException as shutdown: log.info("AsyncReceiver disconnected due to token expiry. 
Shutting down.") error = EventHubError(str(shutdown), shutdown) await self.close(exception=error) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 859014266e3c..4c6a27d7bbe9 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -171,7 +171,7 @@ async def _reconnect(self): self._handler.queue_message(*unsent_events) await self._handler.wait_async() return True - except errors.TokenExpired as shutdown: + except errors.AuthenticationException as shutdown: log.info("AsyncSender disconnected due to token expiry. Shutting down.") error = EventHubAuthenticationError(str(shutdown), shutdown) await self.close(exception=error) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py index a017750dd57e..b20cc7cd36a0 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py @@ -62,9 +62,13 @@ def __init__(self, message, details=None): try: condition = details.condition.value.decode('UTF-8') except AttributeError: - condition = details.condition.decode('UTF-8') - _, _, self.error = condition.partition(':') - self.message += "\nError: {}".format(self.error) + try: + condition = details.condition.decode('UTF-8') + except AttributeError: + condition = None + if condition: + _, _, self.error = condition.partition(':') + self.message += "\nError: {}".format(self.error) try: self._parse_error(details.description) for detail in self.details: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index b061615f0c61..15b09609ba88 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -187,11 +187,11 @@ def 
_open(self): except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: log.info("Receiver couldn't authenticate.", shutdown) - error = EventHubAuthenticationError(str(shutdown)) + error = EventHubAuthenticationError(str(shutdown), shutdown) raise error else: log.info("Receiver connection error (%r).", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = EventHubConnectionError(str(shutdown), shutdown) raise error except Exception as e: log.info("Unexpected error occurred (%r)", e) @@ -224,7 +224,7 @@ def _reconnect(self): # pylint: disable=too-many-statements while not self._handler.client_ready(): time.sleep(0.05) return True - except errors.TokenExpired as shutdown: + except errors.AuthenticationException as shutdown: log.info("Receiver disconnected due to token expiry. Shutting down.") error = EventHubAuthenticationError(str(shutdown), shutdown) self.close(exception=error) @@ -250,7 +250,7 @@ def _reconnect(self): # pylint: disable=too-many-statements log.info("Receiver couldn't authenticate. Attempting reconnect.") return False log.info("Receiver connection error (%r). 
Shutting down.", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 77546cb1dc1d..518425567fe7 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -136,11 +136,11 @@ def _open(self): except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: log.info("Sender couldn't authenticate.", shutdown) - error = EventHubAuthenticationError(str(shutdown)) + error = EventHubAuthenticationError(str(shutdown), shutdown) raise error else: log.info("Sender connection error (%r).", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = EventHubConnectionError(str(shutdown), shutdown) raise error except Exception as e: log.info("Unexpected error occurred (%r)", e) @@ -165,7 +165,7 @@ def _reconnect(self): self._handler.queue_message(*unsent_events) self._handler.wait() return True - except errors.TokenExpired as shutdown: + except errors.AuthenticationException as shutdown: log.info("Sender disconnected due to token expiry. Shutting down.") error = EventHubAuthenticationError(str(shutdown), shutdown) self.close(exception=error) @@ -191,7 +191,7 @@ def _reconnect(self): log.info("Sender couldn't authenticate. Attempting reconnect.") return False log.info("Sender connection error (%r). 
Shutting down.", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = EventHubConnectionError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: @@ -289,7 +289,7 @@ def _verify_partition(event_datas): raise ValueError("batch_event_data must not be empty") for ed in ed_iter: if ed.partition_key != partition_key: - raise ValueError("partition key of all EventData must be the same if being sent in a batch") + log.warning("partition key of all event_data must be the same if being sent in a batch") yield ed def send(self, event_data): From 55e552009fba4a86816711302da90e5abb4233c2 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 16:46:58 -0700 Subject: [PATCH 36/49] wait for ready in _reconnect --- sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py | 2 ++ sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 4c6a27d7bbe9..88e01bfdfd3f 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -168,6 +168,8 @@ async def _reconnect(self): loop=self.loop) try: await self._handler.open_async() + while not await self._handler.client_ready_async(): + await asyncio.sleep(0.05) self._handler.queue_message(*unsent_events) await self._handler.wait_async() return True diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 518425567fe7..7d9800bf4430 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -162,6 +162,8 @@ def _reconnect(self): properties=self.client.create_properties(self.client.config.user_agent)) try: self._handler.open() + while not self._handler.client_ready(): + 
time.sleep(0.05) self._handler.queue_message(*unsent_events) self._handler.wait() return True From f7e572d477410279e165da52c9e3b24af30a2502 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 20:23:27 -0700 Subject: [PATCH 37/49] fix get_partition_ids issue --- .../azure/eventhub/aio/event_hubs_client_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index 32d817b1d20c..c55f5c71e47a 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -117,7 +117,7 @@ async def get_properties(self): await mgmt_client.close_async() async def get_partition_ids(self): - return await self.get_properties()['partition_ids'] + return (await self.get_properties())['partition_ids'] async def get_partition_properties(self, partition): """ From 2cb8e307d12d7f6b08811f84e0ed5afebcf7f1cc Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 20:23:57 -0700 Subject: [PATCH 38/49] Fix reconnect issue --- .../azure/eventhub/aio/receiver_async.py | 104 ++++++++++-------- .../azure/eventhub/aio/sender_async.py | 2 +- .../azure/eventhub/receiver.py | 88 +++++++-------- 3 files changed, 105 insertions(+), 89 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index e07e2d2c8211..8919eb7bc992 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -111,18 +111,20 @@ async def __anext__(self): if shutdown.action.retry and self.auto_reconnect: log.info("Receiver detached. Attempting reconnect.") await self.reconnect() - log.info("Receiver detached. 
Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error + else: + log.info("Receiver detached. Shutting down.") + error = EventHubConnectionError(str(shutdown), shutdown) + await self.close(exception=error) + raise error except errors.MessageHandlerError as shutdown: if self.auto_reconnect: log.info("Receiver detached. Attempting reconnect.") await self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error + else: + log.info("Receiver detached. Shutting down.") + error = EventHubConnectionError(str(shutdown), shutdown) + await self.close(exception=error) + raise error except StopAsyncIteration: raise except asyncio.CancelledError: @@ -300,13 +302,25 @@ async def close(self, exception=None): elif isinstance(exception, EventHubError): self.error = exception elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): - self.error = EventHubError(str(exception), exception) + self.error = EventHubConnectionError(str(exception), exception) elif exception: self.error = EventHubError(str(exception)) else: self.error = EventHubError("This receive handler is now closed.") await self._handler.close_async() + @property + def queue_size(self): + """ + The current size of the unprocessed Event queue. + + :rtype: int + """ + # pylint: disable=protected-access + if self._handler._received_messages: + return self._handler._received_messages.qsize() + return 0 + async def receive(self, max_batch_size=None, timeout=None): """ Receive events asynchronously from the EventHub. 
@@ -332,41 +346,41 @@ async def receive(self, max_batch_size=None, timeout=None): if self.error: raise self.error await self._open() - data_batch = [] - try: - timeout_ms = 1000 * timeout if timeout else 0 - message_batch = await self._handler.receive_message_batch_async( - max_batch_size=max_batch_size, - timeout=timeout_ms) - for message in message_batch: - event_data = EventData(message=message) - self.offset = event_data.offset - data_batch.append(event_data) - return data_batch - except (errors.TokenExpired, errors.AuthenticationException): - log.info("AsyncReceiver disconnected due to token error. Attempting reconnect.") - await self.reconnect() - return data_batch - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("AsyncReceiver detached. Attempting reconnect.") - await self.reconnect() + while True: + data_batch = [] + try: + timeout_ms = 1000 * timeout if timeout else 0 + message_batch = await self._handler.receive_message_batch_async( + max_batch_size=max_batch_size, + timeout=timeout_ms) + for message in message_batch: + event_data = EventData(message=message) + self.offset = event_data.offset + data_batch.append(event_data) return data_batch - log.info("AsyncReceiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("AsyncReceiver detached. Attempting reconnect.") + except (errors.TokenExpired, errors.AuthenticationException): + log.info("AsyncReceiver disconnected due to token error. Attempting reconnect.") await self.reconnect() - return data_batch - log.info("AsyncReceiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - await self.close(exception=error) - raise error - except Exception as e: - log.info("Unexpected error occurred (%r). 
Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - await self.close(exception=error) - raise error + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("AsyncReceiver detached. Attempting reconnect.") + await self.reconnect() + else: + log.info("AsyncReceiver detached. Shutting down.") + error = EventHubConnectionError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("AsyncReceiver detached. Attempting reconnect.") + await self.reconnect() + else: + log.info("AsyncReceiver detached. Shutting down.") + error = EventHubConnectionError(str(shutdown), shutdown) + await self.close(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + await self.close(exception=error) + raise error diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 88e01bfdfd3f..c0b31182a740 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -241,7 +241,7 @@ async def close(self, exception=None): elif isinstance(exception, EventHubError): self.error = exception elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): - self.error = EventHubError(str(exception), exception) + self.error = EventHubConnectionError(str(exception), exception) elif exception: self.error = EventHubError(str(exception)) else: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 15b09609ba88..3a4ab34229e3 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ 
b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -108,18 +108,20 @@ def __next__(self): if shutdown.action.retry and self.auto_reconnect: log.info("Receiver detached. Attempting reconnect.") self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error + else: + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error except errors.MessageHandlerError as shutdown: if self.auto_reconnect: log.info("Receiver detached. Attempting reconnect.") self.reconnect() - log.info("Receiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) - self.close(exception=error) - raise error + else: + log.info("Receiver detached. Shutting down.") + error = EventHubError(str(shutdown), shutdown) + self.close(exception=error) + raise error except StopIteration: raise except Exception as e: @@ -335,40 +337,40 @@ def receive(self, max_batch_size=None, timeout=None): self._open() data_batch = [] - try: - timeout_ms = 1000 * timeout if timeout else 0 - message_batch = self._handler.receive_message_batch( - max_batch_size=max_batch_size, - timeout=timeout_ms) - for message in message_batch: - event_data = EventData(message=message) - self.offset = event_data.offset - data_batch.append(event_data) - return data_batch - except (errors.TokenExpired, errors.AuthenticationException): - log.info("Receiver disconnected due to token error. Attempting reconnect.") - self.reconnect() - return data_batch - except (errors.LinkDetach, errors.ConnectionClose) as shutdown: - if shutdown.action.retry and self.auto_reconnect: - log.info("Receiver detached. 
Attempting reconnect.") - self.reconnect() + while True: + try: + timeout_ms = 1000 * timeout if timeout else 0 + message_batch = self._handler.receive_message_batch( + max_batch_size=max_batch_size, + timeout=timeout_ms) + for message in message_batch: + event_data = EventData(message=message) + self.offset = event_data.offset + data_batch.append(event_data) return data_batch - log.info("Receiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except errors.MessageHandlerError as shutdown: - if self.auto_reconnect: - log.info("Receiver detached. Attempting reconnect.") + except (errors.TokenExpired, errors.AuthenticationException): + log.info("Receiver disconnected due to token error. Attempting reconnect.") self.reconnect() - return data_batch - log.info("Receiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) - self.close(exception=error) - raise error - except Exception as e: - log.info("Unexpected error occurred (%r). Shutting down.", e) - error = EventHubError("Receive failed: {}".format(e)) - self.close(exception=error) - raise error + except (errors.LinkDetach, errors.ConnectionClose) as shutdown: + if shutdown.action.retry and self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + else: + log.info("Receiver detached. Shutting down.") + error = EventHubConnectionError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except errors.MessageHandlerError as shutdown: + if self.auto_reconnect: + log.info("Receiver detached. Attempting reconnect.") + self.reconnect() + else: + log.info("Receiver detached. Shutting down.") + error = EventHubConnectionError(str(shutdown), shutdown) + self.close(exception=error) + raise error + except Exception as e: + log.info("Unexpected error occurred (%r). 
Shutting down.", e) + error = EventHubError("Receive failed: {}".format(e)) + self.close(exception=error) + raise error From 3ded0ada3264f1348e7e64db64081a003fb7101f Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 21:36:36 -0700 Subject: [PATCH 39/49] small fix --- .../azure-eventhubs/azure/eventhub/aio/receiver_async.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 8919eb7bc992..95b6455e3088 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -346,8 +346,9 @@ async def receive(self, max_batch_size=None, timeout=None): if self.error: raise self.error await self._open() + + data_batch = [] while True: - data_batch = [] try: timeout_ms = 1000 * timeout if timeout else 0 message_batch = await self._handler.receive_message_batch_async( From d4154749b7420d0772433752fc70eaccc6638c57 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 21:37:30 -0700 Subject: [PATCH 40/49] fix async live test --- sdk/eventhub/azure-eventhubs/conftest.py | 3 ++- .../asynctests/test_longrunning_receive_async.py | 12 +++++++----- .../tests/asynctests/test_longrunning_send_async.py | 11 ++++++----- .../tests/asynctests/test_negative_async.py | 2 +- .../tests/asynctests/test_reconnect_async.py | 10 ++++++---- 5 files changed, 22 insertions(+), 16 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/conftest.py b/sdk/eventhub/azure-eventhubs/conftest.py index 3235342620c7..68a211917f4c 100644 --- a/sdk/eventhub/azure-eventhubs/conftest.py +++ b/sdk/eventhub/azure-eventhubs/conftest.py @@ -168,7 +168,8 @@ def connstr_receivers(connection_str): partitions = client.get_partition_ids() receivers = [] for p in partitions: - receiver = client.create_receiver(partition_id=p, prefetch=500, 
event_position=EventPosition("@latest")) + #receiver = client.create_receiver(partition_id=p, prefetch=500, event_position=EventPosition("@latest")) + receiver = client.create_receiver(partition_id=p, prefetch=500, event_position=EventPosition("-1")) receivers.append(receiver) receiver.receive(timeout=1) yield connection_str, receivers diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py index 6036db2788c5..71ce5c4ee7d5 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py @@ -85,7 +85,8 @@ async def pump(_pid, receiver, _args, _dl): @pytest.mark.liveTest -def test_long_running_receive_async(connection_str): +@pytest.mark.asyncio +async def test_long_running_receive_async(connection_str): parser = argparse.ArgumentParser() parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30) parser.add_argument("--consumer", help="Consumer group name", default="$default") @@ -118,7 +119,7 @@ def test_long_running_receive_async(connection_str): try: if not args.partitions: - partitions = loop.run_until_complete(get_partitions(client)) + partitions = await client.get_partition_ids() else: partitions = args.partitions.split(",") pumps = [] @@ -126,12 +127,13 @@ def test_long_running_receive_async(connection_str): receiver = client.create_receiver( partition_id=pid, event_position=EventPosition(args.offset), - prefetch=50) + prefetch=50, + loop=loop) pumps.append(pump(pid, receiver, args, args.duration)) - loop.run_until_complete(asyncio.gather(*pumps)) + await asyncio.gather(*pumps) finally: pass if __name__ == '__main__': - test_long_running_receive_async(os.environ.get('EVENT_HUB_CONNECTION_STR')) + asyncio.run(test_long_running_receive_async(os.environ.get('EVENT_HUB_CONNECTION_STR'))) diff --git 
a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py index ef4817900f27..dd87e5324558 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_send_async.py @@ -84,7 +84,8 @@ def data_generator(): @pytest.mark.liveTest -def test_long_running_partition_send_async(connection_str): +@pytest.mark.asyncio +async def test_long_running_partition_send_async(connection_str): parser = argparse.ArgumentParser() parser.add_argument("--duration", help="Duration in seconds of the test", type=int, default=30) parser.add_argument("--payload", help="payload size", type=int, default=1024) @@ -103,7 +104,7 @@ def test_long_running_partition_send_async(connection_str): if args.conn_str: client = EventHubClient.from_connection_string( args.conn_str, - eventhub=args.eventhub, debug=True) + eventhub=args.eventhub, network_tracing=True) elif args.address: client = EventHubClient( args.address, @@ -119,7 +120,7 @@ def test_long_running_partition_send_async(connection_str): try: if not args.partitions: - partitions = loop.run_until_complete(get_partitions(client)) + partitions = await client.get_partition_ids() else: pid_range = args.partitions.split("-") if len(pid_range) > 1: @@ -130,7 +131,7 @@ def test_long_running_partition_send_async(connection_str): for pid in partitions: sender = client.create_sender(partition_id=pid, send_timeout=0) pumps.append(pump(pid, sender, args, args.duration)) - results = loop.run_until_complete(asyncio.gather(*pumps, return_exceptions=True)) + results = await asyncio.gather(*pumps, return_exceptions=True) assert not results except Exception as e: logger.error("Sender failed: {}".format(e)) @@ -139,4 +140,4 @@ def test_long_running_partition_send_async(connection_str): if __name__ == '__main__': - 
test_long_running_partition_send_async(os.environ.get('EVENT_HUB_CONNECTION_STR')) + asyncio.run(test_long_running_partition_send_async(os.environ.get('EVENT_HUB_CONNECTION_STR'))) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py index 08a8bb88f7ca..cfdf12bd6282 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py @@ -93,7 +93,7 @@ async def test_non_existing_entity_sender_async(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) sender = client.create_sender(partition_id="1") with pytest.raises(EventHubError): - await await sender._open() + await sender._open() @pytest.mark.liveTest diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py index db030e7981a0..40469bd6e3fc 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py @@ -24,16 +24,18 @@ async def test_send_with_long_interval_async(connstr_receivers): sender = client.create_sender() try: await sender.send(EventData(b"A single event")) - for _ in range(2): - await asyncio.sleep(300) + for _ in range(1): + #await asyncio.sleep(300) + sender._handler._connection._conn.destroy() await sender.send(EventData(b"A single event")) finally: await sender.close() received = [] for r in receivers: - received.extend(r.receive(timeout=1)) - assert len(received) == 3 + r._handler._connection._conn.destroy() + received.extend(r.receive(timeout=1)) + assert len(received) == 2 assert list(received[0].body)[0] == b"A single event" From 5e7d5f79fb12871e4032bf51a9831a83c157605d Mon Sep 17 00:00:00 2001 From: Yunhao Ling <47871814+yunhaoling@users.noreply.github.com> 
Date: Thu, 30 May 2019 22:42:33 -0700 Subject: [PATCH 41/49] Eventhub track2 Live test update (#8) * Set allowed sasl mechs * Remove client.py * Receiver update * Add dummy send api * logging updates * Error handling, reconnect and logging * Add app properties to event data * unbind transport on connection close * timestamp filter on py2 * module version * Reconnect once when link/session/connection close * Add SessionPolicy * Add client info * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * Updates - Cleaned wireframes to be PEP compliant - Implemented single partition pump and single event_hub partition pump scenario Todo - Add Unit Tests for partition pump and event hub partition pump - Implement Partition Manager - Implement Checkpointing and Lease Managment * run client in non-blocking mode * Added unit testing * Implemented the following functionality - Azure_storage_checkpoint_manager - AzureBlobLease isExpired Todo Implement partition manager Implement partition context Test full implementation * Implemented Processing of First Epoh Todo - Fix lease bug that is breaking subsequent epochs * Changes - Completed End to End EPH Flow - Removed storage dependancy on downloading full blob to check lease state Todo - Add thread and queue for checking lease state and other storage operations - Ensure eventhub client shuts down properly - Find way to update partition pumps without restarting them - Other optimizations * Move examples out * Changes - Added thread pool executor to enable conncurent execution of partitions - Removed partition pump dependency on max_batch Todo - Ensure eventhub client shuts down properly (This is causing errors) - Add thread pool for making checkpoint code conccurent - Add thread and queue for checking lease 
state and other storage operations to enable async - Find way to reassign active partition pumps without restarting them - Other optimizations * Add async receive * Changes - Added logs - Fixed error causing client to prematurely shutdown * Manual link flow control for async receive * Workaround for stuck async receiver * Local variable names * Changes - Optimized logging and comments Todo - Add concurecny mechanim for azure storage - Depricate partition pump event queue and update to latest version of the client * Create Dockerfile * Stuck async receiver * credit keeps increasing in async receiver * Changes - Added asnyc event hub client support - Optimized logging and comments Todo - Add concurecny mechanim for azure storage * Updated docker file as requested * Added EPH example * Fix hardcoded HTTP header * Made suggested changes * Bug fix - Fixed event loop bugs. In windows eventloop is thread dependent but in ubuntu the eventloop is threadsafe so you need to differentiate the thread specific eventloop from the host one. * Updated loop naming convention to be consistent * Added option to pass asyncio event_loop to eph * Updated docker file * Fixed critical bug with partition manager and aquirec mechanisiims Todo : Identitfy and fix remaining bug that is causing all pumps to shut down when a second host starts * Bug fixes - Fixed bug where closing a pump closed a host - Fixed bug where error partitioned were not removed - Fixed bug where leases were renewed at an incorrect interval * Updated file headers Removed author reference * - Fixed bug in eph example that caused host to terminate prematurely - Made the lease renewal and checkpoint creation "multithreaded" * Increase the size of the connection pool The default connection pool size was too small for scenarios where multiple partitions were handled by one EventProcessorHost. 
If the amount of partitions handled is large, we might end up doing very many connections at the same time due to the multi-threaded blob-handling. For this reason, you might hit the OS limits that restrict the number of open files per process that in MacOS is not very big. This can be worked around with something like: `ulimit -n 2560` * Decrease info logging verbosity * added ability to toggle pump shutdown when all messages on a pump are processed. * Install also eventhubsprocessor * Default to keeping the pumps It is more optimal to keep the pumps alive even if there are no messages so that it is faster to pickup when messages start to arrive. * Pipe and event injector for Windows * Event injector updates * EHClient refactoring. EHClient leaks. Sender part 1. * Send support * ren eventhubsprocessor eventprocessorhost * Changes - Added event hub config to simplify installation story * Changes - Added optional eventprocessor_params for passing context to the event processor - Made the storage manager mandatatory * Fix memory leaks * logging * Fix: 1. process crash due to race in client stop and connection remote close. 2. handle client close in async receiver. 3. fail pending sends when sender is closed. 4. some debug logging. * tests * test: recv from multiple partitions * test utility * logging update * Support callback based send for high throughput * Workaroud memory issue in proton.reactor.ApplicationEvent * renamed eventprocessor to eventprocessorhost for consistency * updated docker file * fixed typo in url * Added amqp port to address * Updated sample documentation since url is auto encoded by config * Updated docs * Implement timeout for send * Async sender and example * Close injector pipe * Use send timer to also check queued messages * Add partition pump loop to partition_context This gives the EventProcessor access to the partition_pump loop object. 
This way if One desires to run synchronous code inside process_events_async one can utilize the loop object to run the synchronous code using await context.pump_loop.run_in_executor(None, bla) * Include details in send error * Release deliveries when sender is closed * added validation to unquoted sas key * added support for custom eventhub client prefetch size * Update README.md * Update README.md * Added Docker instructions and fixed Dockerfile (#18) * Removed Dockerfile from the main folder and fixed Dockerfile example * Added build and run Dockerfile documentation * Update Readme * Removed rm qpid-proton folder * Removed /usr/share copy * Disallow a sender/receiver to be registered more than once * Make everything async in EPH I have removed all usage of threads thoroughout the code. Using threads to run pumps etc. Causes async code written into the event-processor to become caotic (you need to follow which loop is currently being used in the call to prevent loops not being found or using the wrong loop (There is the main loop and then loops that are created inside threads) Things become caotic when the event processor is being called by objects that run under different loops. So, no Threading except usage of asyncio run_in_executor. This is done mostly for azure blob api calls. Also changed the bla_async methods to not block. this way, when calling open_async for the the event-processor-host, the command will exit once the EPH is started. Due to the above, see the edited example/eph.py where I added a monitor that makes sure the EPH is still running (Could be replaced by loop.run_forever()) in the example file I have also incorporated a test class for gracefully killing the EPH after 30 seconds. this works, nevertheless takes a while to close as we are waiting for timeouts on the eventhubs connections. 
* Started removing proton code * Removed most of proton _impl * Removed more code * Working sender * Updates to sender * Added some tests/samples * Some progress on clients * Fixed samples * Added azure namespace * #25 Partition key cannot be set for events * Updated version * Updated README * Renamed package to eventhub * Started EPH modifications * Updated imports * Fixed target urls * Updated logging * Updated async message receive * updated test imports * Added mgmt call to get eh info * Updated samples * Updated receive test * Added send and receive test clients * Updated uamqp dependency * Merged updates from dev * Fixed typos * Updated EPH sample * Started docstrings * Converted tests to pytest * Updates to batch receive * Started adding docstrings * More docstrings * bumped version * Started porting test suite * More tests and improvements * Moved eph tests * Some sample cleanup * Some test updates * Some test restructure * Docstring cleanup * Fixed some merge artifacts * Fixed formatting error * Removed delivery count * Nested package directory * Support custom URL suffix * Support custom URL suffix * Support for EventData device ID * Reverted nested directory * Updated release notes * Workaround for partitionkey * Finished partition key workaround * beta2 fixes * pylint fixes * Trigger CI * Test fixes * Added package manifest * Added warning for Python 2.7 support Support for issues #36 and #38 * Started adding scenario tests * More test scenarios * Better docstring formatting * Started iothub support * Fixed long running test * Fixed typo and memory leak * Restructure * IoThub support * Updates for RC1 release * Fix long running test * Docstring and sample cleanups * Working on error retry * Improved error processing * Fixed partition manager * Progress on IotHub error * Some test updates * Updated uamqp dependency * Restructure for independent connections * Added HTTP proxy support Fix for issue #41 * Fixed some tests + samples * pylint fixes * bumped 
version * Added keepalive config and some eph fixes * Made reconnect configurable * Added more EPH options * Bumped version * Pylint fix * Pylint fix * Added send and auth timeouts * Changed log formatting. Retry on reconnect * Pylint fixes * Renamed internal async module * Updated send example to match recv Fix for issue #56 * Added build badge to readme * Fix for repeat startup * Added more storage connect options to EPH * Bumped version * Handler blocked until client started * Added event data methods * Fix pylint * Fix 3.7 CI * Fix 3.7 CI * Updated pylint version * Pylint fixes * Updated README * Fixed readme badge refresh * Fixed bug in Azure namespace package * Updated manifest * Parse enqueued time as UTC Fixes #72. * Updates for release 1.2.0 (#81) * Made setup 2.7 compatible * Separated async tests * Support 2.7 types * Bumped version * Added non-ascii tests * Fix CI * Fix Py27 pylint * Added iot sample * Updated sender/receiver client opening * bumped version * Updated tests * Fixed test name * Fixed test env settings * Skip eph test * Updates for v1.3.0 (#91) * Added support for storing the state of the Event Processor along the Checkpoint. Both Checkpoint and the EP state are stored as pickled objects. * Fixing pylint complaints. * Switched from pickle back to JSON for lease persistence. * Fixes bug when accessing leases that don't contain EP context. Also, minor renaming. * Better SAS token support * Fixed pylint * Improved auth error handling * Test stabilization * Improved stored EPH context * Updated EPH context storing * Skip test on OSX * Skip tests on OSX Fail due to large message body bug. * Some cleanup * Fixed error handling * Improved SAS token parsing * Fixed datetime offset (#99) * Fixed datetime offset * Updated pylint * Removed 3.4 pylint pass * Fixed bug in error handling (#100) * Migrate event hub sdk to central repo 1. add verifiable code snippets into docstring 2. update readme according to the template 3. 
add livetest mark and config 4. optimize code layout/structure * 1. document formatting 2. separate async/sync example tests * Fix build error: 1. uamqp dependency mismatch 2. rename test_examples in eventhub to avoid mismatch * This should fix build error * remove tests import and add sys path to solve build error * add live test for sending BatchEvent with application_properties, new live test passed with new uamqp wheel locally installed * Add get_partition_info in Event Hub * add get_partition_info * Add telemetry information to the connection properties * Disable smart split in batch message * 1. Add amqp over websocket test 2. Add proxy sample 3. Update some comment and code * update some test code * Add __str__ to EventData * Update test code * Update event position * Update live test * Update reconnect live test * Update too large data size --- .../tests/asynctests/test_negative_async.py | 2 +- .../tests/test_iothub_receive.py | 4 ++- .../azure-eventhubs/tests/test_iothub_send.py | 7 ++-- .../tests/test_longrunning_receive.py | 2 +- .../azure-eventhubs/tests/test_negative.py | 33 ++++++++++--------- .../azure-eventhubs/tests/test_receive.py | 23 +++++++------ .../azure-eventhubs/tests/test_reconnect.py | 12 +++---- .../azure-eventhubs/tests/test_send.py | 24 +++++++------- 8 files changed, 54 insertions(+), 53 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py index cfdf12bd6282..38ffb6268a4f 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py @@ -141,7 +141,7 @@ async def test_send_too_large_message_async(connection_str): client = EventHubClient.from_connection_string(connection_str, debug=False) sender = client.create_sender() try: - data = EventData(b"A" * 300000) + data = EventData(b"A" * 1100000) with pytest.raises(EventHubError): await 
sender.send(data) finally: diff --git a/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py index ce5fa973a069..82add37ef868 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_iothub_receive.py @@ -13,8 +13,10 @@ @pytest.mark.liveTest def test_iothub_receive_sync(iot_connection_str, device_id): - client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) + pytest.skip("current code will cause ErrorCodes.LinkRedirect") + client = EventHubClient.from_iothub_connection_string(iot_connection_str, network_tracing=True) receiver = client.create_receiver(partition_id="0", operation='/messages/events') + receiver._open() try: partitions = client.get_properties() assert partitions["partition_ids"] == ["0", "1", "2", "3"] diff --git a/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py b/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py index b9ef0a778a33..df7a184a227e 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_iothub_send.py @@ -16,11 +16,10 @@ @pytest.mark.liveTest def test_iothub_send_single_event(iot_connection_str, device_id): - client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) - sender = client.add_sender(operation='/messages/devicebound') + client = EventHubClient.from_iothub_connection_string(iot_connection_str, network_tracing=True) + sender = client.create_sender(operation='/messages/devicebound') try: - outcome = sender.send(EventData(b"A single event", to_device=device_id)) - assert outcome.value == 0 + sender.send(EventData(b"A single event", to_device=device_id)) except: raise finally: diff --git a/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py index 1c4c22ed9257..7ae53a0b2496 100644 --- 
a/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_longrunning_receive.py @@ -97,7 +97,7 @@ def test_long_running_receive(connection_str): if args.conn_str: client = EventHubClient.from_connection_string( args.conn_str, - eventhub=args.eventhub, debug=False) + eventhub=args.eventhub, network_tracing=False) elif args.address: client = EventHubClient( args.address, diff --git a/sdk/eventhub/azure-eventhubs/tests/test_negative.py b/sdk/eventhub/azure-eventhubs/tests/test_negative.py index d0290bd88e4a..0e3eb719e488 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_negative.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_negative.py @@ -19,7 +19,7 @@ @pytest.mark.liveTest def test_send_with_invalid_hostname(invalid_hostname, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClient.from_connection_string(invalid_hostname, debug=False) + client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=False) sender = client.create_sender() with pytest.raises(EventHubError): sender._open() @@ -27,7 +27,7 @@ def test_send_with_invalid_hostname(invalid_hostname, connstr_receivers): @pytest.mark.liveTest def test_receive_with_invalid_hostname_sync(invalid_hostname): - client = EventHubClient.from_connection_string(invalid_hostname, debug=True) + client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=True) receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): receiver._open() @@ -36,7 +36,7 @@ def test_receive_with_invalid_hostname_sync(invalid_hostname): @pytest.mark.liveTest def test_send_with_invalid_key(invalid_key, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClient.from_connection_string(invalid_key, debug=False) + client = EventHubClient.from_connection_string(invalid_key, network_tracing=False) sender = client.create_sender() with pytest.raises(EventHubError): sender._open() @@ -44,7 
+44,7 @@ def test_send_with_invalid_key(invalid_key, connstr_receivers): @pytest.mark.liveTest def test_receive_with_invalid_key_sync(invalid_key): - client = EventHubClient.from_connection_string(invalid_key, debug=True) + client = EventHubClient.from_connection_string(invalid_key, network_tracing=True) receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): receiver._open() @@ -53,7 +53,7 @@ def test_receive_with_invalid_key_sync(invalid_key): @pytest.mark.liveTest def test_send_with_invalid_policy(invalid_policy, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClient.from_connection_string(invalid_policy, debug=False) + client = EventHubClient.from_connection_string(invalid_policy, network_tracing=False) sender = client.create_sender() with pytest.raises(EventHubError): sender._open() @@ -61,7 +61,7 @@ def test_send_with_invalid_policy(invalid_policy, connstr_receivers): @pytest.mark.liveTest def test_receive_with_invalid_policy_sync(invalid_policy): - client = EventHubClient.from_connection_string(invalid_policy, debug=True) + client = EventHubClient.from_connection_string(invalid_policy, network_tracing=True) receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): receiver._open() @@ -69,7 +69,8 @@ def test_receive_with_invalid_policy_sync(invalid_policy): @pytest.mark.liveTest def test_send_partition_key_with_partition_sync(connection_str): - client = EventHubClient.from_connection_string(connection_str, debug=True) + pytest.skip("Skipped tentatively. 
Confirm whether to throw ValueError or just warn users") + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender(partition_id="1") try: data = EventData(b"Data") @@ -82,7 +83,7 @@ def test_send_partition_key_with_partition_sync(connection_str): @pytest.mark.liveTest def test_non_existing_entity_sender(connection_str): - client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) + client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) sender = client.create_sender(partition_id="1") with pytest.raises(EventHubError): sender._open() @@ -90,7 +91,7 @@ def test_non_existing_entity_sender(connection_str): @pytest.mark.liveTest def test_non_existing_entity_receiver(connection_str): - client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) + client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) receiver = client.create_receiver(partition_id="0") with pytest.raises(EventHubError): receiver._open() @@ -100,7 +101,7 @@ def test_non_existing_entity_receiver(connection_str): def test_receive_from_invalid_partitions_sync(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) receiver = client.create_receiver(partition_id=p) try: with pytest.raises(EventHubError): @@ -113,7 +114,7 @@ def test_receive_from_invalid_partitions_sync(connection_str): def test_send_to_invalid_partitions(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id=p) try: 
with pytest.raises(EventHubError): @@ -126,10 +127,10 @@ def test_send_to_invalid_partitions(connection_str): def test_send_too_large_message(connection_str): if sys.platform.startswith('darwin'): pytest.skip("Skipping on OSX - open issue regarding message size") - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender() try: - data = EventData(b"A" * 300000) + data = EventData(b"A" * 1100000) with pytest.raises(EventHubError): sender.send(data) finally: @@ -138,8 +139,8 @@ def test_send_too_large_message(connection_str): @pytest.mark.liveTest def test_send_null_body(connection_str): - partitions = ["XYZ", "-1", "1000", "-" ] - client = EventHubClient.from_connection_string(connection_str, debug=False) + partitions = ["XYZ", "-1", "1000", "-"] + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() try: with pytest.raises(ValueError): @@ -152,7 +153,7 @@ def test_send_null_body(connection_str): @pytest.mark.liveTest def test_message_body_types(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) try: received = receiver.receive(timeout=5) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_receive.py b/sdk/eventhub/azure-eventhubs/tests/test_receive.py index 93bb3b01d75c..38944e553bdf 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_receive.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_receive.py @@ -14,7 +14,7 @@ # def test_receive_without_events(connstr_senders): # connection_str, senders = connstr_senders -# client = EventHubClient.from_connection_string(connection_str, debug=True) +# client = 
EventHubClient.from_connection_string(connection_str, network_tracing=True) # receiver = client.create_receiver("$default", "0", event_position=EventPosition('@latest')) # finish = datetime.datetime.now() + datetime.timedelta(seconds=240) # count = 0 @@ -36,7 +36,7 @@ @pytest.mark.liveTest def test_receive_end_of_stream(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) @@ -52,7 +52,7 @@ def test_receive_end_of_stream(connstr_senders): @pytest.mark.liveTest def test_receive_with_offset_sync(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) partitions = client.get_properties() assert partitions["partition_ids"] == ["0", "1"] receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) @@ -82,7 +82,7 @@ def test_receive_with_offset_sync(connstr_senders): @pytest.mark.liveTest def test_receive_with_inclusive_offset(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: @@ -106,7 +106,7 @@ def test_receive_with_inclusive_offset(connstr_senders): @pytest.mark.liveTest def test_receive_with_datetime_sync(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = 
EventHubClient.from_connection_string(connection_str, network_tracing=False) partitions = client.get_properties() assert partitions["partition_ids"] == ["0", "1"] receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) @@ -135,7 +135,7 @@ def test_receive_with_datetime_sync(connstr_senders): @pytest.mark.liveTest def test_receive_with_custom_datetime_sync(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) for i in range(5): senders[0].send(EventData(b"Message before timestamp")) time.sleep(60) @@ -161,9 +161,8 @@ def test_receive_with_custom_datetime_sync(connstr_senders): @pytest.mark.liveTest def test_receive_with_sequence_no(connstr_senders): - # TODO: liveTest fail when just one event data is sent connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: @@ -187,7 +186,7 @@ def test_receive_with_sequence_no(connstr_senders): @pytest.mark.liveTest def test_receive_with_inclusive_sequence_no(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) @@ -205,7 +204,7 @@ def test_receive_with_inclusive_sequence_no(connstr_senders): @pytest.mark.liveTest def test_receive_batch(connstr_senders): connection_str, senders = connstr_senders - client = 
EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) @@ -234,7 +233,7 @@ def batched(): ed.application_properties = batch_app_prop yield ed - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) with receiver: received = receiver.receive(timeout=5) @@ -256,7 +255,7 @@ def batched(): @pytest.mark.liveTest def test_receive_over_websocket_sync(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, network_tracing=False) receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) event_list = [] diff --git a/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py b/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py index 0f57daf7aba0..b24cca267c82 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_reconnect.py @@ -18,11 +18,11 @@ @pytest.mark.liveTest def test_send_with_long_interval_sync(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender() with sender: sender.send(EventData(b"A single event")) - for _ in range(2): + for _ in range(1): 
time.sleep(300) sender.send(EventData(b"A single event")) @@ -30,22 +30,22 @@ def test_send_with_long_interval_sync(connstr_receivers): for r in receivers: received.extend(r.receive(timeout=1)) - assert len(received) == 3 + assert len(received) == 2 assert list(received[0].body)[0] == b"A single event" @pytest.mark.liveTest def test_send_with_forced_conn_close_sync(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender() with sender: sender.send(EventData(b"A single event")) - sender._handler._message_sender.destroy() + sender._handler._connection._conn.destroy() time.sleep(300) sender.send(EventData(b"A single event")) sender.send(EventData(b"A single event")) - sender._handler._message_sender.destroy() + sender._handler._connection._conn.destroy() time.sleep(300) sender.send(EventData(b"A single event")) sender.send(EventData(b"A single event")) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_send.py b/sdk/eventhub/azure-eventhubs/tests/test_send.py index 3831294e3c04..9e443a56149d 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_send.py @@ -16,7 +16,7 @@ @pytest.mark.liveTest def test_send_with_partition_key(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() with sender: data_val = 0 @@ -44,7 +44,7 @@ def test_send_and_receive_large_body_size(connstr_receivers): if sys.platform.startswith('darwin'): pytest.skip("Skipping on OSX - open issue regarding message size") connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + 
client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() with sender: payload = 250 * 1024 @@ -61,7 +61,7 @@ def test_send_and_receive_large_body_size(connstr_receivers): @pytest.mark.liveTest def test_send_and_receive_zero_length_body(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() with sender: sender.send(EventData("")) @@ -77,7 +77,7 @@ def test_send_and_receive_zero_length_body(connstr_receivers): @pytest.mark.liveTest def test_send_single_event(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() with sender: sender.send(EventData(b"A single event")) @@ -98,7 +98,7 @@ def batched(): for i in range(10): yield EventData("Event number {}".format(i)) - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() with sender: sender.send(batched()) @@ -116,7 +116,7 @@ def batched(): @pytest.mark.liveTest def test_send_partition(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id="1") with sender: sender.send(EventData(b"Data")) @@ -130,7 +130,7 @@ def test_send_partition(connstr_receivers): @pytest.mark.liveTest def test_send_non_ascii(connstr_receivers): connection_str, receivers = connstr_receivers - client = 
EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id="0") with sender: sender.send(EventData(u"é,è,à,ù,â,ê,î,ô,û")) @@ -150,7 +150,7 @@ def batched(): for i in range(10): yield EventData("Event number {}".format(i)) - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id="1") with sender: sender.send(batched()) @@ -165,7 +165,7 @@ def batched(): @pytest.mark.liveTest def test_send_array_sync(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender() with sender: sender.send(EventData([b"A", b"B", b"C"])) @@ -181,7 +181,7 @@ def test_send_array_sync(connstr_receivers): @pytest.mark.liveTest def test_send_multiple_clients(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender_0 = client.create_sender(partition_id="0") sender_1 = client.create_sender(partition_id="1") with sender_0: @@ -212,7 +212,7 @@ def batched(): ed.application_properties = app_prop yield ed - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() with sender: sender.send(batched()) @@ -233,7 +233,7 @@ def batched(): @pytest.mark.liveTest def test_send_over_websocket_sync(connstr_receivers): connection_str, receivers = connstr_receivers - client = 
EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, network_tracing=False) sender = client.create_sender() event_list = [] From 56af6f370e898ef7c1369292af3df7c253925369 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 23:26:49 -0700 Subject: [PATCH 42/49] debug->network_tracing --- .../asynctests/test_iothub_receive_async.py | 4 +-- .../test_longrunning_receive_async.py | 2 +- .../tests/asynctests/test_receive_async.py | 26 +++++++++---------- .../tests/asynctests/test_reconnect_async.py | 4 +-- .../tests/asynctests/test_send_async.py | 22 ++++++++-------- 5 files changed, 29 insertions(+), 29 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py index 396b7d697c87..b19b39723f7f 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py @@ -25,7 +25,7 @@ async def pump(receiver, sleep=None): async def get_partitions(iot_connection_str): try: - client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) + client = EventHubClient.from_iothub_connection_string(iot_connection_str, network_tracing=True) receiver = client.create_receiver(partition_id="0", prefetch=1000, operation='/messages/events') async with receiver: partitions = await client.get_properties() @@ -38,7 +38,7 @@ async def get_partitions(iot_connection_str): @pytest.mark.asyncio async def test_iothub_receive_multiple_async(iot_connection_str): partitions = await get_partitions(iot_connection_str) - client = EventHubClient.from_iothub_connection_string(iot_connection_str, debug=True) + client = EventHubClient.from_iothub_connection_string(iot_connection_str, 
network_tracing=True) try: receivers = [] for p in partitions: diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py index 71ce5c4ee7d5..1f50144674b8 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_longrunning_receive_async.py @@ -103,7 +103,7 @@ async def test_long_running_receive_async(connection_str): if args.conn_str: client = EventHubClient.from_connection_string( args.conn_str, - eventhub=args.eventhub, auth_timeout=240, debug=False) + eventhub=args.eventhub, auth_timeout=240, network_tracing=False) elif args.address: client = EventHubClient( args.address, diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py index 96c4489fabc2..18db98649264 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_receive_async.py @@ -17,7 +17,7 @@ @pytest.mark.asyncio async def test_receive_end_of_stream_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -33,7 +33,7 @@ async def test_receive_end_of_stream_async(connstr_senders): @pytest.mark.asyncio async def test_receive_with_offset_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = 
client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -57,7 +57,7 @@ async def test_receive_with_offset_async(connstr_senders): @pytest.mark.asyncio async def test_receive_with_inclusive_offset_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -78,7 +78,7 @@ async def test_receive_with_inclusive_offset_async(connstr_senders): @pytest.mark.asyncio async def test_receive_with_datetime_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -103,7 +103,7 @@ async def test_receive_with_datetime_async(connstr_senders): async def test_receive_with_sequence_no_async(connstr_senders): # TODO: sampe problem as the sync version connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -127,7 +127,7 @@ async def test_receive_with_sequence_no_async(connstr_senders): @pytest.mark.asyncio async def test_receive_with_inclusive_sequence_no_async(connstr_senders): connection_str, senders = connstr_senders - 
client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -147,7 +147,7 @@ async def test_receive_with_inclusive_sequence_no_async(connstr_senders): @pytest.mark.asyncio async def test_receive_batch_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -179,7 +179,7 @@ async def test_exclusive_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receivers = [] for exclusive_receiver_priority in [10, 20]: receivers.append(client.create_receiver(partition_id="0", exclusive_receiver_priority=exclusive_receiver_priority, prefetch=5)) @@ -197,7 +197,7 @@ async def test_multiple_receiver_async(connstr_senders): connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) partitions = await client.get_properties() assert partitions["partition_ids"] == ["0", "1"] receivers = [] @@ -223,7 +223,7 @@ async def test_exclusive_receiver_after_non_exclusive_receiver_async(connstr_sen connection_str, senders = 
connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receivers = [] receivers.append(client.create_receiver(partition_id="0", prefetch=10)) receivers.append(client.create_receiver(partition_id="0", exclusive_receiver_priority=15, prefetch=10)) @@ -245,7 +245,7 @@ async def test_non_exclusive_receiver_after_exclusive_receiver_async(connstr_sen connection_str, senders = connstr_senders senders[0].send(EventData(b"Receiving only a single event")) - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receivers = [] receivers.append(client.create_receiver(partition_id="0", exclusive_receiver_priority=15, prefetch=10)) receivers.append(client.create_receiver(partition_id="0", prefetch=10)) @@ -279,7 +279,7 @@ def batched(): ed.application_properties = app_prop yield ed - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) async with receiver: received = await receiver.receive(timeout=5) @@ -302,7 +302,7 @@ def batched(): @pytest.mark.asyncio async def test_receive_over_websocket_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, debug=False) + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, network_tracing=False) receiver = client.create_receiver(partition_id="0", prefetch=500, event_position=EventPosition('@latest')) event_list = [] diff --git 
a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py index 40469bd6e3fc..ebbace8c8a05 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_reconnect_async.py @@ -20,7 +20,7 @@ @pytest.mark.asyncio async def test_send_with_long_interval_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender() try: await sender.send(EventData(b"A single event")) @@ -54,7 +54,7 @@ def pump(receiver): @pytest.mark.asyncio async def test_send_with_forced_conn_close_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender() try: await sender.send(EventData(b"A single event")) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py index 55096ec63c86..45eb277205f0 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py @@ -19,7 +19,7 @@ @pytest.mark.asyncio async def test_send_with_partition_key_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() async with sender: @@ -47,7 +47,7 @@ async def test_send_with_partition_key_async(connstr_receivers): @pytest.mark.asyncio async def 
test_send_and_receive_zero_length_body_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() async with sender: await sender.send(EventData("")) @@ -64,7 +64,7 @@ async def test_send_and_receive_zero_length_body_async(connstr_receivers): @pytest.mark.asyncio async def test_send_single_event_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() async with sender: await sender.send(EventData(b"A single event")) @@ -86,7 +86,7 @@ def batched(): for i in range(10): yield EventData("Event number {}".format(i)) - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() async with sender: await sender.send(batched()) @@ -105,7 +105,7 @@ def batched(): @pytest.mark.asyncio async def test_send_partition_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id="1") async with sender: await sender.send(EventData(b"Data")) @@ -120,7 +120,7 @@ async def test_send_partition_async(connstr_receivers): @pytest.mark.asyncio async def test_send_non_ascii_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = 
client.create_sender(partition_id="0") async with sender: await sender.send(EventData("é,è,à,ù,â,ê,î,ô,û")) @@ -141,7 +141,7 @@ def batched(): for i in range(10): yield EventData("Event number {}".format(i)) - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id="1") async with sender: await sender.send(batched()) @@ -156,7 +156,7 @@ def batched(): @pytest.mark.asyncio async def test_send_array_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() async with sender: await sender.send(EventData([b"A", b"B", b"C"])) @@ -173,7 +173,7 @@ async def test_send_array_async(connstr_receivers): @pytest.mark.asyncio async def test_send_multiple_clients_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender_0 = client.create_sender(partition_id="0") sender_1 = client.create_sender(partition_id="1") async with sender_0 and sender_1: @@ -204,7 +204,7 @@ def batched(): ed.application_properties = app_prop yield ed - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() async with sender: await sender.send(batched()) @@ -226,7 +226,7 @@ def batched(): @pytest.mark.asyncio async def test_send_over_websocket_async(connstr_receivers): connection_str, receivers = connstr_receivers - client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, 
debug=False) + client = EventHubClient.from_connection_string(connection_str, transport_type=TransportType.AmqpOverWebsocket, network_tracing=False) sender = client.create_sender() event_list = [] From 6a33049ddf452f433469e3ce10876290822ada78 Mon Sep 17 00:00:00 2001 From: yijxie Date: Thu, 30 May 2019 23:30:05 -0700 Subject: [PATCH 43/49] Negative test fix --- .../azure/eventhub/aio/receiver_async.py | 8 +-- .../tests/asynctests/test_negative_async.py | 57 ++++++++++--------- .../azure-eventhubs/tests/test_negative.py | 25 ++++---- 3 files changed, 49 insertions(+), 41 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 95b6455e3088..832c7f96b4a6 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -236,7 +236,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements return True except errors.AuthenticationException as shutdown: log.info("AsyncReceiver disconnected due to token expiry. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubAuthenticationError(str(shutdown), shutdown) await self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -244,7 +244,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements log.info("AsyncReceiver detached. Attempting reconnect.") return False log.info("AsyncReceiver detached. Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -252,7 +252,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements log.info("AsyncReceiver detached. Attempting reconnect.") return False log.info("AsyncReceiver detached. 
Shutting down.") - error = EventHubError(str(shutdown), shutdown) + error = EventHubConnectionError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -260,7 +260,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements log.info("AsyncReceiver couldn't authenticate. Attempting reconnect.") return False log.info("AsyncReceiver connection error (%r). Shutting down.", shutdown) - error = EventHubError(str(shutdown)) + error = EventHubConnectionError(str(shutdown)) await self.close(exception=error) raise error except Exception as e: diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py index 38ffb6268a4f..cff4f73b9b68 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py @@ -13,25 +13,29 @@ from azure.eventhub import ( EventData, EventPosition, - EventHubError) + EventHubError, + EventHubConnectionError, + EventHubAuthenticationError, + EventHubMessageError, +) from azure.eventhub.aio import EventHubClient @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_with_invalid_hostname_async(invalid_hostname, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClient.from_connection_string(invalid_hostname, debug=True) + client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=True) sender = client.create_sender() - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_invalid_hostname_async(invalid_hostname): - client = EventHubClient.from_connection_string(invalid_hostname, debug=True) + client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=True) sender = 
client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): await sender._open() @@ -39,18 +43,18 @@ async def test_receive_with_invalid_hostname_async(invalid_hostname): @pytest.mark.asyncio async def test_send_with_invalid_key_async(invalid_key, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClient.from_connection_string(invalid_key, debug=False) + client = EventHubClient.from_connection_string(invalid_key, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_invalid_key_async(invalid_key): - client = EventHubClient.from_connection_string(invalid_key, debug=True) + client = EventHubClient.from_connection_string(invalid_key, network_tracing=True) sender = client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): await sender._open() @@ -58,25 +62,26 @@ async def test_receive_with_invalid_key_async(invalid_key): @pytest.mark.asyncio async def test_send_with_invalid_policy_async(invalid_policy, connstr_receivers): _, receivers = connstr_receivers - client = EventHubClient.from_connection_string(invalid_policy, debug=False) + client = EventHubClient.from_connection_string(invalid_policy, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_receive_with_invalid_policy_async(invalid_policy): - client = EventHubClient.from_connection_string(invalid_policy, debug=True) + client = EventHubClient.from_connection_string(invalid_policy, network_tracing=True) sender = client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with 
pytest.raises(EventHubAuthenticationError): await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_partition_key_with_partition_async(connection_str): - client = EventHubClient.from_connection_string(connection_str, debug=True) + pytest.skip("Skipped tentatively. Confirm whether to throw ValueError or just warn users") + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) sender = client.create_sender(partition_id="1") try: data = EventData(b"Data") @@ -90,18 +95,18 @@ async def test_send_partition_key_with_partition_async(connection_str): @pytest.mark.liveTest @pytest.mark.asyncio async def test_non_existing_entity_sender_async(connection_str): - client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) + client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) sender = client.create_sender(partition_id="1") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): await sender._open() @pytest.mark.liveTest @pytest.mark.asyncio async def test_non_existing_entity_receiver_async(connection_str): - client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", debug=False) + client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): await receiver._open() @@ -110,10 +115,10 @@ async def test_non_existing_entity_receiver_async(connection_str): async def test_receive_from_invalid_partitions_async(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: - client = EventHubClient.from_connection_string(connection_str, debug=True) + client = EventHubClient.from_connection_string(connection_str, network_tracing=True) receiver = client.create_receiver(partition_id=p) try: - with 
pytest.raises(EventHubError): + with pytest.raises(EventHubConnectionError): await receiver.receive(timeout=10) finally: await receiver.close() @@ -124,10 +129,10 @@ async def test_receive_from_invalid_partitions_async(connection_str): async def test_send_to_invalid_partitions_async(connection_str): partitions = ["XYZ", "-1", "1000", "-" ] for p in partitions: - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id=p) try: - with pytest.raises(EventHubError): + with pytest.raises(EventHubConnectionError): await sender._open() finally: await sender.close() @@ -138,11 +143,11 @@ async def test_send_to_invalid_partitions_async(connection_str): async def test_send_too_large_message_async(connection_str): if sys.platform.startswith('darwin'): pytest.skip("Skipping on OSX - open issue regarding message size") - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() try: data = EventData(b"A" * 1100000) - with pytest.raises(EventHubError): + with pytest.raises(EventHubMessageError): await sender.send(data) finally: await sender.close() @@ -151,7 +156,7 @@ async def test_send_too_large_message_async(connection_str): @pytest.mark.liveTest @pytest.mark.asyncio async def test_send_null_body_async(connection_str): - client = EventHubClient.from_connection_string(connection_str, debug=False) + client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender() try: with pytest.raises(ValueError): @@ -177,7 +182,7 @@ async def pump(receiver): @pytest.mark.asyncio async def test_max_receivers_async(connstr_senders): connection_str, senders = connstr_senders - client = EventHubClient.from_connection_string(connection_str, debug=True) + client 
= EventHubClient.from_connection_string(connection_str, network_tracing=True) receivers = [] for i in range(6): receivers.append(client.create_receiver(partition_id="0", prefetch=1000, event_position=EventPosition('@latest'))) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_negative.py b/sdk/eventhub/azure-eventhubs/tests/test_negative.py index 0e3eb719e488..31978155ed75 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_negative.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_negative.py @@ -13,6 +13,9 @@ EventData, EventPosition, EventHubError, + EventHubAuthenticationError, + EventHubConnectionError, + EventHubMessageError, EventHubClient) @@ -21,7 +24,7 @@ def test_send_with_invalid_hostname(invalid_hostname, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): sender._open() @@ -29,7 +32,7 @@ def test_send_with_invalid_hostname(invalid_hostname, connstr_receivers): def test_receive_with_invalid_hostname_sync(invalid_hostname): client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=True) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): receiver._open() @@ -38,7 +41,7 @@ def test_send_with_invalid_key(invalid_key, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_key, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): sender._open() @@ -46,7 +49,7 @@ def test_send_with_invalid_key(invalid_key, connstr_receivers): def test_receive_with_invalid_key_sync(invalid_key): client = EventHubClient.from_connection_string(invalid_key, network_tracing=True) receiver = 
client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): receiver._open() @@ -55,7 +58,7 @@ def test_send_with_invalid_policy(invalid_policy, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_policy, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): sender._open() @@ -63,7 +66,7 @@ def test_send_with_invalid_policy(invalid_policy, connstr_receivers): def test_receive_with_invalid_policy_sync(invalid_policy): client = EventHubClient.from_connection_string(invalid_policy, network_tracing=True) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): receiver._open() @@ -85,7 +88,7 @@ def test_send_partition_key_with_partition_sync(connection_str): def test_non_existing_entity_sender(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) sender = client.create_sender(partition_id="1") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): sender._open() @@ -93,7 +96,7 @@ def test_non_existing_entity_sender(connection_str): def test_non_existing_entity_receiver(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubError): + with pytest.raises(EventHubAuthenticationError): receiver._open() @@ -104,7 +107,7 @@ def test_receive_from_invalid_partitions_sync(connection_str): client = EventHubClient.from_connection_string(connection_str, network_tracing=True) receiver = client.create_receiver(partition_id=p) try: - with pytest.raises(EventHubError): + with pytest.raises(EventHubConnectionError): receiver.receive(timeout=10) finally: 
receiver.close() @@ -117,7 +120,7 @@ def test_send_to_invalid_partitions(connection_str): client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id=p) try: - with pytest.raises(EventHubError): + with pytest.raises(EventHubConnectionError): sender._open() finally: sender.close() @@ -131,7 +134,7 @@ def test_send_too_large_message(connection_str): sender = client.create_sender() try: data = EventData(b"A" * 1100000) - with pytest.raises(EventHubError): + with pytest.raises(EventHubMessageError): sender.send(data) finally: sender.close() From 3ec4429ec3364ea3646fac733a505bbfc0ef5b09 Mon Sep 17 00:00:00 2001 From: yijxie Date: Fri, 31 May 2019 15:15:02 -0700 Subject: [PATCH 44/49] Remove partition_key, send with batching_label --- .../azure/eventhub/aio/sender_async.py | 28 ++++++++--------- .../azure-eventhubs/azure/eventhub/common.py | 27 ++++++++++++----- .../azure-eventhubs/azure/eventhub/sender.py | 30 +++++++++---------- .../tests/asynctests/test_send_async.py | 8 ++--- .../azure-eventhubs/tests/test_send.py | 8 ++--- 5 files changed, 57 insertions(+), 44 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index c0b31182a740..75b16f472033 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -288,20 +288,16 @@ async def _send_event_data(self, event_data): return self._outcome @staticmethod - def _verify_partition(event_datas): - ed_iter = iter(event_datas) - try: - ed = next(ed_iter) - partition_key = ed.partition_key - yield ed - except StopIteration: - raise ValueError("event_data must not be empty") - for ed in ed_iter: - if ed.partition_key != partition_key: - log.warning("partition key of all EventData must be the same if being sent in a batch") - yield ed + def 
_set_batching_label(event_datas, batching_label): + if batching_label: + ed_iter = iter(event_datas) + for ed in ed_iter: + ed._batching_label = batching_label + yield ed + else: + return event_datas - async def send(self, event_data): + async def send(self, event_data, batching_label): """ Sends an event data and blocks until acknowledgement is received or operation times out. @@ -325,9 +321,13 @@ async def send(self, event_data): if self.error: raise self.error if isinstance(event_data, EventData): + if batching_label: + event_data._batching_label = batching_label wrapper_event_data = event_data else: - wrapper_event_data = _BatchSendEventData(self._verify_partition(event_data)) + wrapper_event_data = _BatchSendEventData( + self._set_batching_label(event_data, batching_label), + batching_label=batching_label) wrapper_event_data.message.on_send_complete = self._on_outcome await self._send_event_data(wrapper_event_data) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index 1e9244d33e37..aac1535bdbc2 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -105,8 +105,9 @@ def __str__(self): dic['enqueued_time'] = str(self.enqueued_time) if self.device_id: dic['device_id'] = str(self.device_id) - if self.partition_key: - dic['partition_key'] = str(self.partition_key) + if self._batching_label: + dic['_batching_label'] = str(self._batching_label) + return str(dic) @@ -154,7 +155,7 @@ def device_id(self): return self._annotations.get(EventData.PROP_DEVICE_ID, None) @property - def partition_key(self): + def _batching_label(self): """ The partition key of the event data object. 
@@ -165,8 +166,8 @@ def partition_key(self): except KeyError: return self._annotations.get(EventData.PROP_PARTITION_KEY, None) - @partition_key.setter - def partition_key(self, value): + @_batching_label.setter + def _batching_label(self, value): """ Set the partition key of the event data object. @@ -181,6 +182,7 @@ def partition_key(self, value): self.message.header = header self._annotations = annotations + @property def application_properties(self): """ @@ -254,9 +256,20 @@ def encode_message(self): class _BatchSendEventData(EventData): - def __init__(self, batch_event_data): - # TODO: rethink if to_device should be included in + def __init__(self, batch_event_data, batching_label=None): self.message = BatchMessage(data=batch_event_data, multi_messages=False, properties=None) + self.set_batching_label(batching_label) + + def set_batching_label(self, value): + if value: + annotations = self.message.annotations + if annotations is None: + annotations = dict() + annotations[types.AMQPSymbol(EventData.PROP_PARTITION_KEY)] = value + header = MessageHeader() + header.durable = True + self.message.annotations = annotations + self.message.header = header class EventPosition(object): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index 7d9800bf4430..efad7b9f0caa 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -281,20 +281,16 @@ def _send_event_data(self, event_data): return self._outcome @staticmethod - def _verify_partition(event_datas): - ed_iter = iter(event_datas) - try: - ed = next(ed_iter) - partition_key = ed.partition_key - yield ed - except StopIteration: - raise ValueError("batch_event_data must not be empty") - for ed in ed_iter: - if ed.partition_key != partition_key: - log.warning("partition key of all event_data must be the same if being sent in a batch") - yield ed - - def send(self, event_data): + 
def _set_batching_label(event_datas, batching_label): + if batching_label: + ed_iter = iter(event_datas) + for ed in ed_iter: + ed._batching_label = batching_label + yield ed + else: + return event_datas + + def send(self, event_data, batching_label=None): """ Sends an event data and blocks until acknowledgement is received or operation times out. @@ -318,9 +314,13 @@ def send(self, event_data): if self.error: raise self.error if isinstance(event_data, EventData): + if batching_label: + event_data._batching_label = batching_label wrapper_event_data = event_data else: - wrapper_event_data = _BatchSendEventData(self._verify_partition(event_data)) + wrapper_event_data = _BatchSendEventData( + self._set_batching_label(event_data, batching_label), + batching_label=batching_label) wrapper_event_data.message.on_send_complete = self._on_outcome self._send_event_data(wrapper_event_data) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py index 45eb277205f0..9883be044345 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_send_async.py @@ -28,19 +28,19 @@ async def test_send_with_partition_key_async(connstr_receivers): partition_key = b"test_partition_" + partition for i in range(50): data = EventData(str(data_val)) - data.partition_key = partition_key + # data.partition_key = partition_key data_val += 1 - await sender.send(data) + await sender.send(data, batching_label=partition_key) found_partition_keys = {} for index, partition in enumerate(receivers): received = partition.receive(timeout=5) for message in received: try: - existing = found_partition_keys[message.partition_key] + existing = found_partition_keys[message._batching_label] assert existing == index except KeyError: - found_partition_keys[message.partition_key] = index + found_partition_keys[message._batching_label] = index 
@pytest.mark.liveTest diff --git a/sdk/eventhub/azure-eventhubs/tests/test_send.py b/sdk/eventhub/azure-eventhubs/tests/test_send.py index 9e443a56149d..3af0cbed2ef2 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_send.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_send.py @@ -24,19 +24,19 @@ def test_send_with_partition_key(connstr_receivers): partition_key = b"test_partition_" + partition for i in range(50): data = EventData(str(data_val)) - data.partition_key = partition_key + #data.partition_key = partition_key data_val += 1 - sender.send(data) + sender.send(data, batching_label=partition_key) found_partition_keys = {} for index, partition in enumerate(receivers): received = partition.receive(timeout=5) for message in received: try: - existing = found_partition_keys[message.partition_key] + existing = found_partition_keys[message._batching_label] assert existing == index except KeyError: - found_partition_keys[message.partition_key] = index + found_partition_keys[message._batching_label] = index @pytest.mark.liveTest From 343f3915a439743126c04ce2a495c6a0bb407e05 Mon Sep 17 00:00:00 2001 From: yijxie Date: Fri, 31 May 2019 22:45:03 -0700 Subject: [PATCH 45/49] Fix review problems --- .../azure/eventhub/__init__.py | 8 +-- .../eventhub/aio/event_hubs_client_async.py | 4 +- .../azure/eventhub/aio/receiver_async.py | 33 ++++++----- .../azure/eventhub/aio/sender_async.py | 41 +++++++------- .../azure-eventhubs/azure/eventhub/client.py | 2 +- .../azure/eventhub/client_abstract.py | 6 +- .../azure-eventhubs/azure/eventhub/common.py | 6 ++ .../azure-eventhubs/azure/eventhub/error.py | 6 +- .../azure/eventhub/receiver.py | 27 +++++---- .../azure-eventhubs/azure/eventhub/sender.py | 37 ++++++------ .../asynctests/test_iothub_receive_async.py | 30 ++++------ .../tests/asynctests/test_negative_async.py | 56 +++++++++---------- .../azure-eventhubs/tests/test_negative.py | 28 +++++----- 13 files changed, 135 insertions(+), 149 deletions(-) diff --git 
a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py index 3c13e70013e5..9766b6816ab8 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/__init__.py @@ -6,7 +6,7 @@ __version__ = "1.3.1" from azure.eventhub.common import EventData, EventPosition -from azure.eventhub.error import EventHubError, EventHubAuthenticationError, EventHubConnectionError, EventHubMessageError +from azure.eventhub.error import EventHubError, EventDataError, ConnectError, AuthenticationError from azure.eventhub.client import EventHubClient from azure.eventhub.sender import Sender from azure.eventhub.receiver import Receiver @@ -18,9 +18,9 @@ "__version__", "EventData", "EventHubError", - "EventHubConnectionError", - "EventHubMessageError", - "EventHubAuthenticationError", + "ConnectError", + "EventDataError", + "AuthenticationError", "EventPosition", "EventHubClient", "Sender", diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index c55f5c71e47a..32b1fab44b54 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -67,7 +67,7 @@ def _create_auth(self, username=None, password=None): transport_type=transport_type) elif isinstance(self.credential, SASTokenCredentials): - token = self.sas_token() if callable(self.sas_token) else self.sas_token + token = self.credential.get_sas_token() try: expiry = int(parse_sas_token(token)['se']) except (KeyError, TypeError, IndexError): @@ -80,7 +80,7 @@ def _create_auth(self, username=None, password=None): transport_type=transport_type) else: - get_jwt_token = functools.partial(self.aad_credential.get_token, ['https://eventhubs.azure.net//.default']) + get_jwt_token = 
functools.partial(self.credential.get_token, ['https://eventhubs.azure.net//.default']) return authentication.JWTTokenAsync(self.auth_uri, self.auth_uri, get_jwt_token, http_proxy=http_proxy, transport_type=transport_type) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index 832c7f96b4a6..b5363a4cc0e2 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -11,7 +11,7 @@ from uamqp import ReceiveClientAsync, Source from azure.eventhub import EventHubError, EventData -from azure.eventhub.error import EventHubError, EventHubAuthenticationError, EventHubConnectionError, _error_handler +from azure.eventhub.error import EventHubError, AuthenticationError, ConnectError, _error_handler log = logging.getLogger(__name__) @@ -54,7 +54,7 @@ def __init__( # pylint: disable=super-init-not-called self.client = client self.source = source self.offset = offset - self.iter_started = False + self.messages_iter = None self.prefetch = prefetch self.exclusive_receiver_priority = exclusive_receiver_priority self.keep_alive = keep_alive @@ -97,9 +97,8 @@ async def __anext__(self): await self._open() while True: try: - if not self.iter_started: + if not self.messages_iter: self.messages_iter = self._handler.receive_messages_iter_async() - self.iter_started = True message = await self.messages_iter.__anext__() event_data = EventData(message=message) self.offset = event_data.offset @@ -113,7 +112,7 @@ async def __anext__(self): await self.reconnect() else: log.info("Receiver detached. 
Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -122,7 +121,7 @@ async def __anext__(self): await self.reconnect() else: log.info("Receiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except StopAsyncIteration: @@ -191,16 +190,16 @@ async def _open(self): await self.reconnect() else: log.info("Receiver detached. Failed to connect") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: log.info("Receiver couldn't authenticate.", shutdown) - error = EventHubAuthenticationError(str(shutdown)) + error = AuthenticationError(str(shutdown)) raise error else: log.info("Receiver connection error (%r).", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = ConnectError(str(shutdown)) raise error except Exception as e: log.info("Unexpected error occurred (%r)", e) @@ -228,7 +227,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent), loop=self.loop) - self.iter_started = False + self.messages_iter = None try: await self._handler.open_async() while not await self._handler.client_ready_async(): @@ -236,7 +235,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements return True except errors.AuthenticationException as shutdown: log.info("AsyncReceiver disconnected due to token expiry. 
Shutting down.") - error = EventHubAuthenticationError(str(shutdown), shutdown) + error = AuthenticationError(str(shutdown), shutdown) await self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -244,7 +243,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements log.info("AsyncReceiver detached. Attempting reconnect.") return False log.info("AsyncReceiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -252,7 +251,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements log.info("AsyncReceiver detached. Attempting reconnect.") return False log.info("AsyncReceiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -260,7 +259,7 @@ async def _reconnect(self): # pylint: disable=too-many-statements log.info("AsyncReceiver couldn't authenticate. Attempting reconnect.") return False log.info("AsyncReceiver connection error (%r). Shutting down.", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = ConnectError(str(shutdown)) await self.close(exception=error) raise error except Exception as e: @@ -302,7 +301,7 @@ async def close(self, exception=None): elif isinstance(exception, EventHubError): self.error = exception elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): - self.error = EventHubConnectionError(str(exception), exception) + self.error = ConnectError(str(exception), exception) elif exception: self.error = EventHubError(str(exception)) else: @@ -368,7 +367,7 @@ async def receive(self, max_batch_size=None, timeout=None): await self.reconnect() else: log.info("AsyncReceiver detached. 
Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -377,7 +376,7 @@ async def receive(self, max_batch_size=None, timeout=None): await self.reconnect() else: log.info("AsyncReceiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except Exception as e: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py index 75b16f472033..e263131ff859 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/sender_async.py @@ -13,8 +13,8 @@ from azure.eventhub import MessageSendResult from azure.eventhub import EventHubError from azure.eventhub.common import EventData, _BatchSendEventData -from azure.eventhub.error import EventHubError, EventHubConnectionError, \ - EventHubAuthenticationError, EventHubMessageError, _error_handler +from azure.eventhub.error import EventHubError, ConnectError, \ + AuthenticationError, EventDataError, _error_handler log = logging.getLogger(__name__) @@ -137,16 +137,16 @@ async def _open(self): await self.reconnect() else: log.info("Sender detached. 
Failed to connect") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: log.info("Sender couldn't authenticate.", shutdown) - error = EventHubAuthenticationError(str(shutdown)) + error = AuthenticationError(str(shutdown)) raise error else: log.info("Sender connection error (%r).", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = ConnectError(str(shutdown)) raise error except Exception as e: log.info("Unexpected error occurred (%r)", e) @@ -175,7 +175,7 @@ async def _reconnect(self): return True except errors.AuthenticationException as shutdown: log.info("AsyncSender disconnected due to token expiry. Shutting down.") - error = EventHubAuthenticationError(str(shutdown), shutdown) + error = AuthenticationError(str(shutdown), shutdown) await self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -183,7 +183,7 @@ async def _reconnect(self): log.info("AsyncSender detached. Attempting reconnect.") return False log.info("AsyncSender reconnect failed. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -191,7 +191,7 @@ async def _reconnect(self): log.info("AsyncSender detached. Attempting reconnect.") return False log.info("AsyncSender reconnect failed. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -199,7 +199,7 @@ async def _reconnect(self): log.info("AsyncSender couldn't authenticate. 
Attempting reconnect.") return False log.info("AsyncSender connection error (%r). Shutting down.", shutdown) - error = EventHubConnectionError(str(shutdown)) + error = ConnectError(str(shutdown)) await self.close(exception=error) raise error except Exception as e: @@ -241,7 +241,7 @@ async def close(self, exception=None): elif isinstance(exception, EventHubError): self.error = exception elif isinstance(exception, (errors.LinkDetach, errors.ConnectionClose)): - self.error = EventHubConnectionError(str(exception), exception) + self.error = ConnectError(str(exception), exception) elif exception: self.error = EventHubError(str(exception)) else: @@ -255,7 +255,7 @@ async def _send_event_data(self, event_data): if self._outcome != MessageSendResult.Ok: raise Sender._error(self._outcome, self._condition) except errors.MessageException as failed: - error = EventHubMessageError(str(failed), failed) + error = EventDataError(str(failed), failed) await self.close(exception=error) raise error except (errors.TokenExpired, errors.AuthenticationException): @@ -267,7 +267,7 @@ async def _send_event_data(self, event_data): await self.reconnect() else: log.info("Sender detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -276,7 +276,7 @@ async def _send_event_data(self, event_data): await self.reconnect() else: log.info("Sender detached. 
Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) await self.close(exception=error) raise error except Exception as e: @@ -289,15 +289,12 @@ async def _send_event_data(self, event_data): @staticmethod def _set_batching_label(event_datas, batching_label): - if batching_label: - ed_iter = iter(event_datas) - for ed in ed_iter: - ed._batching_label = batching_label - yield ed - else: - return event_datas + ed_iter = iter(event_datas) + for ed in ed_iter: + ed._batching_label = batching_label + yield ed - async def send(self, event_data, batching_label): + async def send(self, event_data, batching_label=None): """ Sends an event data and blocks until acknowledgement is received or operation times out. @@ -327,7 +324,7 @@ async def send(self, event_data, batching_label): else: wrapper_event_data = _BatchSendEventData( self._set_batching_label(event_data, batching_label), - batching_label=batching_label) + batching_label=batching_label) if batching_label else _BatchSendEventData(event_data) wrapper_event_data.message.on_send_complete = self._on_outcome await self._send_event_data(wrapper_event_data) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py index cbc6d7ce1dd2..8fb7940850e9 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client.py @@ -74,7 +74,7 @@ def _create_auth(self, username=None, password=None): transport_type=transport_type) elif isinstance(self.credential, SASTokenCredentials): - token = self.sas_token() if callable(self.sas_token) else self.sas_token + token = self.credential.get_sas_token() try: expiry = int(parse_sas_token(token)['se']) except (KeyError, TypeError, IndexError): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py index 
48f36758fb50..26435fd93635 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/client_abstract.py @@ -129,15 +129,11 @@ def __init__(self, host, event_hub_path, credential, **kwargs): self.address.path = "/" + event_hub_path if event_hub_path else "" self._auth_config = {} self.credential = credential - if isinstance(credential, SASTokenCredentials): - self.sas_token = credential.token - elif isinstance(credential, SharedKeyCredentials): + if isinstance(credential, SharedKeyCredentials): self.username = credential.policy self.password = credential.key self._auth_config['username'] = self.username self._auth_config['password'] = self.password - else: - self.aad_credential = credential self.host = host self.eh_name = event_hub_path diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py index aac1535bdbc2..3af21e5d2e86 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/common.py @@ -349,6 +349,12 @@ class SASTokenCredentials(object): def __init__(self, token): self.token = token + def get_sas_token(self): + if callable(self.token): + return self.token() + else: + return self.token + class SharedKeyCredentials(object): def __init__(self, policy, key): diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py index b20cc7cd36a0..69aaa701496b 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/error.py @@ -95,14 +95,14 @@ def _parse_error(self, error_list): self.details = details -class EventHubAuthenticationError(EventHubError): +class AuthenticationError(EventHubError): pass -class EventHubConnectionError(EventHubError): +class ConnectError(EventHubError): pass -class EventHubMessageError(EventHubError): +class 
EventDataError(EventHubError): pass diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index 3a4ab34229e3..edd897cd9077 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -12,7 +12,7 @@ from uamqp import ReceiveClient, Source from azure.eventhub.common import EventData -from azure.eventhub.error import EventHubError, EventHubAuthenticationError, EventHubConnectionError, _error_handler +from azure.eventhub.error import EventHubError, AuthenticationError, ConnectError, _error_handler log = logging.getLogger(__name__) @@ -52,7 +52,7 @@ def __init__(self, client, source, event_position=None, prefetch=300, exclusive_ self.client = client self.source = source self.offset = event_position - self.iter_started = False + self.messages_iter = None self.prefetch = prefetch self.exclusive_receiver_priority = exclusive_receiver_priority self.keep_alive = keep_alive @@ -94,9 +94,8 @@ def __next__(self): self._open() while True: try: - if not self.iter_started: + if not self.messages_iter: self.messages_iter = self._handler.receive_messages_iter() - self.iter_started = True message = next(self.messages_iter) event_data = EventData(message=message) self.offset = event_data.offset @@ -184,16 +183,16 @@ def _open(self): self.reconnect() else: log.info("Receiver detached. 
Failed to connect") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: log.info("Receiver couldn't authenticate.", shutdown) - error = EventHubAuthenticationError(str(shutdown), shutdown) + error = AuthenticationError(str(shutdown), shutdown) raise error else: log.info("Receiver connection error (%r).", shutdown) - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) raise error except Exception as e: log.info("Unexpected error occurred (%r)", e) @@ -220,7 +219,7 @@ def _reconnect(self): # pylint: disable=too-many-statements keep_alive_interval=self.keep_alive, client_name=self.name, properties=self.client.create_properties(self.client.config.user_agent)) - self.iter_started = False + self.messages_iter = None try: self._handler.open() while not self._handler.client_ready(): @@ -228,7 +227,7 @@ def _reconnect(self): # pylint: disable=too-many-statements return True except errors.AuthenticationException as shutdown: log.info("Receiver disconnected due to token expiry. Shutting down.") - error = EventHubAuthenticationError(str(shutdown), shutdown) + error = AuthenticationError(str(shutdown), shutdown) self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -236,7 +235,7 @@ def _reconnect(self): # pylint: disable=too-many-statements log.info("Receiver detached. Attempting reconnect.") return False log.info("Receiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -244,7 +243,7 @@ def _reconnect(self): # pylint: disable=too-many-statements log.info("Receiver detached. 
Attempting reconnect.") return False log.info("Receiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -252,7 +251,7 @@ def _reconnect(self): # pylint: disable=too-many-statements log.info("Receiver couldn't authenticate. Attempting reconnect.") return False log.info("Receiver connection error (%r). Shutting down.", shutdown) - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: @@ -357,7 +356,7 @@ def receive(self, max_batch_size=None, timeout=None): self.reconnect() else: log.info("Receiver detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -366,7 +365,7 @@ def receive(self, max_batch_size=None, timeout=None): self.reconnect() else: log.info("Receiver detached. 
Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py index efad7b9f0caa..ccb193835c20 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/sender.py @@ -13,8 +13,8 @@ from uamqp.constants import MessageSendResult from azure.eventhub.common import EventData, _BatchSendEventData -from azure.eventhub.error import EventHubError, EventHubConnectionError, \ - EventHubAuthenticationError, EventHubMessageError, _error_handler +from azure.eventhub.error import EventHubError, ConnectError, \ + AuthenticationError, EventDataError, _error_handler log = logging.getLogger(__name__) @@ -131,16 +131,16 @@ def _open(self): self.reconnect() else: log.info("Sender detached. Failed to connect") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: log.info("Sender couldn't authenticate.", shutdown) - error = EventHubAuthenticationError(str(shutdown), shutdown) + error = AuthenticationError(str(shutdown), shutdown) raise error else: log.info("Sender connection error (%r).", shutdown) - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) raise error except Exception as e: log.info("Unexpected error occurred (%r)", e) @@ -169,7 +169,7 @@ def _reconnect(self): return True except errors.AuthenticationException as shutdown: log.info("Sender disconnected due to token expiry. 
Shutting down.") - error = EventHubAuthenticationError(str(shutdown), shutdown) + error = AuthenticationError(str(shutdown), shutdown) self.close(exception=error) raise error except (errors.LinkDetach, errors.ConnectionClose) as shutdown: @@ -177,7 +177,7 @@ def _reconnect(self): log.info("Sender detached. Attempting reconnect.") return False log.info("Sender reconnect failed. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -185,7 +185,7 @@ def _reconnect(self): log.info("Sender detached. Attempting reconnect.") return False log.info("Sender reconnect failed. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.AMQPConnectionError as shutdown: @@ -193,7 +193,7 @@ def _reconnect(self): log.info("Sender couldn't authenticate. Attempting reconnect.") return False log.info("Sender connection error (%r). Shutting down.", shutdown) - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: @@ -248,7 +248,7 @@ def _send_event_data(self, event_data): if self._outcome != MessageSendResult.Ok: raise Sender._error(self._outcome, self._condition) except errors.MessageException as failed: - error = EventHubMessageError(str(failed), failed) + error = EventDataError(str(failed), failed) self.close(exception=error) raise error except (errors.TokenExpired, errors.AuthenticationException): @@ -260,7 +260,7 @@ def _send_event_data(self, event_data): self.reconnect() else: log.info("Sender detached. 
Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except errors.MessageHandlerError as shutdown: @@ -269,7 +269,7 @@ def _send_event_data(self, event_data): self.reconnect() else: log.info("Sender detached. Shutting down.") - error = EventHubConnectionError(str(shutdown), shutdown) + error = ConnectError(str(shutdown), shutdown) self.close(exception=error) raise error except Exception as e: @@ -282,13 +282,10 @@ def _send_event_data(self, event_data): @staticmethod def _set_batching_label(event_datas, batching_label): - if batching_label: - ed_iter = iter(event_datas) - for ed in ed_iter: - ed._batching_label = batching_label - yield ed - else: - return event_datas + ed_iter = iter(event_datas) + for ed in ed_iter: + ed._batching_label = batching_label + yield ed def send(self, event_data, batching_label=None): """ @@ -320,7 +317,7 @@ def send(self, event_data, batching_label=None): else: wrapper_event_data = _BatchSendEventData( self._set_batching_label(event_data, batching_label), - batching_label=batching_label) + batching_label=batching_label) if batching_label else _BatchSendEventData(event_data) wrapper_event_data.message.on_send_complete = self._on_outcome self._send_event_data(wrapper_event_data) diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py index b19b39723f7f..fdb5c1ffea35 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_iothub_receive_async.py @@ -24,14 +24,11 @@ async def pump(receiver, sleep=None): async def get_partitions(iot_connection_str): - try: - client = EventHubClient.from_iothub_connection_string(iot_connection_str, network_tracing=True) - receiver = client.create_receiver(partition_id="0", prefetch=1000, 
operation='/messages/events') - async with receiver: - partitions = await client.get_properties() - return partitions["partition_ids"] - finally: - pass + client = EventHubClient.from_iothub_connection_string(iot_connection_str, network_tracing=True) + receiver = client.create_receiver(partition_id="0", prefetch=1000, operation='/messages/events') + async with receiver: + partitions = await client.get_properties() + return partitions["partition_ids"] @pytest.mark.liveTest @@ -39,13 +36,10 @@ async def get_partitions(iot_connection_str): async def test_iothub_receive_multiple_async(iot_connection_str): partitions = await get_partitions(iot_connection_str) client = EventHubClient.from_iothub_connection_string(iot_connection_str, network_tracing=True) - try: - receivers = [] - for p in partitions: - receivers.append(client.create_receiver(partition_id=p, prefetch=10, operation='/messages/events')) - outputs = await asyncio.gather(*[pump(r) for r in receivers]) - - assert isinstance(outputs[0], int) and outputs[0] <= 10 - assert isinstance(outputs[1], int) and outputs[1] <= 10 - finally: - pass + receivers = [] + for p in partitions: + receivers.append(client.create_receiver(partition_id=p, prefetch=10, operation='/messages/events')) + outputs = await asyncio.gather(*[pump(r) for r in receivers]) + + assert isinstance(outputs[0], int) and outputs[0] <= 10 + assert isinstance(outputs[1], int) and outputs[1] <= 10 diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py index cff4f73b9b68..4e904d19453f 100644 --- a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_negative_async.py @@ -14,9 +14,9 @@ EventData, EventPosition, EventHubError, - EventHubConnectionError, - EventHubAuthenticationError, - EventHubMessageError, + ConnectError, + AuthenticationError, + EventDataError, ) from azure.eventhub.aio 
import EventHubClient @@ -26,7 +26,7 @@ async def test_send_with_invalid_hostname_async(invalid_hostname, connstr_receiv _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=True) sender = client.create_sender() - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await sender._open() @@ -35,7 +35,7 @@ async def test_send_with_invalid_hostname_async(invalid_hostname, connstr_receiv async def test_receive_with_invalid_hostname_async(invalid_hostname): client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=True) sender = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await sender._open() @@ -45,7 +45,7 @@ async def test_send_with_invalid_key_async(invalid_key, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_key, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await sender._open() @@ -54,7 +54,7 @@ async def test_send_with_invalid_key_async(invalid_key, connstr_receivers): async def test_receive_with_invalid_key_async(invalid_key): client = EventHubClient.from_connection_string(invalid_key, network_tracing=True) sender = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await sender._open() @@ -64,7 +64,7 @@ async def test_send_with_invalid_policy_async(invalid_policy, connstr_receivers) _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_policy, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await sender._open() @@ -73,7 +73,7 @@ async def 
test_send_with_invalid_policy_async(invalid_policy, connstr_receivers) async def test_receive_with_invalid_policy_async(invalid_policy): client = EventHubClient.from_connection_string(invalid_policy, network_tracing=True) sender = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await sender._open() @@ -97,7 +97,7 @@ async def test_send_partition_key_with_partition_async(connection_str): async def test_non_existing_entity_sender_async(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) sender = client.create_sender(partition_id="1") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await sender._open() @@ -106,7 +106,7 @@ async def test_non_existing_entity_sender_async(connection_str): async def test_non_existing_entity_receiver_async(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): await receiver._open() @@ -118,7 +118,7 @@ async def test_receive_from_invalid_partitions_async(connection_str): client = EventHubClient.from_connection_string(connection_str, network_tracing=True) receiver = client.create_receiver(partition_id=p) try: - with pytest.raises(EventHubConnectionError): + with pytest.raises(ConnectError): await receiver.receive(timeout=10) finally: await receiver.close() @@ -132,7 +132,7 @@ async def test_send_to_invalid_partitions_async(connection_str): client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id=p) try: - with pytest.raises(EventHubConnectionError): + with pytest.raises(ConnectError): await sender._open() finally: await sender.close() @@ -147,7 +147,7 @@ async def 
test_send_too_large_message_async(connection_str): sender = client.create_sender() try: data = EventData(b"A" * 1100000) - with pytest.raises(EventHubMessageError): + with pytest.raises(EventDataError): await sender.send(data) finally: await sender.close() @@ -186,18 +186,16 @@ async def test_max_receivers_async(connstr_senders): receivers = [] for i in range(6): receivers.append(client.create_receiver(partition_id="0", prefetch=1000, event_position=EventPosition('@latest'))) - try: - outputs = await asyncio.gather( - pump(receivers[0]), - pump(receivers[1]), - pump(receivers[2]), - pump(receivers[3]), - pump(receivers[4]), - pump(receivers[5]), - return_exceptions=True) - print(outputs) - failed = [o for o in outputs if isinstance(o, EventHubError)] - assert len(failed) == 1 - print(failed[0].message) - finally: - pass + + outputs = await asyncio.gather( + pump(receivers[0]), + pump(receivers[1]), + pump(receivers[2]), + pump(receivers[3]), + pump(receivers[4]), + pump(receivers[5]), + return_exceptions=True) + print(outputs) + failed = [o for o in outputs if isinstance(o, EventHubError)] + assert len(failed) == 1 + print(failed[0].message) diff --git a/sdk/eventhub/azure-eventhubs/tests/test_negative.py b/sdk/eventhub/azure-eventhubs/tests/test_negative.py index 31978155ed75..206c5b415002 100644 --- a/sdk/eventhub/azure-eventhubs/tests/test_negative.py +++ b/sdk/eventhub/azure-eventhubs/tests/test_negative.py @@ -13,9 +13,9 @@ EventData, EventPosition, EventHubError, - EventHubAuthenticationError, - EventHubConnectionError, - EventHubMessageError, + AuthenticationError, + ConnectError, + EventDataError, EventHubClient) @@ -24,7 +24,7 @@ def test_send_with_invalid_hostname(invalid_hostname, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): 
sender._open() @@ -32,7 +32,7 @@ def test_send_with_invalid_hostname(invalid_hostname, connstr_receivers): def test_receive_with_invalid_hostname_sync(invalid_hostname): client = EventHubClient.from_connection_string(invalid_hostname, network_tracing=True) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): receiver._open() @@ -41,7 +41,7 @@ def test_send_with_invalid_key(invalid_key, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_key, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): sender._open() @@ -49,7 +49,7 @@ def test_send_with_invalid_key(invalid_key, connstr_receivers): def test_receive_with_invalid_key_sync(invalid_key): client = EventHubClient.from_connection_string(invalid_key, network_tracing=True) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): receiver._open() @@ -58,7 +58,7 @@ def test_send_with_invalid_policy(invalid_policy, connstr_receivers): _, receivers = connstr_receivers client = EventHubClient.from_connection_string(invalid_policy, network_tracing=False) sender = client.create_sender() - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): sender._open() @@ -66,7 +66,7 @@ def test_send_with_invalid_policy(invalid_policy, connstr_receivers): def test_receive_with_invalid_policy_sync(invalid_policy): client = EventHubClient.from_connection_string(invalid_policy, network_tracing=True) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): receiver._open() @@ -88,7 +88,7 @@ def test_send_partition_key_with_partition_sync(connection_str): def 
test_non_existing_entity_sender(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) sender = client.create_sender(partition_id="1") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): sender._open() @@ -96,7 +96,7 @@ def test_non_existing_entity_sender(connection_str): def test_non_existing_entity_receiver(connection_str): client = EventHubClient.from_connection_string(connection_str, eventhub="nemo", network_tracing=False) receiver = client.create_receiver(partition_id="0") - with pytest.raises(EventHubAuthenticationError): + with pytest.raises(AuthenticationError): receiver._open() @@ -107,7 +107,7 @@ def test_receive_from_invalid_partitions_sync(connection_str): client = EventHubClient.from_connection_string(connection_str, network_tracing=True) receiver = client.create_receiver(partition_id=p) try: - with pytest.raises(EventHubConnectionError): + with pytest.raises(ConnectError): receiver.receive(timeout=10) finally: receiver.close() @@ -120,7 +120,7 @@ def test_send_to_invalid_partitions(connection_str): client = EventHubClient.from_connection_string(connection_str, network_tracing=False) sender = client.create_sender(partition_id=p) try: - with pytest.raises(EventHubConnectionError): + with pytest.raises(ConnectError): sender._open() finally: sender.close() @@ -134,7 +134,7 @@ def test_send_too_large_message(connection_str): sender = client.create_sender() try: data = EventData(b"A" * 1100000) - with pytest.raises(EventHubMessageError): + with pytest.raises(EventDataError): sender.send(data) finally: sender.close() From c2c2764e916b99abd9a639bb35caa99416a0a80c Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 2 Jun 2019 13:23:12 -0700 Subject: [PATCH 46/49] Fix a log issue --- .../azure-eventhubs/azure/eventhub/aio/receiver_async.py | 2 +- sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) 
diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py index b5363a4cc0e2..6614001dc93d 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/receiver_async.py @@ -194,7 +194,7 @@ async def _open(self): raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: - log.info("Receiver couldn't authenticate.", shutdown) + log.info("Receiver couldn't authenticate (%r).", shutdown) error = AuthenticationError(str(shutdown)) raise error else: diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py index edd897cd9077..4643cb29419f 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py +++ b/sdk/eventhub/azure-eventhubs/azure/eventhub/receiver.py @@ -187,7 +187,7 @@ def _open(self): raise error except errors.AMQPConnectionError as shutdown: if str(shutdown).startswith("Unable to open authentication session") and self.auto_reconnect: - log.info("Receiver couldn't authenticate.", shutdown) + log.info("Receiver couldn't authenticate (%r).", shutdown) error = AuthenticationError(str(shutdown), shutdown) raise error else: From 4085a420e8d668662aca573b09ce65879fe5049f Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 2 Jun 2019 16:10:44 -0700 Subject: [PATCH 47/49] fix get_partition_properties bug --- .../azure/eventhub/aio/event_hubs_client_async.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py index 32b1fab44b54..d88461c98d0c 100644 --- a/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py +++ 
b/sdk/eventhub/azure-eventhubs/azure/eventhub/aio/event_hubs_client_async.py @@ -157,7 +157,7 @@ async def get_partition_properties(self, partition): if partition_info: output['event_hub_path'] = partition_info[b'name'].decode('utf-8') output['id'] = partition_info[b'partition'].decode('utf-8') - output['begin_sequence_number'] = partition_info[b'begin_sequence_number'] + output['beginning_sequence_number'] = partition_info[b'begin_sequence_number'] output['last_enqueued_sequence_number'] = partition_info[b'last_enqueued_sequence_number'] output['last_enqueued_offset'] = partition_info[b'last_enqueued_offset'].decode('utf-8') output['last_enqueued_time_utc'] = datetime.datetime.utcfromtimestamp( From 1a42901169abd4b37753c16cf8faf0f70ddc55c8 Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 2 Jun 2019 16:13:31 -0700 Subject: [PATCH 48/49] add client properties live test --- .../tests/asynctests/test_properties_async.py | 45 +++++++++++++++++++ .../azure-eventhubs/tests/test_properties.py | 41 +++++++++++++++++ 2 files changed, 86 insertions(+) create mode 100644 sdk/eventhub/azure-eventhubs/tests/asynctests/test_properties_async.py create mode 100644 sdk/eventhub/azure-eventhubs/tests/test_properties.py diff --git a/sdk/eventhub/azure-eventhubs/tests/asynctests/test_properties_async.py b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_properties_async.py new file mode 100644 index 000000000000..20641033e5bb --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/tests/asynctests/test_properties_async.py @@ -0,0 +1,45 @@ +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+#-------------------------------------------------------------------------- + +import pytest +from azure.eventhub import SharedKeyCredentials +from azure.eventhub.aio import EventHubClient + + +@pytest.mark.liveTest +@pytest.mark.asyncio +async def test_get_properties(live_eventhub): + client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'], + SharedKeyCredentials(live_eventhub['key_name'], live_eventhub['access_key']) + ) + properties = await client.get_properties() + assert properties['path'] == live_eventhub['event_hub'] and properties['partition_ids'] == ['0', '1'] + + +@pytest.mark.liveTest +@pytest.mark.asyncio +async def test_get_partition_ids(live_eventhub): + client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'], + SharedKeyCredentials(live_eventhub['key_name'], live_eventhub['access_key']) + ) + partition_ids = await client.get_partition_ids() + assert partition_ids == ['0', '1'] + + +@pytest.mark.liveTest +@pytest.mark.asyncio +async def test_get_partition_properties(live_eventhub): + client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'], + SharedKeyCredentials(live_eventhub['key_name'], live_eventhub['access_key']) + ) + properties = await client.get_partition_properties('0') + assert properties['event_hub_path'] == live_eventhub['event_hub'] \ + and properties['id'] == '0' \ + and 'beginning_sequence_number' in properties \ + and 'last_enqueued_sequence_number' in properties \ + and 'last_enqueued_offset' in properties \ + and 'last_enqueued_time_utc' in properties \ + and 'is_empty' in properties diff --git a/sdk/eventhub/azure-eventhubs/tests/test_properties.py b/sdk/eventhub/azure-eventhubs/tests/test_properties.py new file mode 100644 index 000000000000..b1889bdcf179 --- /dev/null +++ b/sdk/eventhub/azure-eventhubs/tests/test_properties.py @@ -0,0 +1,41 @@ +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +#-------------------------------------------------------------------------- + +import pytest +from azure.eventhub import EventHubClient, SharedKeyCredentials + + +@pytest.mark.liveTest +def test_get_properties(live_eventhub): + client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'], + SharedKeyCredentials(live_eventhub['key_name'], live_eventhub['access_key']) + ) + properties = client.get_properties() + assert properties['path'] == live_eventhub['event_hub'] and properties['partition_ids'] == ['0', '1'] + + +@pytest.mark.liveTest +def test_get_partition_ids(live_eventhub): + client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'], + SharedKeyCredentials(live_eventhub['key_name'], live_eventhub['access_key']) + ) + partition_ids = client.get_partition_ids() + assert partition_ids == ['0', '1'] + + +@pytest.mark.liveTest +def test_get_partition_properties(live_eventhub): + client = EventHubClient(live_eventhub['hostname'], live_eventhub['event_hub'], + SharedKeyCredentials(live_eventhub['key_name'], live_eventhub['access_key']) + ) + properties = client.get_partition_properties('0') + assert properties['event_hub_path'] == live_eventhub['event_hub'] \ + and properties['id'] == '0' \ + and 'beginning_sequence_number' in properties \ + and 'last_enqueued_sequence_number' in properties \ + and 'last_enqueued_offset' in properties \ + and 'last_enqueued_time_utc' in properties \ + and 'is_empty' in properties From 8afabf600f2d5140c76d04d2056a4ee70605d288 Mon Sep 17 00:00:00 2001 From: yijxie Date: Sun, 2 Jun 2019 16:57:48 -0700 Subject: [PATCH 49/49] Revised setup.py for track 2 --- sdk/eventhub/azure-eventhubs/setup.py | 30 ++++++++++++++++++++------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/sdk/eventhub/azure-eventhubs/setup.py b/sdk/eventhub/azure-eventhubs/setup.py index 
1fdb12ec33d8..6f6cf3399021 100644 --- a/sdk/eventhub/azure-eventhubs/setup.py +++ b/sdk/eventhub/azure-eventhubs/setup.py @@ -8,6 +8,7 @@ import re import os.path +import sys from io import open from setuptools import find_packages, setup @@ -34,6 +35,22 @@ with open('HISTORY.rst') as f: history = f.read() +exclude_packages = [ + 'tests', + "tests.asynctests", + 'examples', + # Exclude packages that will be covered by PEP420 or nspkg + 'azure', + '*.eventprocessorhost', + '*.eventprocessorhost.*' + ] + +if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and sys.version_info[1] < 5): + exclude_packages.extend([ + '*.aio', + '*.aio.*' + ]) + setup( name=PACKAGE_NAME, version=version, @@ -44,28 +61,25 @@ author_email='azpysdkhelp@microsoft.com', url='https://github.com/Azure/azure-sdk-for-python', classifiers=[ - 'Development Status :: 5 - Production/Stable', + 'Development Status :: 3 - Alpha', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', + # 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'License :: OSI Approved :: MIT License', ], zip_safe=False, - packages=find_packages(exclude=[ - "azure", - "examples", - "tests", - "tests.asynctests"]), + packages=find_packages(exclude=exclude_packages), install_requires=[ 'uamqp~=1.2.0', 'msrestazure>=0.4.32,<2.0.0', 'azure-common~=1.1', - 'azure-storage-blob~=1.3' + 'azure-storage-blob~=1.3', + 'azure-core~=1.0', ], extras_require={ ":python_version<'3.0'": ['azure-nspkg'],