
Commit cfff118

refactor: code style
1 parent 169f5b8 commit cfff118

7 files changed: 21 additions, 20 deletions


src/nats_queue/nats_job.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 from datetime import datetime, timedelta
 import time
 import uuid
-from typing import Any, Optional, Dict
+from typing import Any, Dict


 class Job:
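
A side note on the typing change: `Optional` is only required when an annotation admits `None`, so once no such annotation remains the import can be dropped. The snippet below is a standalone illustration (not code from this repo) of the distinction.

from typing import Any, Dict, Optional

# Optional[...] is required only when None is a legal value:
def find_meta(meta: Dict[str, Any], key: str) -> Optional[str]:
    value = meta.get(key)          # may be None
    return None if value is None else str(value)

# Annotations that never involve None need only Any/Dict:
def describe(meta: Dict[str, Any]) -> str:
    return ", ".join(f"{k}={v}" for k, v in meta.items())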

src/nats_queue/nats_queue.py

Lines changed: 0 additions & 1 deletion
@@ -1,5 +1,4 @@
 import logging
-import nats
 from nats.aio.client import Client
 from nats.js.errors import BadRequestError
 import json
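
For context on why the bare `import nats` can go: once connections are made through the imported `Client` class directly, the top-level module is never referenced and a linter flags it as unused. A minimal sketch (the server URL is a placeholder, and this is not code from the queue module itself):

import asyncio

from nats.aio.client import Client


async def main() -> None:
    # Connecting through the Client class; the top-level `nats` module
    # is never referenced, so `import nats` would be reported as unused.
    nc = Client()
    await nc.connect(servers=["nats://127.0.0.1:4222"])  # placeholder URL
    await nc.close()


if __name__ == "__main__":
    asyncio.run(main())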

src/nats_queue/nats_worker.py

Lines changed: 15 additions & 9 deletions
@@ -3,9 +3,9 @@
 import json
 import logging
 import time
-from typing import Any, Awaitable, Callable, Dict, List, Optional
+from typing import Awaitable, Callable, Dict, List, Optional
 import uuid
-from nats_queue.nats_limiter import FixedWindowLimiter, IntervalLimiter, Limiter
+from nats_queue.nats_limiter import FixedWindowLimiter, IntervalLimiter
 from nats.js.client import JetStreamContext
 from nats.aio.client import Client
 from nats.aio.msg import Msg
@@ -107,7 +107,7 @@ async def _process_task(self, job: Msg):
             delay = int(planned_time.total_seconds())
             await job.nak(delay=delay)
             logger.debug(
-                f"Job: {job_data["name"]} id={job_data["id"]} is scheduled for later. "
+                f"Job: {job_data["name"]} id={job_data["id"]} is scheduled later"
                 f"Requeueing in {delay} seconds."
             )
             return
@@ -120,7 +120,8 @@ async def _process_task(self, job: Msg):
             return

         logger.info(
-            f'Job: {job_data["name"]} id={job_data["id"]} is started with data={job_data["data"]}) in queue={job_data["queue_name"]}'
+            f"""Job: {job_data["name"]} id={job_data["id"]} is started
+            with data={job_data["data"]}) in queue={job_data["queue_name"]}"""
         )

         timeout = job_data["meta"]["timeout"]
@@ -160,17 +161,20 @@ async def fetch_messages(
         try:
             msgs = await sub.fetch(count, timeout=self.fetch_timeout)
             logger.debug(
-                f"Consumer: name={(await sub.consumer_info()).name} fetched {len(msgs)} messages"
+                f"""Consumer: name={(await sub.consumer_info()).name}
+                fetched {len(msgs)} messages"""
             )
             return msgs
         except TimeoutError:
             logger.debug(
-                f"Consumer: name={(await sub.consumer_info()).name} failed to fetch messages: TimeoutError"
+                f"""Consumer: name={(await sub.consumer_info()).name}
+                failed to fetch messages: TimeoutError"""
             )
             return []
         except Exception as e:
             logger.error(
-                f"Consumer: name={(await sub.consumer_info()).name} error while fetching messages: {e}"
+                f"""Consumer: name={(await sub.consumer_info()).name}
+                error while fetching messages: {e}"""
             )
             raise

@@ -183,12 +187,14 @@ async def get_subscriptions(self) -> List[JetStreamContext.PullSubscription]:
                 topic, durable=f"worker_group_{priority}"
             )
             logger.info(
-                f"Consumer: name={self.name} successfully subscribed to topic {topic}."
+                f"""Consumer: name={self.name}
+                successfully subscribed to topic {topic}."""
             )
             subscriptions.append(sub)
         except Exception as e:
             logger.error(
-                f"Consumer: name={self.name} error while subscribing to topic {topic}: {e}"
+                f"""Consumer: name={self.name} error
+                while subscribing to topic {topic}: {e}"""
             )
             raise

tests/test_job.py

Lines changed: 0 additions & 1 deletion
@@ -1,4 +1,3 @@
-import uuid
 import pytest
 from nats_queue.nats_job import Job

tests/test_queue.py

Lines changed: 1 addition & 2 deletions
@@ -1,4 +1,3 @@
-import uuid
 import pytest
 import pytest_asyncio
 import json
@@ -55,7 +54,7 @@ async def test_queue_close_success():
     await queue.setup()
     await queue.manager.delete_stream(queue.name)
     await queue.close()
-    assert queue.client.is_closed == True
+    assert queue.client.is_closed is True


 @pytest.mark.asyncio
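
The `== True` → `is True` change here (and the `== None` → `is None` change in the next file) is the pycodestyle E712/E711 fix: `True` and `None` are singletons, so identity comparison cannot be fooled by a custom `__eq__`. A small self-contained illustration:

class AlwaysEqual:
    """Toy object whose __eq__ answers yes to everything."""

    def __eq__(self, other):
        return True


obj = AlwaysEqual()

assert (obj == True) is True    # equality can be overridden...
assert (obj is True) is False   # ...identity cannot
assert (obj == None) is True    # same story for None
assert obj is not None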

tests/test_rate_limit.py

Lines changed: 1 addition & 3 deletions
@@ -1,6 +1,4 @@
-import asyncio
 import pytest
-import time
 from nats_queue.nats_limiter import FixedWindowLimiter, IntervalLimiter


@@ -33,7 +31,7 @@ async def test_rate_limiter_increment():
     count = limiter_interval.inc()

     assert limiter.count == 2
-    assert count == None
+    assert count is None


 @pytest.mark.asyncio

tests/test_workers.py

Lines changed: 3 additions & 3 deletions
@@ -122,11 +122,11 @@ async def test_worker_fetch_messages_success(get_client):
     jobs = [
         Job(
             queue_name="my_queue",
-            name=f"task_1",
+            name="task_1",
         ),
         Job(
             queue_name="my_queue",
-            name=f"task_2",
+            name="task_2",
         ),
     ]
     await queue.addJobs(jobs)
@@ -329,7 +329,7 @@ async def test_worker_get_subscriptions_error(get_client):
     client = get_client

     worker = Worker(client, name="my_queue", processor=process_job)
-    with pytest.raises(Exception) as e:
+    with pytest.raises(Exception):
         await worker.get_subscriptions()

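
Two small cleanups close out the commit: the `f` prefix is dropped from strings that have no placeholders (flake8's F541), and `pytest.raises` no longer binds an exception object that nothing inspects. A hedged sketch of both patterns, using a hypothetical `boom()` helper:

import pytest


def boom() -> None:
    # Hypothetical helper, used only for this illustration.
    raise ValueError("bad input")


def test_boom_raises():
    # No `as e` binding needed when nothing is asserted about the exception.
    with pytest.raises(ValueError):
        boom()


def test_boom_message():
    # Bind the ExceptionInfo only when the test inspects the exception.
    with pytest.raises(ValueError) as exc_info:
        boom()
    assert "bad input" in str(exc_info.value)


# F541: a literal with no placeholders does not need the f prefix.
name = "task_1"      # preferred
# name = f"task_1"   # flagged by the linter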
