
Commit 996c297

sondrelg authored and rossmacarthur committed
deps: Upgrade redis stubs (python-arq#399)
1 parent dc31508 · commit 996c297

5 files changed: +58 -44 lines

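The change is mechanical across arq/connections.py, arq/jobs.py and arq/worker.py: the upgraded types-redis stubs no longer flag un-awaited pipeline commands, so the `# type: ignore[unused-coroutine]` and `# type: ignore[no-untyped-call]` suppressions are removed, and each pipeline variable is instead pre-annotated as `Pipeline[bytes]` under a `TYPE_CHECKING` guard. A minimal standalone sketch of the pattern (the keys, values and local Redis server are illustrative assumptions, not part of the commit):

```python
import asyncio
from typing import TYPE_CHECKING

from redis.asyncio import Redis

if TYPE_CHECKING:
    # Same TYPE_CHECKING import the commit adds in each module.
    from redis.asyncio.client import Pipeline


async def main() -> None:
    redis: 'Redis[bytes]' = Redis()  # assumes a Redis server on localhost:6379
    pipe: 'Pipeline[bytes]'  # pre-annotate so mypy pins the generic parameter
    async with redis.pipeline(transaction=True) as pipe:
        # Queued commands are not awaited individually; with the upgraded
        # stubs this no longer needs a type: ignore comment.
        pipe.psetex('example-key', 5000, b'1')  # hypothetical key/value
        pipe.zadd('example-queue', {'example-member': 123})  # hypothetical
        set_ok, n_added = await pipe.execute()
    print(set_ok, n_added)
    await redis.close()


if __name__ == '__main__':
    asyncio.run(main())
```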

arq/connections.py (+11 -6)

```diff
@@ -16,6 +16,9 @@
 from .jobs import Deserializer, Job, JobDef, JobResult, Serializer, deserialize_job, serialize_job
 from .utils import timestamp_ms, to_ms, to_unix_ms
 
+if TYPE_CHECKING:
+    from redis.asyncio.client import Pipeline
+
 logger = logging.getLogger('arq.connections')
 
 
@@ -145,6 +148,7 @@ async def enqueue_job(
         defer_by_ms = to_ms(_defer_by)
         expires_ms = to_ms(_expires)
 
+        pipe: 'Pipeline[bytes]'
         async with self.pipeline(transaction=True) as pipe:
             await pipe.watch(job_key)
             if await pipe.exists(job_key, result_key_prefix + job_id):
@@ -163,8 +167,8 @@ async def enqueue_job(
 
             job = serialize_job(function, args, kwargs, _job_try, enqueue_time_ms, serializer=self.job_serializer)
             pipe.multi()
-            pipe.psetex(job_key, expires_ms, job)  # type: ignore[no-untyped-call]
-            pipe.zadd(_queue_name, {job_id: score})  # type: ignore[unused-coroutine]
+            pipe.psetex(job_key, expires_ms, job)
+            pipe.zadd(_queue_name, {job_id: score})
             try:
                 await pipe.execute()
             except WatchError:
@@ -287,11 +291,12 @@ def pool_factory(*args: Any, **kwargs: Any) -> ArqRedis:
 
 
 async def log_redis_info(redis: 'Redis[bytes]', log_func: Callable[[str], Any]) -> None:
+    pipe: 'Pipeline[bytes]'
    async with redis.pipeline(transaction=False) as pipe:
-        pipe.info(section='Server')  # type: ignore[unused-coroutine]
-        pipe.info(section='Memory')  # type: ignore[unused-coroutine]
-        pipe.info(section='Clients')  # type: ignore[unused-coroutine]
-        pipe.dbsize()  # type: ignore[unused-coroutine]
+        pipe.info(section='Server')
+        pipe.info(section='Memory')
+        pipe.info(section='Clients')
+        pipe.dbsize()
     info_server, info_memory, info_clients, key_count = await pipe.execute()
 
     redis_version = info_server.get('redis_version', '?')
```
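For reference, a short usage sketch of the `log_redis_info` helper touched above; `create_pool` and `RedisSettings` are arq's public API, while the running local Redis server is an assumption:

```python
import asyncio
import logging

from arq.connections import RedisSettings, create_pool, log_redis_info

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('example')


async def main() -> None:
    # create_pool returns an ArqRedis instance; the default settings point
    # at localhost:6379, which this sketch assumes is running.
    redis = await create_pool(RedisSettings())
    await log_redis_info(redis, logger.info)


if __name__ == '__main__':
    asyncio.run(main())
```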

arq/jobs.py (+16 -9)

```diff
@@ -5,13 +5,17 @@
 from dataclasses import dataclass
 from datetime import datetime
 from enum import Enum
-from typing import Any, Callable, Dict, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple
 
 from redis.asyncio import Redis
 
 from .constants import abort_jobs_ss, default_queue_name, in_progress_key_prefix, job_key_prefix, result_key_prefix
 from .utils import ms_to_datetime, poll, timestamp_ms
 
+if TYPE_CHECKING:
+    from redis.asyncio.client import Pipeline
+
+
 logger = logging.getLogger('arq.jobs')
 
 Serializer = Callable[[Dict[str, Any]], bytes]
@@ -89,7 +93,7 @@ async def result(
         Get the result of the job or, if the job raised an exception, reraise it.
 
         This function waits for the result if it's not yet available and the job is
-        present in the queue. Otherwise ``ResultNotFound`` is raised.
+        present in the queue. Otherwise, ``ResultNotFound`` is raised.
 
         :param timeout: maximum time to wait for the job result before raising ``TimeoutError``, will wait forever
         :param poll_delay: how often to poll redis for the job result
@@ -102,9 +106,10 @@ async def result(
             poll_delay = pole_delay
 
         async for delay in poll(poll_delay):
+            tr: 'Pipeline[bytes]'
             async with self._redis.pipeline(transaction=True) as tr:
-                tr.get(result_key_prefix + self.job_id)  # type: ignore[unused-coroutine]
-                tr.zscore(self._queue_name, self.job_id)  # type: ignore[unused-coroutine]
+                tr.get(result_key_prefix + self.job_id)
+                tr.zscore(self._queue_name, self.job_id)
                 v, s = await tr.execute()
 
             if v:
@@ -153,10 +158,11 @@ async def status(self) -> JobStatus:
         """
         Status of the job.
         """
+        tr: 'Pipeline[bytes]'
         async with self._redis.pipeline(transaction=True) as tr:
-            tr.exists(result_key_prefix + self.job_id)  # type: ignore[unused-coroutine]
-            tr.exists(in_progress_key_prefix + self.job_id)  # type: ignore[unused-coroutine]
-            tr.zscore(self._queue_name, self.job_id)  # type: ignore[unused-coroutine]
+            tr.exists(result_key_prefix + self.job_id)
+            tr.exists(in_progress_key_prefix + self.job_id)
+            tr.zscore(self._queue_name, self.job_id)
             is_complete, is_in_progress, score = await tr.execute()
 
         if is_complete:
@@ -179,9 +185,10 @@ async def abort(self, *, timeout: Optional[float] = None, poll_delay: float = 0.
         """
         job_info = await self.info()
         if job_info and job_info.score and job_info.score > timestamp_ms():
+            tr: 'Pipeline[bytes]'
             async with self._redis.pipeline(transaction=True) as tr:
-                tr.zrem(self._queue_name, self.job_id)  # type: ignore[unused-coroutine]
-                tr.zadd(self._queue_name, {self.job_id: 1})  # type: ignore[unused-coroutine]
+                tr.zrem(self._queue_name, self.job_id)
+                tr.zadd(self._queue_name, {self.job_id: 1})
                 await tr.execute()
 
         await self._redis.zadd(abort_jobs_ss, {self.job_id: timestamp_ms()})
```
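None of these `Job` changes alter caller-facing behavior; only the internal pipeline typing changed. A quick sketch of the public API that sits on top of the pipelines above (`download_content` is a hypothetical worker function name, and a local Redis server is assumed):

```python
import asyncio

from arq.connections import RedisSettings, create_pool


async def main() -> None:
    redis = await create_pool(RedisSettings())
    # 'download_content' would need to be registered on a worker to run.
    job = await redis.enqueue_job('download_content', 'https://example.com')
    if job is not None:  # enqueue_job returns None if the job ID already exists
        print(await job.status())  # uses the pipelined exists/zscore calls above


if __name__ == '__main__':
    asyncio.run(main())
```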

arq/worker.py (+25 -21)

```diff
@@ -41,6 +41,8 @@
 )
 
 if TYPE_CHECKING:
+    from redis.asyncio.client import Pipeline
+
     from .typing import SecondsTimedelta, StartupShutdown, WorkerCoroutine, WorkerSettingsType  # noqa F401
 
 logger = logging.getLogger('arq.worker')
@@ -404,11 +406,10 @@ async def _cancel_aborted_jobs(self) -> None:
         """
         Go through job_ids in the abort_jobs_ss sorted set and cancel those tasks.
         """
+        pipe: 'Pipeline[bytes]'
         async with self.pool.pipeline(transaction=True) as pipe:
-            pipe.zrange(abort_jobs_ss, start=0, end=-1)  # type: ignore[unused-coroutine]
-            pipe.zremrangebyscore(  # type: ignore[unused-coroutine]
-                abort_jobs_ss, min=timestamp_ms() + abort_job_max_age, max=float('inf')
-            )
+            pipe.zrange(abort_jobs_ss, start=0, end=-1)
+            pipe.zremrangebyscore(abort_jobs_ss, min=timestamp_ms() + abort_job_max_age, max=float('inf'))
             abort_job_ids, _ = await pipe.execute()
 
         aborted: Set[str] = set()
@@ -445,6 +446,7 @@ async def start_jobs(self, job_ids: List[bytes]) -> None:
 
             job_id = job_id_b.decode()
             in_progress_key = in_progress_key_prefix + job_id
+            pipe: 'Pipeline[bytes]'
             async with self.pool.pipeline(transaction=True) as pipe:
                 await pipe.watch(in_progress_key)
                 ongoing_exists = await pipe.exists(in_progress_key)
@@ -457,9 +459,7 @@ async def start_jobs(self, job_ids: List[bytes]) -> None:
                     continue
 
                 pipe.multi()
-                pipe.psetex(  # type: ignore[no-untyped-call]
-                    in_progress_key, int(self.in_progress_timeout_s * 1000), b'1'
-                )
+                pipe.psetex(in_progress_key, int(self.in_progress_timeout_s * 1000), b'1')
                 try:
                     await pipe.execute()
                 except (ResponseError, WatchError):
@@ -474,12 +474,13 @@ async def start_jobs(self, job_ids: List[bytes]) -> None:
 
     async def run_job(self, job_id: str, score: int) -> None:  # noqa: C901
         start_ms = timestamp_ms()
+        pipe: 'Pipeline[bytes]'
         async with self.pool.pipeline(transaction=True) as pipe:
-            pipe.get(job_key_prefix + job_id)  # type: ignore[unused-coroutine]
-            pipe.incr(retry_key_prefix + job_id)  # type: ignore[unused-coroutine]
-            pipe.expire(retry_key_prefix + job_id, 88400)  # type: ignore[unused-coroutine]
+            pipe.get(job_key_prefix + job_id)
+            pipe.incr(retry_key_prefix + job_id)
+            pipe.expire(retry_key_prefix + job_id, 88400)
             if self.allow_abort_jobs:
-                pipe.zrem(abort_jobs_ss, job_id)  # type: ignore[unused-coroutine]
+                pipe.zrem(abort_jobs_ss, job_id)
                 v, job_try, _, abort_job = await pipe.execute()
             else:
                 v, job_try, _ = await pipe.execute()
@@ -686,41 +687,44 @@ async def finish_job(
         incr_score: Optional[int],
         keep_in_progress: Optional[float],
     ) -> None:
+
+        tr: 'Pipeline[bytes]'
         async with self.pool.pipeline(transaction=True) as tr:
             delete_keys = []
             in_progress_key = in_progress_key_prefix + job_id
             if keep_in_progress is None:
                 delete_keys += [in_progress_key]
             else:
-                tr.pexpire(in_progress_key, to_ms(keep_in_progress))  # type: ignore[unused-coroutine]
+                tr.pexpire(in_progress_key, to_ms(keep_in_progress))
 
             if finish:
                 if result_data:
                     expire = None if keep_result_forever else result_timeout_s
-                    tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))  # type: ignore[unused-coroutine]
+                    tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))
                 delete_keys += [retry_key_prefix + job_id, job_key_prefix + job_id]
-                tr.zrem(abort_jobs_ss, job_id)  # type: ignore[unused-coroutine]
-                tr.zrem(self.queue_name, job_id)  # type: ignore[unused-coroutine]
+                tr.zrem(abort_jobs_ss, job_id)
+                tr.zrem(self.queue_name, job_id)
             elif incr_score:
-                tr.zincrby(self.queue_name, incr_score, job_id)  # type: ignore[unused-coroutine]
+                tr.zincrby(self.queue_name, incr_score, job_id)
             if delete_keys:
-                tr.delete(*delete_keys)  # type: ignore[unused-coroutine]
+                tr.delete(*delete_keys)
             await tr.execute()
 
     async def finish_failed_job(self, job_id: str, result_data: Optional[bytes]) -> None:
+        tr: 'Pipeline[bytes]'
         async with self.pool.pipeline(transaction=True) as tr:
-            tr.delete(  # type: ignore[unused-coroutine]
+            tr.delete(
                 retry_key_prefix + job_id,
                 in_progress_key_prefix + job_id,
                 job_key_prefix + job_id,
             )
-            tr.zrem(abort_jobs_ss, job_id)  # type: ignore[unused-coroutine]
-            tr.zrem(self.queue_name, job_id)  # type: ignore[unused-coroutine]
+            tr.zrem(abort_jobs_ss, job_id)
+            tr.zrem(self.queue_name, job_id)
             # result_data would only be None if serializing the result fails
             keep_result = self.keep_result_forever or self.keep_result_s > 0
             if result_data is not None and keep_result:  # pragma: no branch
                 expire = 0 if self.keep_result_forever else self.keep_result_s
-                tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))  # type: ignore[unused-coroutine]
+                tr.set(result_key_prefix + job_id, result_data, px=to_ms(expire))
             await tr.execute()
 
     async def heart_beat(self) -> None:
```

requirements/linting.in (+1 -1)

```diff
@@ -4,4 +4,4 @@ flake8-quotes>=3,<4
 isort[colors]>=5,<6
 mypy<1
 types-pytz
-types_redis>=4.2,<4.3
+types_redis>=4
```

requirements/linting.txt (+5 -7)

```diff
@@ -1,8 +1,8 @@
 #
-# This file is autogenerated by pip-compile with python 3.9
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
 #
-#    pip-compile --output-file=requirements/linting.txt requirements/linting.in
+#    pip-compile requirements/linting.in
 #
 black==22.6.0
     # via -r requirements/linting.in
@@ -40,9 +40,7 @@ tomli==2.0.1
     # mypy
 types-pytz==2022.2.1.0
     # via -r requirements/linting.in
-types-redis==4.2.8
+types-redis==4.5.5.2
     # via -r requirements/linting.in
 typing-extensions==4.3.0
-    # via
-    #   black
-    #   mypy
+    # via mypy
```
