import asyncio
import copy
import enum
import inspect
import socket
import ssl
import sys
import warnings
import weakref
from abc import abstractmethod
from itertools import chain
from types import MappingProxyType
from typing import (
    Any,
    Callable,
    Iterable,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Type,
    TypeVar,
    Union,
)
from urllib.parse import ParseResult, parse_qs, unquote, urlparse

from ..utils import format_error_message

# the functionality is available in 3.11.x but has a major issue before
# 3.11.3. See https://github.com/redis/redis-py/issues/2633
if sys.version_info >= (3, 11, 3):
    from asyncio import timeout as async_timeout
else:
    from async_timeout import timeout as async_timeout

from redis.asyncio.retry import Retry
from redis.backoff import NoBackoff
from redis.compat import Protocol, TypedDict
from redis.connection import DEFAULT_RESP_VERSION
from redis.credentials import CredentialProvider, UsernamePasswordCredentialProvider
from redis.exceptions import (
    AuthenticationError,
    AuthenticationWrongNumberOfArgsError,
    ConnectionError,
    DataError,
    RedisError,
    ResponseError,
    TimeoutError,
)
from redis.typing import EncodableT
from redis.utils import HIREDIS_AVAILABLE, get_lib_version, str_if_bytes

from .._parsers import (
    BaseParser,
    Encoder,
    _AsyncHiredisParser,
    _AsyncRESP2Parser,
    _AsyncRESP3Parser,
)
SYM_STAR = b"*"
SYM_DOLLAR = b"$"
SYM_CRLF = b"\r\n"
SYM_LF = b"\n"
SYM_EMPTY = b""


class _Sentinel(enum.Enum):
    sentinel = object()


SENTINEL = _Sentinel.sentinel


DefaultParser: Type[Union[_AsyncRESP2Parser, _AsyncRESP3Parser, _AsyncHiredisParser]]
if HIREDIS_AVAILABLE:
    DefaultParser = _AsyncHiredisParser
else:
    DefaultParser = _AsyncRESP2Parser


class ConnectCallbackProtocol(Protocol):
    def __call__(self, connection: "AbstractConnection"):
        ...


class AsyncConnectCallbackProtocol(Protocol):
    async def __call__(self, connection: "AbstractConnection"):
        ...


ConnectCallbackT = Union[ConnectCallbackProtocol, AsyncConnectCallbackProtocol]


class AbstractConnection:
    """Manages communication to and from a Redis server"""

    __slots__ = (
        "db",
        "username",
        "client_name",
        "lib_name",
        "lib_version",
        "credential_provider",
        "password",
        "socket_timeout",
        "socket_connect_timeout",
        "redis_connect_func",
        "retry_on_timeout",
        "retry_on_error",
        "health_check_interval",
        "next_health_check",
        "last_active_at",
        "encoder",
        "ssl_context",
        "protocol",
        "_reader",
        "_writer",
        "_parser",
        "_connect_callbacks",
        "_buffer_cutoff",
        "_lock",
        "_socket_read_size",
        "__dict__",
    )

    def __init__(
        self,
        *,
        db: Union[str, int] = 0,
        password: Optional[str] = None,
        socket_timeout: Optional[float] = None,
        socket_connect_timeout: Optional[float] = None,
        retry_on_timeout: bool = False,
        retry_on_error: Union[list, _Sentinel] = SENTINEL,
        encoding: str = "utf-8",
        encoding_errors: str = "strict",
        decode_responses: bool = False,
        parser_class: Type[BaseParser] = DefaultParser,
        socket_read_size: int = 65536,
        health_check_interval: float = 0,
        client_name: Optional[str] = None,
        lib_name: Optional[str] = "redis-py",
        lib_version: Optional[str] = get_lib_version(),
        username: Optional[str] = None,
        retry: Optional[Retry] = None,
        redis_connect_func: Optional[ConnectCallbackT] = None,
        encoder_class: Type[Encoder] = Encoder,
        credential_provider: Optional[CredentialProvider] = None,
        protocol: Optional[int] = 2,
    ):
        if (username or password) and credential_provider is not None:
            raise DataError(
                "'username' and 'password' cannot be passed along with 'credential_"
                "provider'. Please provide only one of the following arguments: \n"
                "1. 'password' and (optional) 'username'\n"
                "2. 'credential_provider'"
            )
        self.db = db
        self.client_name = client_name
        self.lib_name = lib_name
        self.lib_version = lib_version
        self.credential_provider = credential_provider
        self.password = password
        self.username = username
        self.socket_timeout = socket_timeout
        if socket_connect_timeout is None:
            socket_connect_timeout = socket_timeout
        self.socket_connect_timeout = socket_connect_timeout
        self.retry_on_timeout = retry_on_timeout
        if retry_on_error is SENTINEL:
            retry_on_error = []
        if retry_on_timeout:
            retry_on_error.append(TimeoutError)
            retry_on_error.append(socket.timeout)
            retry_on_error.append(asyncio.TimeoutError)
        self.retry_on_error = retry_on_error
        if retry or retry_on_error:
            if not retry:
                self.retry = Retry(NoBackoff(), 1)
            else:
                # deep-copy the Retry object as it is mutable
                self.retry = copy.deepcopy(retry)
            # Update the retry's supported errors with the specified errors
            self.retry.update_supported_errors(retry_on_error)
        else:
            self.retry = Retry(NoBackoff(), 0)
        self.health_check_interval = health_check_interval
        self.next_health_check: float = -1
        self.encoder = encoder_class(encoding, encoding_errors, decode_responses)
        self.redis_connect_func = redis_connect_func
        self._reader: Optional[asyncio.StreamReader] = None
        self._writer: Optional[asyncio.StreamWriter] = None
        self._socket_read_size = socket_read_size
        self.set_parser(parser_class)
        self._connect_callbacks: List[weakref.WeakMethod[ConnectCallbackT]] = []
        self._buffer_cutoff = 6000
        try:
            p = int(protocol)
        except TypeError:
            p = DEFAULT_RESP_VERSION
        except ValueError:
            raise ConnectionError("protocol must be an integer")
        finally:
            if p < 2 or p > 3:
                raise ConnectionError("protocol must be either 2 or 3")
            self.protocol = protocol

    def __del__(self, _warnings: Any = warnings):
        # For some reason, the individual streams don't get properly garbage
        # collected and therefore produce no resource warnings. We add one
        # here, in the same style as those from the stdlib.
        if getattr(self, "_writer", None):
            _warnings.warn(
                f"unclosed Connection {self!r}", ResourceWarning, source=self
            )
            self._close()

    def _close(self):
        """
        Internal method to silently close the connection without waiting
        """
        if self._writer:
            self._writer.close()
            self._writer = self._reader = None

    def __repr__(self):
        repr_args = ",".join((f"{k}={v}" for k, v in self.repr_pieces()))
        return f"<{self.__class__.__module__}.{self.__class__.__name__}({repr_args})>"

    @abstractmethod
    def repr_pieces(self):
        pass

    @property
    def is_connected(self):
        return self._reader is not None and self._writer is not None

    def register_connect_callback(self, callback):
        """
        Register a callback to be called when the connection is established either
        initially or reconnected. This allows listeners to issue commands that
        are ephemeral to the connection, for example pub/sub subscription or
        key tracking. The callback must be a _method_ and will be kept as
        a weak reference.
        """
        wm = weakref.WeakMethod(callback)
        if wm not in self._connect_callbacks:
            self._connect_callbacks.append(wm)

    def deregister_connect_callback(self, callback):
        """
        De-register a previously registered callback. It will no longer receive
        notifications on connection events. Calling this is not required when the
        listener goes away, since the callbacks are kept as weak methods.
        """
        try:
            self._connect_callbacks.remove(weakref.WeakMethod(callback))
        except ValueError:
            pass

    def set_parser(self, parser_class: Type[BaseParser]) -> None:
        """
        Creates a new instance of parser_class with socket size:
        _socket_read_size and assigns it to the parser for the connection
        :param parser_class: The required parser class
        """
        self._parser = parser_class(socket_read_size=self._socket_read_size)

    async def connect(self):
        """Connects to the Redis server if not already connected"""
        if self.is_connected:
            return
        try:
            await self.retry.call_with_retry(
                lambda: self._connect(), lambda error: self.disconnect()
            )
        except asyncio.CancelledError:
            raise  # in 3.7 and earlier, this is an Exception, not BaseException
        except (socket.timeout, asyncio.TimeoutError):
            raise TimeoutError("Timeout connecting to server")
        except OSError as e:
            raise ConnectionError(self._error_message(e))
        except Exception as exc:
            raise ConnectionError(exc) from exc

        try:
            if not self.redis_connect_func:
                # Use the default on_connect function
                await self.on_connect()
            else:
                # Use the passed function redis_connect_func
                await self.redis_connect_func(self) if asyncio.iscoroutinefunction(
                    self.redis_connect_func
                ) else self.redis_connect_func(self)
        except RedisError:
            # clean up after any error in on_connect
            await self.disconnect()
            raise

        # run any user callbacks. right now the only internal callback
        # is for pubsub channel/pattern resubscription
        # first, remove any dead weakrefs
        self._connect_callbacks = [ref for ref in self._connect_callbacks if ref()]
        for ref in self._connect_callbacks:
            callback = ref()
            task = callback(self)
            if task and inspect.isawaitable(task):
                await task

    @abstractmethod
    async def _connect(self):
        pass

    @abstractmethod
    def _host_error(self) -> str:
        pass

    def _error_message(self, exception: BaseException) -> str:
        return format_error_message(self._host_error(), exception)

    async def on_connect(self) -> None:
        """Initialize the connection, authenticate and select a database"""
        self._parser.on_connect(self)
        parser = self._parser

        auth_args = None
        # if credential provider or username and/or password are set, authenticate
        if self.credential_provider or (self.username or self.password):
            cred_provider = (
                self.credential_provider
                or UsernamePasswordCredentialProvider(self.username, self.password)
            )
            auth_args = cred_provider.get_credentials()
        # if resp version is specified and we have auth args,
        # we need to send them via HELLO
        if auth_args and self.protocol not in [2, "2"]:
            if isinstance(self._parser, _AsyncRESP2Parser):
                self.set_parser(_AsyncRESP3Parser)
                # update cluster exception classes
                self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES
                self._parser.on_connect(self)
            if len(auth_args) == 1:
                auth_args = ["default", auth_args[0]]
            await self.send_command("HELLO", self.protocol, "AUTH", *auth_args)
            response = await self.read_response()
            if response.get(b"proto") != int(self.protocol) and response.get(
                "proto"
            ) != int(self.protocol):
                raise ConnectionError("Invalid RESP version")
        # avoid checking health here -- PING will fail if we try
        # to check the health prior to the AUTH
        elif auth_args:
            await self.send_command("AUTH", *auth_args, check_health=False)
            try:
                auth_response = await self.read_response()
            except AuthenticationWrongNumberOfArgsError:
                # a username and password were specified but the Redis
                # server seems to be < 6.0.0 which expects a single password
                # arg. retry auth with just the password.
                # https://github.com/andymccurdy/redis-py/issues/1274
                await self.send_command("AUTH", auth_args[-1], check_health=False)
                auth_response = await self.read_response()

            if str_if_bytes(auth_response) != "OK":
                raise AuthenticationError("Invalid Username or Password")

        # if resp version is specified, switch to it
        elif self.protocol not in [2, "2"]:
            if isinstance(self._parser, _AsyncRESP2Parser):
                self.set_parser(_AsyncRESP3Parser)
                # update cluster exception classes
                self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES
                self._parser.on_connect(self)
            await self.send_command("HELLO", self.protocol)
            response = await self.read_response()
            # if response.get(b"proto") != self.protocol and response.get(
            #     "proto"
            # ) != self.protocol:
            #     raise ConnectionError("Invalid RESP version")

        # if a client_name is given, set it
        if self.client_name:
            await self.send_command("CLIENT", "SETNAME", self.client_name)
            if str_if_bytes(await self.read_response()) != "OK":
                raise ConnectionError("Error setting client name")

        # set the library name and version, pipeline for lower startup latency
        if self.lib_name:
            await self.send_command("CLIENT", "SETINFO", "LIB-NAME", self.lib_name)
        if self.lib_version:
            await self.send_command("CLIENT", "SETINFO", "LIB-VER", self.lib_version)
        # if a database is specified, switch to it. Also pipeline this
        if self.db:
            await self.send_command("SELECT", self.db)
        # read responses from pipeline
        for _ in (sent for sent in (self.lib_name, self.lib_version) if sent):
            try:
                await self.read_response()
            except ResponseError:
                pass

        if self.db:
            if str_if_bytes(await self.read_response()) != "OK":
                raise ConnectionError("Invalid Database")

    async def disconnect(self, nowait: bool = False) -> None:
        """Disconnects from the Redis server"""
        try:
            async with async_timeout(self.socket_connect_timeout):
                self._parser.on_disconnect()
                if not self.is_connected:
                    return
                try:
                    self._writer.close()  # type: ignore[union-attr]
                    # wait for close to finish, except when handling errors and
                    # forcefully disconnecting.
                    if not nowait:
                        await self._writer.wait_closed()  # type: ignore[union-attr]
                except OSError:
                    pass
                finally:
                    self._reader = None
                    self._writer = None
        except asyncio.TimeoutError:
            raise TimeoutError(
                f"Timed out closing connection after {self.socket_connect_timeout}"
            ) from None

    async def _send_ping(self):
        """Send PING, expect PONG in return"""
        await self.send_command("PING", check_health=False)
        if str_if_bytes(await self.read_response()) != "PONG":
            raise ConnectionError("Bad response from PING health check")

    async def _ping_failed(self, error):
        """Function to call when PING fails"""
        await self.disconnect()

    async def check_health(self):
        """Check the health of the connection with a PING/PONG"""
        if (
            self.health_check_interval
            and asyncio.get_running_loop().time() > self.next_health_check
        ):
            await self.retry.call_with_retry(self._send_ping, self._ping_failed)

    async def _send_packed_command(self, command: Iterable[bytes]) -> None:
        self._writer.writelines(command)
        await self._writer.drain()

    async def send_packed_command(
        self, command: Union[bytes, str, Iterable[bytes]], check_health: bool = True
    ) -> None:
        if not self.is_connected:
            await self.connect()
        elif check_health:
            await self.check_health()

        try:
            if isinstance(command, str):
                command = command.encode()
            if isinstance(command, bytes):
                command = [command]
            if self.socket_timeout:
                await asyncio.wait_for(
                    self._send_packed_command(command), self.socket_timeout
                )
            else:
                self._writer.writelines(command)
                await self._writer.drain()
        except asyncio.TimeoutError:
            await self.disconnect(nowait=True)
            raise TimeoutError("Timeout writing to socket") from None
        except OSError as e:
            await self.disconnect(nowait=True)
            if len(e.args) == 1:
                err_no, errmsg = "UNKNOWN", e.args[0]
            else:
                err_no = e.args[0]
                errmsg = e.args[1]
            raise ConnectionError(
                f"Error {err_no} while writing to socket. {errmsg}."
            ) from e
        except BaseException:
            # BaseExceptions can be raised when a socket send operation is not
            # finished, e.g. due to a timeout. Ideally, a caller could then re-try
            # to send un-sent data. However, the send_packed_command() API
            # does not support it so there is no point in keeping the connection open.
            await self.disconnect(nowait=True)
            raise

    async def send_command(self, *args: Any, **kwargs: Any) -> None:
        """Pack and send a command to the Redis server"""
        await self.send_packed_command(
            self.pack_command(*args), check_health=kwargs.get("check_health", True)
        )

    async def can_read_destructive(self):
        """Poll the socket to see if there's data that can be read."""
        try:
            return await self._parser.can_read_destructive()
        except OSError as e:
            await self.disconnect(nowait=True)
            host_error = self._host_error()
            raise ConnectionError(f"Error while reading from {host_error}: {e.args}")

    async def read_response(
        self,
        disable_decoding: bool = False,
        timeout: Optional[float] = None,
        *,
        disconnect_on_error: bool = True,
        push_request: Optional[bool] = False,
    ):
        """Read the response from a previously sent command"""
        read_timeout = timeout if timeout is not None else self.socket_timeout
        host_error = self._host_error()
        try:
            if (
                read_timeout is not None
                and self.protocol in ["3", 3]
                and not HIREDIS_AVAILABLE
            ):
                async with async_timeout(read_timeout):
                    response = await self._parser.read_response(
                        disable_decoding=disable_decoding, push_request=push_request
                    )
            elif read_timeout is not None:
                async with async_timeout(read_timeout):
                    response = await self._parser.read_response(
                        disable_decoding=disable_decoding
                    )
            elif self.protocol in ["3", 3] and not HIREDIS_AVAILABLE:
                response = await self._parser.read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            else:
                response = await self._parser.read_response(
                    disable_decoding=disable_decoding
                )
        except asyncio.TimeoutError:
            if timeout is not None:
                # user requested timeout, return None. Operation can be retried
                return None
            # it was a self.socket_timeout error.
            if disconnect_on_error:
                await self.disconnect(nowait=True)
            raise TimeoutError(f"Timeout reading from {host_error}")
        except OSError as e:
            if disconnect_on_error:
                await self.disconnect(nowait=True)
            raise ConnectionError(f"Error while reading from {host_error} : {e.args}")
        except BaseException:
            # Also by default close in case of BaseException. A lot of code
            # relies on this behaviour when doing Command/Response pairs.
            # See #1128.
            if disconnect_on_error:
                await self.disconnect(nowait=True)
            raise

        if self.health_check_interval:
            next_time = asyncio.get_running_loop().time() + self.health_check_interval
            self.next_health_check = next_time

        if isinstance(response, ResponseError):
            raise response from None
        return response

    def pack_command(self, *args: EncodableT) -> List[bytes]:
        """Pack a series of arguments into the Redis protocol"""
        output = []
        # the client might have included 1 or more literal arguments in
        # the command name, e.g., 'CONFIG GET'. The Redis server expects these
        # arguments to be sent separately, so split the first argument
        # manually. These arguments should be bytestrings so that they are
        # not encoded.
        assert not isinstance(args[0], float)
        if isinstance(args[0], str):
            args = tuple(args[0].encode().split()) + args[1:]
        elif b" " in args[0]:
            args = tuple(args[0].split()) + args[1:]

        buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF))

        buffer_cutoff = self._buffer_cutoff
        for arg in map(self.encoder.encode, args):
            # to avoid large string mallocs, chunk the command into the
            # output list if we're sending large values or memoryviews
            arg_length = len(arg)
            if (
                len(buff) > buffer_cutoff
                or arg_length > buffer_cutoff
                or isinstance(arg, memoryview)
            ):
                buff = SYM_EMPTY.join(
                    (buff, SYM_DOLLAR, str(arg_length).encode(), SYM_CRLF)
                )
                output.append(buff)
                output.append(arg)
                buff = SYM_CRLF
            else:
                buff = SYM_EMPTY.join(
                    (
                        buff,
                        SYM_DOLLAR,
                        str(arg_length).encode(),
                        SYM_CRLF,
                        arg,
                        SYM_CRLF,
                    )
                )
        output.append(buff)
        return output
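
    # Illustrative sketch (not part of the library): for a small command the RESP
    # framing produced by pack_command collapses into a single bytes chunk. With a
    # default Encoder, a call such as
    #
    #     conn.pack_command("SET", "foo", "bar")
    #
    # is expected to yield roughly
    #
    #     [b"*3\r\n$3\r\nSET\r\n$3\r\nfoo\r\n$3\r\nbar\r\n"]
    #
    # i.e. an array header (*3) followed by one bulk string ($<len>) per argument.
    # Arguments larger than _buffer_cutoff, and memoryviews, are appended as
    # separate chunks instead of being joined into one large bytestring.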

    def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> List[bytes]:
        """Pack multiple commands into the Redis protocol"""
        output: List[bytes] = []
        pieces: List[bytes] = []
        buffer_length = 0
        buffer_cutoff = self._buffer_cutoff

        for cmd in commands:
            for chunk in self.pack_command(*cmd):
                chunklen = len(chunk)
                if (
                    buffer_length > buffer_cutoff
                    or chunklen > buffer_cutoff
                    or isinstance(chunk, memoryview)
                ):
                    if pieces:
                        output.append(SYM_EMPTY.join(pieces))
                        buffer_length = 0
                        pieces = []

                if chunklen > buffer_cutoff or isinstance(chunk, memoryview):
                    output.append(chunk)
                else:
                    pieces.append(chunk)
                    buffer_length += chunklen

        if pieces:
            output.append(SYM_EMPTY.join(pieces))
        return output


class Connection(AbstractConnection):
    "Manages TCP communication to and from a Redis server"

    def __init__(
        self,
        *,
        host: str = "localhost",
        port: Union[str, int] = 6379,
        socket_keepalive: bool = False,
        socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None,
        socket_type: int = 0,
        **kwargs,
    ):
        self.host = host
        self.port = int(port)
        self.socket_keepalive = socket_keepalive
        self.socket_keepalive_options = socket_keepalive_options or {}
        self.socket_type = socket_type
        super().__init__(**kwargs)

    def repr_pieces(self):
        pieces = [("host", self.host), ("port", self.port), ("db", self.db)]
        if self.client_name:
            pieces.append(("client_name", self.client_name))
        return pieces

    def _connection_arguments(self) -> Mapping:
        return {"host": self.host, "port": self.port}

    async def _connect(self):
        """Create a TCP socket connection"""
        async with async_timeout(self.socket_connect_timeout):
            reader, writer = await asyncio.open_connection(
                **self._connection_arguments()
            )
        self._reader = reader
        self._writer = writer

        sock = writer.transport.get_extra_info("socket")
        if sock:
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
            try:
                # TCP_KEEPALIVE
                if self.socket_keepalive:
                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                    for k, v in self.socket_keepalive_options.items():
                        sock.setsockopt(socket.SOL_TCP, k, v)
            except (OSError, TypeError):
                # `socket_keepalive_options` might contain invalid options
                # causing an error. Do not leave the connection open.
                writer.close()
                raise

    def _host_error(self) -> str:
        return f"{self.host}:{self.port}"


class SSLConnection(Connection):
    """Manages SSL connections to and from the Redis server(s).
    This class extends the Connection class, adding SSL functionality, and making
    use of ssl.SSLContext (https://docs.python.org/3/library/ssl.html#ssl.SSLContext)
    """

    def __init__(
        self,
        ssl_keyfile: Optional[str] = None,
        ssl_certfile: Optional[str] = None,
        ssl_cert_reqs: str = "required",
        ssl_ca_certs: Optional[str] = None,
        ssl_ca_data: Optional[str] = None,
        ssl_check_hostname: bool = False,
        ssl_min_version: Optional[ssl.TLSVersion] = None,
        ssl_ciphers: Optional[str] = None,
        **kwargs,
    ):
        self.ssl_context: RedisSSLContext = RedisSSLContext(
            keyfile=ssl_keyfile,
            certfile=ssl_certfile,
            cert_reqs=ssl_cert_reqs,
            ca_certs=ssl_ca_certs,
            ca_data=ssl_ca_data,
            check_hostname=ssl_check_hostname,
            min_version=ssl_min_version,
            ciphers=ssl_ciphers,
        )
        super().__init__(**kwargs)

    def _connection_arguments(self) -> Mapping:
        kwargs = super()._connection_arguments()
        kwargs["ssl"] = self.ssl_context.get()
        return kwargs

    @property
    def keyfile(self):
        return self.ssl_context.keyfile

    @property
    def certfile(self):
        return self.ssl_context.certfile

    @property
    def cert_reqs(self):
        return self.ssl_context.cert_reqs

    @property
    def ca_certs(self):
        return self.ssl_context.ca_certs

    @property
    def ca_data(self):
        return self.ssl_context.ca_data

    @property
    def check_hostname(self):
        return self.ssl_context.check_hostname

    @property
    def min_version(self):
        return self.ssl_context.min_version


class RedisSSLContext:
    __slots__ = (
        "keyfile",
        "certfile",
        "cert_reqs",
        "ca_certs",
        "ca_data",
        "context",
        "check_hostname",
        "min_version",
        "ciphers",
    )

    def __init__(
        self,
        keyfile: Optional[str] = None,
        certfile: Optional[str] = None,
        cert_reqs: Optional[str] = None,
        ca_certs: Optional[str] = None,
        ca_data: Optional[str] = None,
        check_hostname: bool = False,
        min_version: Optional[ssl.TLSVersion] = None,
        ciphers: Optional[str] = None,
    ):
        self.keyfile = keyfile
        self.certfile = certfile
        if cert_reqs is None:
            self.cert_reqs = ssl.CERT_NONE
        elif isinstance(cert_reqs, str):
            CERT_REQS = {
                "none": ssl.CERT_NONE,
                "optional": ssl.CERT_OPTIONAL,
                "required": ssl.CERT_REQUIRED,
            }
            if cert_reqs not in CERT_REQS:
                raise RedisError(
                    f"Invalid SSL Certificate Requirements Flag: {cert_reqs}"
                )
            self.cert_reqs = CERT_REQS[cert_reqs]
        self.ca_certs = ca_certs
        self.ca_data = ca_data
        self.check_hostname = check_hostname
        self.min_version = min_version
        self.ciphers = ciphers
        self.context: Optional[ssl.SSLContext] = None

    def get(self) -> ssl.SSLContext:
        if not self.context:
            context = ssl.create_default_context()
            context.check_hostname = self.check_hostname
            context.verify_mode = self.cert_reqs
            if self.certfile and self.keyfile:
                context.load_cert_chain(certfile=self.certfile, keyfile=self.keyfile)
            if self.ca_certs or self.ca_data:
                context.load_verify_locations(cafile=self.ca_certs, cadata=self.ca_data)
            if self.min_version is not None:
                context.minimum_version = self.min_version
            if self.ciphers is not None:
                context.set_ciphers(self.ciphers)
            self.context = context
        return self.context
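

# Illustrative sketch (not part of the library): RedisSSLContext lazily builds a
# standard ssl.SSLContext from the ssl_* keyword arguments given to SSLConnection.
# Assuming a CA bundle at the (hypothetical) path below, a verifying TLS connection
# object could be constructed like this:
#
#     conn = SSLConnection(
#         host="redis.example.com",
#         port=6380,
#         ssl_cert_reqs="required",
#         ssl_ca_certs="/path/to/ca.pem",
#         ssl_check_hostname=True,
#     )
#     # conn.ssl_context.get() builds the ssl.SSLContext once and caches it.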


class UnixDomainSocketConnection(AbstractConnection):
    "Manages UDS communication to and from a Redis server"

    def __init__(self, *, path: str = "", **kwargs):
        self.path = path
        super().__init__(**kwargs)

    def repr_pieces(self) -> Iterable[Tuple[str, Union[str, int]]]:
        pieces = [("path", self.path), ("db", self.db)]
        if self.client_name:
            pieces.append(("client_name", self.client_name))
        return pieces

    async def _connect(self):
        async with async_timeout(self.socket_connect_timeout):
            reader, writer = await asyncio.open_unix_connection(path=self.path)
        self._reader = reader
        self._writer = writer
        await self.on_connect()

    def _host_error(self) -> str:
        return self.path


FALSE_STRINGS = ("0", "F", "FALSE", "N", "NO")


def to_bool(value) -> Optional[bool]:
    if value is None or value == "":
        return None
    if isinstance(value, str) and value.upper() in FALSE_STRINGS:
        return False
    return bool(value)


URL_QUERY_ARGUMENT_PARSERS: Mapping[str, Callable[..., object]] = MappingProxyType(
    {
        "db": int,
        "socket_timeout": float,
        "socket_connect_timeout": float,
        "socket_keepalive": to_bool,
        "retry_on_timeout": to_bool,
        "max_connections": int,
        "health_check_interval": int,
        "ssl_check_hostname": to_bool,
        "timeout": float,
    }
)


class ConnectKwargs(TypedDict, total=False):
    username: str
    password: str
    connection_class: Type[AbstractConnection]
    host: str
    port: int
    db: int
    path: str


def parse_url(url: str) -> ConnectKwargs:
    parsed: ParseResult = urlparse(url)
    kwargs: ConnectKwargs = {}

    for name, value_list in parse_qs(parsed.query).items():
        if value_list and len(value_list) > 0:
            value = unquote(value_list[0])
            parser = URL_QUERY_ARGUMENT_PARSERS.get(name)
            if parser:
                try:
                    kwargs[name] = parser(value)
                except (TypeError, ValueError):
                    raise ValueError(f"Invalid value for `{name}` in connection URL.")
            else:
                kwargs[name] = value

    if parsed.username:
        kwargs["username"] = unquote(parsed.username)
    if parsed.password:
        kwargs["password"] = unquote(parsed.password)

    # We only support redis://, rediss:// and unix:// schemes.
    if parsed.scheme == "unix":
        if parsed.path:
            kwargs["path"] = unquote(parsed.path)
        kwargs["connection_class"] = UnixDomainSocketConnection

    elif parsed.scheme in ("redis", "rediss"):
        if parsed.hostname:
            kwargs["host"] = unquote(parsed.hostname)
        if parsed.port:
            kwargs["port"] = int(parsed.port)

        # If there's a path argument, use it as the db argument if a
        # querystring value wasn't specified
        if parsed.path and "db" not in kwargs:
            try:
                kwargs["db"] = int(unquote(parsed.path).replace("/", ""))
            except (AttributeError, ValueError):
                pass

        if parsed.scheme == "rediss":
            kwargs["connection_class"] = SSLConnection

    else:
        valid_schemes = "redis://, rediss://, unix://"
        raise ValueError(
            f"Redis URL must specify one of the following schemes ({valid_schemes})"
        )

    return kwargs
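

# Illustrative sketch (not part of the library): parse_url maps a connection URL
# onto keyword arguments for a connection class. For example, a call such as
#
#     parse_url("rediss://user:secret@redis.example.com:6380/2?socket_timeout=5")
#
# is expected to return roughly
#
#     {
#         "socket_timeout": 5.0,
#         "username": "user",
#         "password": "secret",
#         "host": "redis.example.com",
#         "port": 6380,
#         "db": 2,
#         "connection_class": SSLConnection,
#     }
#
# with querystring values cast via URL_QUERY_ARGUMENT_PARSERS (here "5" -> 5.0)
# and the db number taken from the URL path because no db= query option was given.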


_CP = TypeVar("_CP", bound="ConnectionPool")


class ConnectionPool:
    """
    Create a connection pool. If ``max_connections`` is set, then this
    object raises :py:class:`~redis.ConnectionError` when the pool's
    limit is reached.

    By default, TCP connections are created unless ``connection_class``
    is specified. Use :py:class:`~redis.UnixDomainSocketConnection` for
    unix sockets.

    Any additional keyword arguments are passed to the constructor of
    ``connection_class``.
    """

    @classmethod
    def from_url(cls: Type[_CP], url: str, **kwargs) -> _CP:
        """
        Return a connection pool configured from the given URL.

        For example::

            redis://[[username]:[password]]@localhost:6379/0
            rediss://[[username]:[password]]@localhost:6379/0
            unix://[username@]/path/to/socket.sock?db=0[&password=password]

        Three URL schemes are supported:

        - ``redis://`` creates a TCP socket connection. See more at:
          <https://www.iana.org/assignments/uri-schemes/prov/redis>
        - ``rediss://`` creates a SSL wrapped TCP socket connection. See more at:
          <https://www.iana.org/assignments/uri-schemes/prov/rediss>
        - ``unix://`` creates a Unix Domain Socket connection.

        The username, password, hostname, path and all querystring values
        are passed through urllib.parse.unquote in order to replace any
        percent-encoded values with their corresponding characters.

        There are several ways to specify a database number. The first value
        found will be used:

        1. A ``db`` querystring option, e.g. redis://localhost?db=0

        2. If using the redis:// or rediss:// schemes, the path argument
           of the url, e.g. redis://localhost/0

        3. A ``db`` keyword argument to this function.

        If none of these options are specified, the default db=0 is used.

        All querystring options are cast to their appropriate Python types.
        Boolean arguments can be specified with string values "True"/"False"
        or "Yes"/"No". Values that cannot be properly cast cause a
        ``ValueError`` to be raised. Once parsed, the querystring arguments
        and keyword arguments are passed to the ``ConnectionPool``'s
        class initializer. In the case of conflicting arguments, querystring
        arguments always win.
        """
        url_options = parse_url(url)
        kwargs.update(url_options)
        return cls(**kwargs)
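
    # Illustrative sketch (not part of the library): building a pool from a
    # (hypothetical) URL while also passing a keyword argument. URL options are
    # merged over the keyword arguments, so querystring values take precedence.
    #
    #     pool = ConnectionPool.from_url(
    #         "redis://localhost:6379/0?health_check_interval=30",
    #         max_connections=20,
    #     )
    #     # roughly equivalent to:
    #     # ConnectionPool(host="localhost", port=6379, db=0,
    #     #                health_check_interval=30, max_connections=20)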

    def __init__(
        self,
        connection_class: Type[AbstractConnection] = Connection,
        max_connections: Optional[int] = None,
        **connection_kwargs,
    ):
        max_connections = max_connections or 2**31
        if not isinstance(max_connections, int) or max_connections < 0:
            raise ValueError('"max_connections" must be a positive integer')

        self.connection_class = connection_class
        self.connection_kwargs = connection_kwargs
        self.max_connections = max_connections

        self._available_connections: List[AbstractConnection] = []
        self._in_use_connections: Set[AbstractConnection] = set()
        self.encoder_class = self.connection_kwargs.get("encoder_class", Encoder)

    def __repr__(self):
        return (
            f"<{self.__class__.__module__}.{self.__class__.__name__}"
            f"({self.connection_class(**self.connection_kwargs)!r})>"
        )

    def reset(self):
        self._available_connections = []
        self._in_use_connections = weakref.WeakSet()

    def can_get_connection(self) -> bool:
        """Return True if a connection can be retrieved from the pool."""
        return (
            self._available_connections
            or len(self._in_use_connections) < self.max_connections
        )

    async def get_connection(self, command_name, *keys, **options):
        """Get a connected connection from the pool"""
        connection = self.get_available_connection()
        try:
            await self.ensure_connection(connection)
        except BaseException:
            await self.release(connection)
            raise

        return connection

    def get_available_connection(self):
        """Get a connection from the pool, without making sure it is connected"""
        try:
            connection = self._available_connections.pop()
        except IndexError:
            if len(self._in_use_connections) >= self.max_connections:
                raise ConnectionError("Too many connections") from None
            connection = self.make_connection()
        self._in_use_connections.add(connection)
        return connection

    def get_encoder(self):
        """Return an encoder based on encoding settings"""
        kwargs = self.connection_kwargs
        return self.encoder_class(
            encoding=kwargs.get("encoding", "utf-8"),
            encoding_errors=kwargs.get("encoding_errors", "strict"),
            decode_responses=kwargs.get("decode_responses", False),
        )

    def make_connection(self):
        """Create a new connection. Can be overridden by child classes."""
        return self.connection_class(**self.connection_kwargs)

    async def ensure_connection(self, connection: AbstractConnection):
        """Ensure that the connection object is connected and valid"""
        await connection.connect()
        # connections that the pool provides should be ready to send
        # a command. if not, the connection was either returned to the
        # pool before all data has been read or the socket has been
        # closed. either way, reconnect and verify everything is good.
        try:
            if await connection.can_read_destructive():
                raise ConnectionError("Connection has data") from None
        except (ConnectionError, OSError):
            await connection.disconnect()
            await connection.connect()
            if await connection.can_read_destructive():
                raise ConnectionError("Connection not ready") from None

    async def release(self, connection: AbstractConnection):
        """Releases the connection back to the pool"""
        # Connections should always be returned to the correct pool,
        # not doing so is an error that will cause an exception here.
        self._in_use_connections.remove(connection)
        self._available_connections.append(connection)

    async def disconnect(self, inuse_connections: bool = True):
        """
        Disconnects connections in the pool

        If ``inuse_connections`` is True, disconnect connections that are
        currently in use, potentially by other tasks. Otherwise only disconnect
        connections that are idle in the pool.
        """
        if inuse_connections:
            connections: Iterable[AbstractConnection] = chain(
                self._available_connections, self._in_use_connections
            )
        else:
            connections = self._available_connections
        resp = await asyncio.gather(
            *(connection.disconnect() for connection in connections),
            return_exceptions=True,
        )
        exc = next((r for r in resp if isinstance(r, BaseException)), None)
        if exc:
            raise exc

    async def aclose(self) -> None:
        """Close the pool, disconnecting all connections"""
        await self.disconnect()

    def set_retry(self, retry: "Retry") -> None:
        for conn in self._available_connections:
            conn.retry = retry
        for conn in self._in_use_connections:
            conn.retry = retry


class BlockingConnectionPool(ConnectionPool):
    """
    A blocking connection pool::

        >>> from redis.asyncio import Redis, BlockingConnectionPool
        >>> client = Redis.from_pool(BlockingConnectionPool())

    It performs the same function as the default
    :py:class:`~redis.asyncio.ConnectionPool` implementation, in that it
    maintains a pool of reusable connections that can be shared by
    multiple async redis clients.

    The difference is that, in the event that a client tries to get a
    connection from the pool when all of the connections are in use, rather than
    raising a :py:class:`~redis.ConnectionError` (as the default
    :py:class:`~redis.asyncio.ConnectionPool` implementation does), it
    blocks the current `Task` for a specified number of seconds until
    a connection becomes available.

    Use ``max_connections`` to increase / decrease the pool size::

        >>> pool = BlockingConnectionPool(max_connections=10)

    Use ``timeout`` to tell it either how many seconds to wait for a connection
    to become available, or to block forever:

        >>> # Block forever.
        >>> pool = BlockingConnectionPool(timeout=None)

        >>> # Raise a ``ConnectionError`` after five seconds if a connection is
        >>> # not available.
        >>> pool = BlockingConnectionPool(timeout=5)
    """

    def __init__(
        self,
        max_connections: int = 50,
        timeout: Optional[int] = 20,
        connection_class: Type[AbstractConnection] = Connection,
        queue_class: Type[asyncio.Queue] = asyncio.LifoQueue,  # deprecated
        **connection_kwargs,
    ):
        super().__init__(
            connection_class=connection_class,
            max_connections=max_connections,
            **connection_kwargs,
        )
        self._condition = asyncio.Condition()
        self.timeout = timeout

    async def get_connection(self, command_name, *keys, **options):
        """Gets a connection from the pool, blocking until one is available"""
        try:
            async with self._condition:
                async with async_timeout(self.timeout):
                    await self._condition.wait_for(self.can_get_connection)
                    connection = super().get_available_connection()
        except asyncio.TimeoutError as err:
            raise ConnectionError("No connection available.") from err

        # We now perform the connection check outside of the lock.
        try:
            await self.ensure_connection(connection)
            return connection
        except BaseException:
            await self.release(connection)
            raise

    async def release(self, connection: AbstractConnection):
        """Releases the connection back to the pool."""
        async with self._condition:
            await super().release(connection)
            self._condition.notify()
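

# Illustrative sketch (not part of the library): manual use of the pool API defined
# above. Application code normally hands the pool to a client instead of driving
# connections directly, but the low-level flow looks roughly like this:
#
#     import asyncio
#
#     async def main():
#         pool = BlockingConnectionPool.from_url("redis://localhost:6379/0")
#         conn = await pool.get_connection("PING")
#         try:
#             await conn.send_command("PING")
#             print(await conn.read_response())  # b"PONG" (or "PONG" when decoding)
#         finally:
#             await pool.release(conn)
#             await pool.aclose()
#
#     asyncio.run(main())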