# connection.py — asynchronous Redis connection implementation (redis-py asyncio).
  1. import asyncio
  2. import copy
  3. import enum
  4. import inspect
  5. import socket
  6. import sys
  7. import time
  8. import warnings
  9. import weakref
  10. from abc import abstractmethod
  11. from itertools import chain
  12. from types import MappingProxyType
  13. from typing import (
  14. Any,
  15. Callable,
  16. Iterable,
  17. List,
  18. Mapping,
  19. Optional,
  20. Protocol,
  21. Set,
  22. Tuple,
  23. Type,
  24. TypedDict,
  25. TypeVar,
  26. Union,
  27. )
  28. from urllib.parse import ParseResult, parse_qs, unquote, urlparse
  29. from ..observability.attributes import (
  30. DB_CLIENT_CONNECTION_POOL_NAME,
  31. DB_CLIENT_CONNECTION_STATE,
  32. AttributeBuilder,
  33. ConnectionState,
  34. get_pool_name,
  35. )
  36. from ..utils import SSL_AVAILABLE
  37. if SSL_AVAILABLE:
  38. import ssl
  39. from ssl import SSLContext, TLSVersion, VerifyFlags
  40. else:
  41. ssl = None
  42. TLSVersion = None
  43. SSLContext = None
  44. VerifyFlags = None
  45. from ..auth.token import TokenInterface
  46. from ..driver_info import DriverInfo, resolve_driver_info
  47. from ..event import AsyncAfterConnectionReleasedEvent, EventDispatcher
  48. from ..utils import deprecated_args, format_error_message
  49. # the functionality is available in 3.11.x but has a major issue before
  50. # 3.11.3. See https://github.com/redis/redis-py/issues/2633
  51. if sys.version_info >= (3, 11, 3):
  52. from asyncio import timeout as async_timeout
  53. else:
  54. from async_timeout import timeout as async_timeout
  55. from redis.asyncio.observability.recorder import (
  56. record_connection_closed,
  57. record_connection_count,
  58. record_connection_create_time,
  59. record_connection_wait_time,
  60. record_error_count,
  61. )
  62. from redis.asyncio.retry import Retry
  63. from redis.backoff import NoBackoff
  64. from redis.connection import DEFAULT_RESP_VERSION
  65. from redis.credentials import CredentialProvider, UsernamePasswordCredentialProvider
  66. from redis.exceptions import (
  67. AuthenticationError,
  68. AuthenticationWrongNumberOfArgsError,
  69. ConnectionError,
  70. DataError,
  71. MaxConnectionsError,
  72. RedisError,
  73. ResponseError,
  74. TimeoutError,
  75. )
  76. from redis.observability.metrics import CloseReason
  77. from redis.typing import EncodableT
  78. from redis.utils import HIREDIS_AVAILABLE, str_if_bytes
  79. from .._parsers import (
  80. BaseParser,
  81. Encoder,
  82. _AsyncHiredisParser,
  83. _AsyncRESP2Parser,
  84. _AsyncRESP3Parser,
  85. )
# RESP protocol framing bytes used by pack_command()/pack_commands().
SYM_STAR = b"*"
SYM_DOLLAR = b"$"
SYM_CRLF = b"\r\n"
SYM_LF = b"\n"
SYM_EMPTY = b""


class _Sentinel(enum.Enum):
    # Unique marker distinguishing "argument not supplied" from an explicit None.
    sentinel = object()


SENTINEL = _Sentinel.sentinel

# Prefer the hiredis C parser when the extension is installed; fall back to
# the pure-Python RESP2 parser otherwise.
DefaultParser: Type[Union[_AsyncRESP2Parser, _AsyncRESP3Parser, _AsyncHiredisParser]]
if HIREDIS_AVAILABLE:
    DefaultParser = _AsyncHiredisParser
else:
    DefaultParser = _AsyncRESP2Parser
class ConnectCallbackProtocol(Protocol):
    """Structural type for a synchronous post-connect callback."""

    def __call__(self, connection: "AbstractConnection"): ...


class AsyncConnectCallbackProtocol(Protocol):
    """Structural type for an asynchronous post-connect callback."""

    async def __call__(self, connection: "AbstractConnection"): ...


# A connect callback may be either sync or async; connect_check_health()
# awaits the result only when it is awaitable.
ConnectCallbackT = Union[ConnectCallbackProtocol, AsyncConnectCallbackProtocol]
class AbstractConnection:
    """Manages communication to and from a Redis server"""

    # __slots__ reduces per-instance memory for the hot attributes; "__dict__"
    # is deliberately included so subclasses and callers can still attach
    # ad-hoc attributes (e.g. host/port are read via getattr elsewhere).
    __slots__ = (
        "db",
        "username",
        "client_name",
        "lib_name",
        "lib_version",
        "credential_provider",
        "password",
        "socket_timeout",
        "socket_connect_timeout",
        "redis_connect_func",
        "retry_on_timeout",
        "retry_on_error",
        "health_check_interval",
        "next_health_check",
        "last_active_at",
        "encoder",
        "ssl_context",
        "protocol",
        "_reader",
        "_writer",
        "_parser",
        "_connect_callbacks",
        "_buffer_cutoff",
        "_lock",
        "_socket_read_size",
        "__dict__",
    )
  134. @deprecated_args(
  135. args_to_warn=["lib_name", "lib_version"],
  136. reason="Use 'driver_info' parameter instead. "
  137. "lib_name and lib_version will be removed in a future version.",
  138. )
  139. def __init__(
  140. self,
  141. *,
  142. db: Union[str, int] = 0,
  143. password: Optional[str] = None,
  144. socket_timeout: Optional[float] = None,
  145. socket_connect_timeout: Optional[float] = None,
  146. retry_on_timeout: bool = False,
  147. retry_on_error: Union[list, _Sentinel] = SENTINEL,
  148. encoding: str = "utf-8",
  149. encoding_errors: str = "strict",
  150. decode_responses: bool = False,
  151. parser_class: Type[BaseParser] = DefaultParser,
  152. socket_read_size: int = 65536,
  153. health_check_interval: float = 0,
  154. client_name: Optional[str] = None,
  155. lib_name: Optional[str] = None,
  156. lib_version: Optional[str] = None,
  157. driver_info: Optional[DriverInfo] = None,
  158. username: Optional[str] = None,
  159. retry: Optional[Retry] = None,
  160. redis_connect_func: Optional[ConnectCallbackT] = None,
  161. encoder_class: Type[Encoder] = Encoder,
  162. credential_provider: Optional[CredentialProvider] = None,
  163. protocol: Optional[int] = 2,
  164. event_dispatcher: Optional[EventDispatcher] = None,
  165. ):
  166. """
  167. Initialize a new async Connection.
  168. Parameters
  169. ----------
  170. driver_info : DriverInfo, optional
  171. Driver metadata for CLIENT SETINFO. If provided, lib_name and lib_version
  172. are ignored. If not provided, a DriverInfo will be created from lib_name
  173. and lib_version (or defaults if those are also None).
  174. lib_name : str, optional
  175. **Deprecated.** Use driver_info instead. Library name for CLIENT SETINFO.
  176. lib_version : str, optional
  177. **Deprecated.** Use driver_info instead. Library version for CLIENT SETINFO.
  178. """
  179. if (username or password) and credential_provider is not None:
  180. raise DataError(
  181. "'username' and 'password' cannot be passed along with 'credential_"
  182. "provider'. Please provide only one of the following arguments: \n"
  183. "1. 'password' and (optional) 'username'\n"
  184. "2. 'credential_provider'"
  185. )
  186. if event_dispatcher is None:
  187. self._event_dispatcher = EventDispatcher()
  188. else:
  189. self._event_dispatcher = event_dispatcher
  190. self.db = db
  191. self.client_name = client_name
  192. # Handle driver_info: if provided, use it; otherwise create from lib_name/lib_version
  193. self.driver_info = resolve_driver_info(driver_info, lib_name, lib_version)
  194. self.credential_provider = credential_provider
  195. self.password = password
  196. self.username = username
  197. self.socket_timeout = socket_timeout
  198. if socket_connect_timeout is None:
  199. socket_connect_timeout = socket_timeout
  200. self.socket_connect_timeout = socket_connect_timeout
  201. self.retry_on_timeout = retry_on_timeout
  202. if retry_on_error is SENTINEL:
  203. retry_on_error = []
  204. if retry_on_timeout:
  205. retry_on_error.append(TimeoutError)
  206. retry_on_error.append(socket.timeout)
  207. retry_on_error.append(asyncio.TimeoutError)
  208. self.retry_on_error = retry_on_error
  209. if retry or retry_on_error:
  210. if not retry:
  211. self.retry = Retry(NoBackoff(), 1)
  212. else:
  213. # deep-copy the Retry object as it is mutable
  214. self.retry = copy.deepcopy(retry)
  215. # Update the retry's supported errors with the specified errors
  216. self.retry.update_supported_errors(retry_on_error)
  217. else:
  218. self.retry = Retry(NoBackoff(), 0)
  219. self.health_check_interval = health_check_interval
  220. self.next_health_check: float = -1
  221. self.encoder = encoder_class(encoding, encoding_errors, decode_responses)
  222. self.redis_connect_func = redis_connect_func
  223. self._reader: Optional[asyncio.StreamReader] = None
  224. self._writer: Optional[asyncio.StreamWriter] = None
  225. self._socket_read_size = socket_read_size
  226. self.set_parser(parser_class)
  227. self._connect_callbacks: List[weakref.WeakMethod[ConnectCallbackT]] = []
  228. self._buffer_cutoff = 6000
  229. self._re_auth_token: Optional[TokenInterface] = None
  230. self._should_reconnect = False
  231. try:
  232. p = int(protocol)
  233. except TypeError:
  234. p = DEFAULT_RESP_VERSION
  235. except ValueError:
  236. raise ConnectionError("protocol must be an integer")
  237. else:
  238. if p < 2 or p > 3:
  239. raise ConnectionError("protocol must be either 2 or 3")
  240. self.protocol = p
    def __del__(self, _warnings: Any = warnings):
        # For some reason, the individual streams don't get properly garbage
        # collected and therefore produce no resource warnings. We add one
        # here, in the same style as those from the stdlib.
        # NOTE: `_warnings` is bound as a default so the module is still
        # reachable during interpreter shutdown.
        if getattr(self, "_writer", None):
            _warnings.warn(
                f"unclosed Connection {self!r}", ResourceWarning, source=self
            )
            try:
                # _close() touches the transport; only attempt it while an
                # event loop is still running in this thread.
                asyncio.get_running_loop()
                self._close()
            except RuntimeError:
                # No actions been taken if pool already closed.
                pass
  255. def _close(self):
  256. """
  257. Internal method to silently close the connection without waiting
  258. """
  259. if self._writer:
  260. self._writer.close()
  261. self._writer = self._reader = None
  262. def __repr__(self):
  263. repr_args = ",".join((f"{k}={v}" for k, v in self.repr_pieces()))
  264. return f"<{self.__class__.__module__}.{self.__class__.__name__}({repr_args})>"
    @abstractmethod
    def repr_pieces(self):
        """Return (name, value) pairs describing this connection for __repr__."""
        pass
  268. @property
  269. def is_connected(self):
  270. return self._reader is not None and self._writer is not None
  271. def register_connect_callback(self, callback):
  272. """
  273. Register a callback to be called when the connection is established either
  274. initially or reconnected. This allows listeners to issue commands that
  275. are ephemeral to the connection, for example pub/sub subscription or
  276. key tracking. The callback must be a _method_ and will be kept as
  277. a weak reference.
  278. """
  279. wm = weakref.WeakMethod(callback)
  280. if wm not in self._connect_callbacks:
  281. self._connect_callbacks.append(wm)
  282. def deregister_connect_callback(self, callback):
  283. """
  284. De-register a previously registered callback. It will no-longer receive
  285. notifications on connection events. Calling this is not required when the
  286. listener goes away, since the callbacks are kept as weak methods.
  287. """
  288. try:
  289. self._connect_callbacks.remove(weakref.WeakMethod(callback))
  290. except ValueError:
  291. pass
    def set_parser(self, parser_class: Type[BaseParser]) -> None:
        """
        Creates a new instance of parser_class with socket size:
        _socket_read_size and assigns it to the parser for the connection
        :param parser_class: The required parser class
        """
        # Replaces any existing parser; callers that switch RESP versions
        # (see on_connect_check_health) re-copy exception classes afterwards.
        self._parser = parser_class(socket_read_size=self._socket_read_size)
    async def connect(self):
        """Connects to the Redis server if not already connected"""
        # try once the socket connect with the handshake, retry the whole
        # connect/handshake flow based on retry policy
        await self.retry.call_with_retry(
            # retry_socket_connect=False: the outer retry already wraps the
            # full connect+handshake, so the inner socket connect must not
            # retry on its own.
            lambda: self.connect_check_health(
                check_health=True, retry_socket_connect=False
            ),
            # On each failure, disconnect so the next attempt starts clean.
            lambda error, failure_count: self.disconnect(
                error=error, failure_count=failure_count
            ),
            with_failure_count=True,
        )
    async def connect_check_health(
        self, check_health: bool = True, retry_socket_connect: bool = True
    ):
        """Open the socket (optionally retrying), run the handshake, then fire
        any registered connect callbacks. No-op when already connected.

        :param check_health: forwarded to the default on_connect handshake.
        :param retry_socket_connect: when True the raw socket connect is retried
            per the connection's retry policy; connect() passes False because it
            retries the whole connect+handshake flow itself.
        :raises TimeoutError: on socket-level connect timeout.
        :raises ConnectionError: on any other connect/handshake failure.
        """
        if self.is_connected:
            return

        # Track actual retry attempts for error reporting
        actual_retry_attempts = 0

        def failure_callback(error, failure_count):
            nonlocal actual_retry_attempts
            actual_retry_attempts = failure_count
            return self.disconnect(error=error, failure_count=failure_count)

        try:
            if retry_socket_connect:
                await self.retry.call_with_retry(
                    lambda: self._connect(),
                    failure_callback,
                    with_failure_count=True,
                )
            else:
                await self._connect()
        except asyncio.CancelledError:
            raise  # in 3.7 and earlier, this is an Exception, not BaseException
        except (socket.timeout, asyncio.TimeoutError):
            # Record the failure in metrics before surfacing it.
            e = TimeoutError("Timeout connecting to server")
            await record_error_count(
                server_address=getattr(self, "host", None),
                server_port=getattr(self, "port", None),
                network_peer_address=getattr(self, "host", None),
                network_peer_port=getattr(self, "port", None),
                error_type=e,
                retry_attempts=actual_retry_attempts,
                is_internal=False,
            )
            raise e
        except OSError as e:
            e = ConnectionError(self._error_message(e))
            await record_error_count(
                server_address=getattr(self, "host", None),
                server_port=getattr(self, "port", None),
                network_peer_address=getattr(self, "host", None),
                network_peer_port=getattr(self, "port", None),
                error_type=e,
                retry_attempts=actual_retry_attempts,
                is_internal=False,
            )
            raise e
        except Exception as exc:
            raise ConnectionError(exc) from exc

        try:
            if not self.redis_connect_func:
                # Use the default on_connect function
                await self.on_connect_check_health(check_health=check_health)
            else:
                # Use the passed function redis_connect_func
                (
                    await self.redis_connect_func(self)
                    if asyncio.iscoroutinefunction(self.redis_connect_func)
                    else self.redis_connect_func(self)
                )
        except RedisError:
            # clean up after any error in on_connect
            await self.disconnect()
            raise

        # run any user callbacks. right now the only internal callback
        # is for pubsub channel/pattern resubscription
        # first, remove any dead weakrefs
        self._connect_callbacks = [ref for ref in self._connect_callbacks if ref()]
        for ref in self._connect_callbacks:
            callback = ref()
            task = callback(self)
            # Callbacks may be sync or async; only await awaitables.
            if task and inspect.isawaitable(task):
                await task
    def mark_for_reconnect(self):
        """Flag this connection to be re-established before its next use."""
        self._should_reconnect = True

    def should_reconnect(self):
        """Return True when a reconnect has been requested via mark_for_reconnect()."""
        return self._should_reconnect

    def reset_should_reconnect(self):
        """Clear any pending reconnect request (also done by disconnect())."""
        self._should_reconnect = False
    @abstractmethod
    async def _connect(self):
        """Open the underlying transport (TCP/unix socket); set by subclasses."""
        pass

    @abstractmethod
    def _host_error(self) -> str:
        """Return a human-readable endpoint description for error messages."""
        pass
    def _error_message(self, exception: BaseException) -> str:
        """Format ``exception`` with the endpoint info from _host_error()."""
        return format_error_message(self._host_error(), exception)

    def get_protocol(self):
        """Return the negotiated RESP protocol version (2 or 3)."""
        return self.protocol
    async def on_connect(self) -> None:
        """Initialize the connection, authenticate and select a database"""
        # Delegates to the health-check-aware variant with checking enabled.
        await self.on_connect_check_health(check_health=True)
    async def on_connect_check_health(self, check_health: bool = True) -> None:
        """Run the post-connect handshake: AUTH/HELLO, CLIENT SETNAME,
        CLIENT SETINFO (pipelined) and SELECT. Command/response ordering here
        is strict — SETINFO/SELECT are pipelined and their replies read later.
        """
        self._parser.on_connect(self)
        parser = self._parser

        auth_args = None
        # if credential provider or username and/or password are set, authenticate
        if self.credential_provider or (self.username or self.password):
            cred_provider = (
                self.credential_provider
                or UsernamePasswordCredentialProvider(self.username, self.password)
            )
            auth_args = await cred_provider.get_credentials_async()

        # if resp version is specified and we have auth args,
        # we need to send them via HELLO
        if auth_args and self.protocol not in [2, "2"]:
            if isinstance(self._parser, _AsyncRESP2Parser):
                self.set_parser(_AsyncRESP3Parser)
                # update cluster exception classes
                self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES
                self._parser.on_connect(self)
            # HELLO AUTH always takes username+password; default the username.
            if len(auth_args) == 1:
                auth_args = ["default", auth_args[0]]
            # avoid checking health here -- PING will fail if we try
            # to check the health prior to the AUTH
            await self.send_command(
                "HELLO", self.protocol, "AUTH", *auth_args, check_health=False
            )
            response = await self.read_response()
            # The "proto" key may be bytes or str depending on decode_responses.
            if response.get(b"proto") != int(self.protocol) and response.get(
                "proto"
            ) != int(self.protocol):
                raise ConnectionError("Invalid RESP version")
        # avoid checking health here -- PING will fail if we try
        # to check the health prior to the AUTH
        elif auth_args:
            await self.send_command("AUTH", *auth_args, check_health=False)
            try:
                auth_response = await self.read_response()
            except AuthenticationWrongNumberOfArgsError:
                # a username and password were specified but the Redis
                # server seems to be < 6.0.0 which expects a single password
                # arg. retry auth with just the password.
                # https://github.com/andymccurdy/redis-py/issues/1274
                await self.send_command("AUTH", auth_args[-1], check_health=False)
                auth_response = await self.read_response()

            if str_if_bytes(auth_response) != "OK":
                raise AuthenticationError("Invalid Username or Password")

        # if resp version is specified, switch to it
        elif self.protocol not in [2, "2"]:
            if isinstance(self._parser, _AsyncRESP2Parser):
                self.set_parser(_AsyncRESP3Parser)
                # update cluster exception classes
                self._parser.EXCEPTION_CLASSES = parser.EXCEPTION_CLASSES
                self._parser.on_connect(self)
            await self.send_command("HELLO", self.protocol, check_health=check_health)
            response = await self.read_response()
            # if response.get(b"proto") != self.protocol and response.get(
            #     "proto"
            # ) != self.protocol:
            #     raise ConnectionError("Invalid RESP version")

        # if a client_name is given, set it
        if self.client_name:
            await self.send_command(
                "CLIENT",
                "SETNAME",
                self.client_name,
                check_health=check_health,
            )
            if str_if_bytes(await self.read_response()) != "OK":
                raise ConnectionError("Error setting client name")

        # Set the library name and version from driver_info, pipeline for lower startup latency
        lib_name_sent = False
        lib_version_sent = False
        if self.driver_info and self.driver_info.formatted_name:
            await self.send_command(
                "CLIENT",
                "SETINFO",
                "LIB-NAME",
                self.driver_info.formatted_name,
                check_health=check_health,
            )
            lib_name_sent = True
        if self.driver_info and self.driver_info.lib_version:
            await self.send_command(
                "CLIENT",
                "SETINFO",
                "LIB-VER",
                self.driver_info.lib_version,
                check_health=check_health,
            )
            lib_version_sent = True

        # if a database is specified, switch to it. Also pipeline this
        if self.db:
            await self.send_command("SELECT", self.db, check_health=check_health)

        # read responses from pipeline
        # SETINFO failures (e.g. old servers) are deliberately ignored.
        for _ in range(sum([lib_name_sent, lib_version_sent])):
            try:
                await self.read_response()
            except ResponseError:
                pass

        if self.db:
            if str_if_bytes(await self.read_response()) != "OK":
                raise ConnectionError("Invalid Database")
    async def disconnect(
        self,
        nowait: bool = False,
        error: Optional[Exception] = None,
        failure_count: Optional[int] = None,
        health_check_failed: bool = False,
    ) -> None:
        """Disconnects from the Redis server

        :param nowait: when True do not wait for the transport close to finish
            (used when tearing down after errors).
        :param error: the error that caused the disconnect, if any; drives the
            close-reason recorded in metrics.
        :param failure_count: number of retry failures so far; when it exceeds
            the retry budget an error metric is also recorded.
        :param health_check_failed: record HEALTHCHECK_FAILED instead of ERROR.
        :raises TimeoutError: if closing exceeds socket_connect_timeout.
        """
        try:
            # The whole teardown is bounded by the connect timeout.
            async with async_timeout(self.socket_connect_timeout):
                self._parser.on_disconnect()

                # Reset the reconnect flag
                self.reset_should_reconnect()
                if not self.is_connected:
                    return
                try:
                    self._writer.close()  # type: ignore[union-attr]
                    # wait for close to finish, except when handling errors and
                    # forcefully disconnecting.
                    if not nowait:
                        await self._writer.wait_closed()  # type: ignore[union-attr]
                except OSError:
                    pass
                finally:
                    self._reader = None
                    self._writer = None
        except asyncio.TimeoutError:
            raise TimeoutError(
                f"Timed out closing connection after {self.socket_connect_timeout}"
            ) from None

        # Metrics: classify why the connection was closed.
        if error:
            if health_check_failed:
                close_reason = CloseReason.HEALTHCHECK_FAILED
            else:
                close_reason = CloseReason.ERROR
            if failure_count is not None and failure_count > self.retry.get_retries():
                await record_error_count(
                    server_address=getattr(self, "host", None),
                    server_port=getattr(self, "port", None),
                    network_peer_address=getattr(self, "host", None),
                    network_peer_port=getattr(self, "port", None),
                    error_type=error,
                    retry_attempts=failure_count,
                )
            await record_connection_closed(
                close_reason=close_reason,
                error_type=error,
            )
        else:
            await record_connection_closed(
                close_reason=CloseReason.APPLICATION_CLOSE,
            )
    async def _send_ping(self):
        """Send PING, expect PONG in return"""
        # check_health=False: this *is* the health check, so don't recurse.
        await self.send_command("PING", check_health=False)
        if str_if_bytes(await self.read_response()) != "PONG":
            raise ConnectionError("Bad response from PING health check")
    async def _ping_failed(self, error, failure_count):
        """Function to call when PING fails"""
        # Tear down and record the close as a failed health check.
        await self.disconnect(
            error=error, failure_count=failure_count, health_check_failed=True
        )
    async def check_health(self):
        """Check the health of the connection with a PING/PONG"""
        # Only ping when health checks are enabled (interval > 0) and the
        # deadline set by the last read_response() has passed.
        if (
            self.health_check_interval
            and asyncio.get_running_loop().time() > self.next_health_check
        ):
            await self.retry.call_with_retry(
                self._send_ping, self._ping_failed, with_failure_count=True
            )
    async def _send_packed_command(self, command: Iterable[bytes]) -> None:
        """Low-level write: queue all chunks then flush the stream buffer."""
        self._writer.writelines(command)
        await self._writer.drain()
  579. async def send_packed_command(
  580. self, command: Union[bytes, str, Iterable[bytes]], check_health: bool = True
  581. ) -> None:
  582. if not self.is_connected:
  583. await self.connect_check_health(check_health=False)
  584. if check_health:
  585. await self.check_health()
  586. try:
  587. if isinstance(command, str):
  588. command = command.encode()
  589. if isinstance(command, bytes):
  590. command = [command]
  591. if self.socket_timeout:
  592. await asyncio.wait_for(
  593. self._send_packed_command(command), self.socket_timeout
  594. )
  595. else:
  596. self._writer.writelines(command)
  597. await self._writer.drain()
  598. except asyncio.TimeoutError:
  599. await self.disconnect(nowait=True)
  600. raise TimeoutError("Timeout writing to socket") from None
  601. except OSError as e:
  602. await self.disconnect(nowait=True)
  603. if len(e.args) == 1:
  604. err_no, errmsg = "UNKNOWN", e.args[0]
  605. else:
  606. err_no = e.args[0]
  607. errmsg = e.args[1]
  608. raise ConnectionError(
  609. f"Error {err_no} while writing to socket. {errmsg}."
  610. ) from e
  611. except BaseException:
  612. # BaseExceptions can be raised when a socket send operation is not
  613. # finished, e.g. due to a timeout. Ideally, a caller could then re-try
  614. # to send un-sent data. However, the send_packed_command() API
  615. # does not support it so there is no point in keeping the connection open.
  616. await self.disconnect(nowait=True)
  617. raise
    async def send_command(self, *args: Any, **kwargs: Any) -> None:
        """Pack and send a command to the Redis server"""
        # Only the 'check_health' keyword is honoured (default True); any
        # other kwargs are silently ignored.
        await self.send_packed_command(
            self.pack_command(*args), check_health=kwargs.get("check_health", True)
        )
    async def can_read_destructive(self):
        """Poll the socket to see if there's data that can be read."""
        try:
            return await self._parser.can_read_destructive()
        except OSError as e:
            # Socket failure while polling: force-close and surface the error
            # with endpoint context.
            await self.disconnect(nowait=True)
            host_error = self._host_error()
            raise ConnectionError(f"Error while reading from {host_error}: {e.args}")
    async def read_response(
        self,
        disable_decoding: bool = False,
        timeout: Optional[float] = None,
        *,
        disconnect_on_error: bool = True,
        push_request: Optional[bool] = False,
    ):
        """Read the response from a previously sent command

        ``timeout`` overrides ``socket_timeout`` for this single read; when a
        caller-supplied timeout expires, ``None`` is returned (so the read can
        be retried) instead of raising. ``push_request`` is only forwarded to
        the parser on RESP3 connections.
        """
        read_timeout = timeout if timeout is not None else self.socket_timeout
        host_error = self._host_error()
        try:
            # Four branches: with/without a read timeout x RESP3/RESP2
            # (only RESP3 parsers accept push_request).
            if read_timeout is not None and self.protocol in ["3", 3]:
                async with async_timeout(read_timeout):
                    response = await self._parser.read_response(
                        disable_decoding=disable_decoding, push_request=push_request
                    )
            elif read_timeout is not None:
                async with async_timeout(read_timeout):
                    response = await self._parser.read_response(
                        disable_decoding=disable_decoding
                    )
            elif self.protocol in ["3", 3]:
                response = await self._parser.read_response(
                    disable_decoding=disable_decoding, push_request=push_request
                )
            else:
                response = await self._parser.read_response(
                    disable_decoding=disable_decoding
                )
        except asyncio.TimeoutError:
            if timeout is not None:
                # user requested timeout, return None. Operation can be retried
                return None
            # it was a self.socket_timeout error.
            if disconnect_on_error:
                await self.disconnect(nowait=True)
            raise TimeoutError(f"Timeout reading from {host_error}")
        except OSError as e:
            if disconnect_on_error:
                await self.disconnect(nowait=True)
            raise ConnectionError(f"Error while reading from {host_error} : {e.args}")
        except BaseException:
            # Also by default close in case of BaseException. A lot of code
            # relies on this behaviour when doing Command/Response pairs.
            # See #1128.
            if disconnect_on_error:
                await self.disconnect(nowait=True)
            raise

        # A successful read postpones the next health check.
        if self.health_check_interval:
            next_time = asyncio.get_running_loop().time() + self.health_check_interval
            self.next_health_check = next_time

        # Server error replies are parsed into ResponseError instances; raise
        # them here rather than returning them.
        if isinstance(response, ResponseError):
            raise response from None
        return response
  686. def pack_command(self, *args: EncodableT) -> List[bytes]:
  687. """Pack a series of arguments into the Redis protocol"""
  688. output = []
  689. # the client might have included 1 or more literal arguments in
  690. # the command name, e.g., 'CONFIG GET'. The Redis server expects these
  691. # arguments to be sent separately, so split the first argument
  692. # manually. These arguments should be bytestrings so that they are
  693. # not encoded.
  694. assert not isinstance(args[0], float)
  695. if isinstance(args[0], str):
  696. args = tuple(args[0].encode().split()) + args[1:]
  697. elif b" " in args[0]:
  698. args = tuple(args[0].split()) + args[1:]
  699. buff = SYM_EMPTY.join((SYM_STAR, str(len(args)).encode(), SYM_CRLF))
  700. buffer_cutoff = self._buffer_cutoff
  701. for arg in map(self.encoder.encode, args):
  702. # to avoid large string mallocs, chunk the command into the
  703. # output list if we're sending large values or memoryviews
  704. arg_length = len(arg)
  705. if (
  706. len(buff) > buffer_cutoff
  707. or arg_length > buffer_cutoff
  708. or isinstance(arg, memoryview)
  709. ):
  710. buff = SYM_EMPTY.join(
  711. (buff, SYM_DOLLAR, str(arg_length).encode(), SYM_CRLF)
  712. )
  713. output.append(buff)
  714. output.append(arg)
  715. buff = SYM_CRLF
  716. else:
  717. buff = SYM_EMPTY.join(
  718. (
  719. buff,
  720. SYM_DOLLAR,
  721. str(arg_length).encode(),
  722. SYM_CRLF,
  723. arg,
  724. SYM_CRLF,
  725. )
  726. )
  727. output.append(buff)
  728. return output
  729. def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> List[bytes]:
  730. """Pack multiple commands into the Redis protocol"""
  731. output: List[bytes] = []
  732. pieces: List[bytes] = []
  733. buffer_length = 0
  734. buffer_cutoff = self._buffer_cutoff
  735. for cmd in commands:
  736. for chunk in self.pack_command(*cmd):
  737. chunklen = len(chunk)
  738. if (
  739. buffer_length > buffer_cutoff
  740. or chunklen > buffer_cutoff
  741. or isinstance(chunk, memoryview)
  742. ):
  743. if pieces:
  744. output.append(SYM_EMPTY.join(pieces))
  745. buffer_length = 0
  746. pieces = []
  747. if chunklen > buffer_cutoff or isinstance(chunk, memoryview):
  748. output.append(chunk)
  749. else:
  750. pieces.append(chunk)
  751. buffer_length += chunklen
  752. if pieces:
  753. output.append(SYM_EMPTY.join(pieces))
  754. return output
def _socket_is_empty(self) -> bool:
    """Check if the socket is empty.

    NOTE(review): peeks at the reader's private ``_buffer`` attribute —
    there is no public API for this check; confirm against the
    asyncio.StreamReader implementation in use.
    """
    return len(self._reader._buffer) == 0
async def process_invalidation_messages(self):
    """Drain pending bytes from the socket, reading them as push messages.

    Loops until the reader's buffer is empty; each iteration reads one
    response with ``push_request=True`` so RESP3 invalidation pushes are
    consumed.
    """
    while not self._socket_is_empty():
        await self.read_response(push_request=True)
def set_re_auth_token(self, token: TokenInterface) -> None:
    """Store a token to be used by ``re_auth`` on the next opportunity."""
    self._re_auth_token = token
  763. async def re_auth(self):
  764. if self._re_auth_token is not None:
  765. await self.send_command(
  766. "AUTH",
  767. self._re_auth_token.try_get("oid"),
  768. self._re_auth_token.get_value(),
  769. )
  770. await self.read_response()
  771. self._re_auth_token = None
  772. class Connection(AbstractConnection):
  773. "Manages TCP communication to and from a Redis server"
  774. def __init__(
  775. self,
  776. *,
  777. host: str = "localhost",
  778. port: Union[str, int] = 6379,
  779. socket_keepalive: bool = False,
  780. socket_keepalive_options: Optional[Mapping[int, Union[int, bytes]]] = None,
  781. socket_type: int = 0,
  782. **kwargs,
  783. ):
  784. self.host = host
  785. self.port = int(port)
  786. self.socket_keepalive = socket_keepalive
  787. self.socket_keepalive_options = socket_keepalive_options or {}
  788. self.socket_type = socket_type
  789. super().__init__(**kwargs)
  790. def repr_pieces(self):
  791. pieces = [("host", self.host), ("port", self.port), ("db", self.db)]
  792. if self.client_name:
  793. pieces.append(("client_name", self.client_name))
  794. return pieces
  795. def _connection_arguments(self) -> Mapping:
  796. return {"host": self.host, "port": self.port}
  797. async def _connect(self):
  798. """Create a TCP socket connection"""
  799. async with async_timeout(self.socket_connect_timeout):
  800. reader, writer = await asyncio.open_connection(
  801. **self._connection_arguments()
  802. )
  803. self._reader = reader
  804. self._writer = writer
  805. sock = writer.transport.get_extra_info("socket")
  806. if sock:
  807. sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
  808. try:
  809. # TCP_KEEPALIVE
  810. if self.socket_keepalive:
  811. sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
  812. for k, v in self.socket_keepalive_options.items():
  813. sock.setsockopt(socket.SOL_TCP, k, v)
  814. except (OSError, TypeError):
  815. # `socket_keepalive_options` might contain invalid options
  816. # causing an error. Do not leave the connection open.
  817. writer.close()
  818. raise
  819. def _host_error(self) -> str:
  820. return f"{self.host}:{self.port}"
class SSLConnection(Connection):
    """Manages SSL connections to and from the Redis server(s).

    This class extends the Connection class, adding SSL functionality, and making
    use of ssl.SSLContext (https://docs.python.org/3/library/ssl.html#ssl.SSLContext)
    """

    def __init__(
        self,
        ssl_keyfile: Optional[str] = None,
        ssl_certfile: Optional[str] = None,
        ssl_cert_reqs: Union[str, ssl.VerifyMode] = "required",
        ssl_include_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
        ssl_exclude_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
        ssl_ca_certs: Optional[str] = None,
        ssl_ca_data: Optional[str] = None,
        ssl_ca_path: Optional[str] = None,
        ssl_check_hostname: bool = True,
        ssl_min_version: Optional[TLSVersion] = None,
        ssl_ciphers: Optional[str] = None,
        ssl_password: Optional[str] = None,
        **kwargs,
    ):
        if not SSL_AVAILABLE:
            raise RedisError("Python wasn't built with SSL support")
        # All TLS configuration is delegated to a RedisSSLContext, which
        # builds the real ssl.SSLContext lazily on first use.
        self.ssl_context: RedisSSLContext = RedisSSLContext(
            keyfile=ssl_keyfile,
            certfile=ssl_certfile,
            cert_reqs=ssl_cert_reqs,
            include_verify_flags=ssl_include_verify_flags,
            exclude_verify_flags=ssl_exclude_verify_flags,
            ca_certs=ssl_ca_certs,
            ca_data=ssl_ca_data,
            ca_path=ssl_ca_path,
            check_hostname=ssl_check_hostname,
            min_version=ssl_min_version,
            ciphers=ssl_ciphers,
            password=ssl_password,
        )
        super().__init__(**kwargs)

    def _connection_arguments(self) -> Mapping:
        """Extend host/port with the SSL context so asyncio performs TLS."""
        kwargs = super()._connection_arguments()
        kwargs["ssl"] = self.ssl_context.get()
        return kwargs

    # Read-only views over the underlying RedisSSLContext configuration.
    @property
    def keyfile(self):
        return self.ssl_context.keyfile

    @property
    def certfile(self):
        return self.ssl_context.certfile

    @property
    def cert_reqs(self):
        return self.ssl_context.cert_reqs

    @property
    def include_verify_flags(self):
        return self.ssl_context.include_verify_flags

    @property
    def exclude_verify_flags(self):
        return self.ssl_context.exclude_verify_flags

    @property
    def ca_certs(self):
        return self.ssl_context.ca_certs

    @property
    def ca_data(self):
        return self.ssl_context.ca_data

    @property
    def check_hostname(self):
        return self.ssl_context.check_hostname

    @property
    def min_version(self):
        return self.ssl_context.min_version
  890. class RedisSSLContext:
  891. __slots__ = (
  892. "keyfile",
  893. "certfile",
  894. "cert_reqs",
  895. "include_verify_flags",
  896. "exclude_verify_flags",
  897. "ca_certs",
  898. "ca_data",
  899. "ca_path",
  900. "context",
  901. "check_hostname",
  902. "min_version",
  903. "ciphers",
  904. "password",
  905. )
  906. def __init__(
  907. self,
  908. keyfile: Optional[str] = None,
  909. certfile: Optional[str] = None,
  910. cert_reqs: Optional[Union[str, ssl.VerifyMode]] = None,
  911. include_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
  912. exclude_verify_flags: Optional[List["ssl.VerifyFlags"]] = None,
  913. ca_certs: Optional[str] = None,
  914. ca_data: Optional[str] = None,
  915. ca_path: Optional[str] = None,
  916. check_hostname: bool = False,
  917. min_version: Optional[TLSVersion] = None,
  918. ciphers: Optional[str] = None,
  919. password: Optional[str] = None,
  920. ):
  921. if not SSL_AVAILABLE:
  922. raise RedisError("Python wasn't built with SSL support")
  923. self.keyfile = keyfile
  924. self.certfile = certfile
  925. if cert_reqs is None:
  926. cert_reqs = ssl.CERT_NONE
  927. elif isinstance(cert_reqs, str):
  928. CERT_REQS = { # noqa: N806
  929. "none": ssl.CERT_NONE,
  930. "optional": ssl.CERT_OPTIONAL,
  931. "required": ssl.CERT_REQUIRED,
  932. }
  933. if cert_reqs not in CERT_REQS:
  934. raise RedisError(
  935. f"Invalid SSL Certificate Requirements Flag: {cert_reqs}"
  936. )
  937. cert_reqs = CERT_REQS[cert_reqs]
  938. self.cert_reqs = cert_reqs
  939. self.include_verify_flags = include_verify_flags
  940. self.exclude_verify_flags = exclude_verify_flags
  941. self.ca_certs = ca_certs
  942. self.ca_data = ca_data
  943. self.ca_path = ca_path
  944. self.check_hostname = (
  945. check_hostname if self.cert_reqs != ssl.CERT_NONE else False
  946. )
  947. self.min_version = min_version
  948. self.ciphers = ciphers
  949. self.password = password
  950. self.context: Optional[SSLContext] = None
  951. def get(self) -> SSLContext:
  952. if not self.context:
  953. context = ssl.create_default_context()
  954. context.check_hostname = self.check_hostname
  955. context.verify_mode = self.cert_reqs
  956. if self.include_verify_flags:
  957. for flag in self.include_verify_flags:
  958. context.verify_flags |= flag
  959. if self.exclude_verify_flags:
  960. for flag in self.exclude_verify_flags:
  961. context.verify_flags &= ~flag
  962. if self.certfile or self.keyfile:
  963. context.load_cert_chain(
  964. certfile=self.certfile,
  965. keyfile=self.keyfile,
  966. password=self.password,
  967. )
  968. if self.ca_certs or self.ca_data or self.ca_path:
  969. context.load_verify_locations(
  970. cafile=self.ca_certs, capath=self.ca_path, cadata=self.ca_data
  971. )
  972. if self.min_version is not None:
  973. context.minimum_version = self.min_version
  974. if self.ciphers is not None:
  975. context.set_ciphers(self.ciphers)
  976. self.context = context
  977. return self.context
class UnixDomainSocketConnection(AbstractConnection):
    "Manages UDS communication to and from a Redis server"

    def __init__(self, *, path: str = "", **kwargs):
        # path: filesystem location of the Redis unix domain socket.
        self.path = path
        super().__init__(**kwargs)

    def repr_pieces(self) -> Iterable[Tuple[str, Union[str, int]]]:
        """Key/value pairs used to build this connection's repr."""
        pieces = [("path", self.path), ("db", self.db)]
        if self.client_name:
            pieces.append(("client_name", self.client_name))
        return pieces

    async def _connect(self):
        """Open the unix-domain socket and run the connection handshake."""
        async with async_timeout(self.socket_connect_timeout):
            reader, writer = await asyncio.open_unix_connection(path=self.path)
        self._reader = reader
        self._writer = writer
        # NOTE(review): unlike Connection._connect, this calls on_connect()
        # directly here — confirm the base class's connect() path does not
        # invoke it a second time.
        await self.on_connect()

    def _host_error(self) -> str:
        # Used in error messages in place of "host:port".
        return self.path
  996. FALSE_STRINGS = ("0", "F", "FALSE", "N", "NO")
  997. def to_bool(value) -> Optional[bool]:
  998. if value is None or value == "":
  999. return None
  1000. if isinstance(value, str) and value.upper() in FALSE_STRINGS:
  1001. return False
  1002. return bool(value)
  1003. def parse_ssl_verify_flags(value):
  1004. # flags are passed in as a string representation of a list,
  1005. # e.g. VERIFY_X509_STRICT, VERIFY_X509_PARTIAL_CHAIN
  1006. verify_flags_str = value.replace("[", "").replace("]", "")
  1007. verify_flags = []
  1008. for flag in verify_flags_str.split(","):
  1009. flag = flag.strip()
  1010. if not hasattr(VerifyFlags, flag):
  1011. raise ValueError(f"Invalid ssl verify flag: {flag}")
  1012. verify_flags.append(getattr(VerifyFlags, flag))
  1013. return verify_flags
# Coercion functions applied to querystring values by ``parse_url``.
# Keys are option names; values convert the unquoted string to the proper
# Python type. Options not listed here are passed through as plain strings.
URL_QUERY_ARGUMENT_PARSERS: Mapping[str, Callable[..., object]] = MappingProxyType(
    {
        "db": int,
        "socket_timeout": float,
        "socket_connect_timeout": float,
        "socket_keepalive": to_bool,
        "retry_on_timeout": to_bool,
        "max_connections": int,
        "health_check_interval": int,
        "ssl_check_hostname": to_bool,
        "ssl_include_verify_flags": parse_ssl_verify_flags,
        "ssl_exclude_verify_flags": parse_ssl_verify_flags,
        "timeout": float,
    }
)
class ConnectKwargs(TypedDict, total=False):
    """Connection keyword arguments produced by ``parse_url``.

    ``total=False``: every key is optional and present only when the URL
    actually supplied a value for it.
    """

    username: str
    password: str
    connection_class: Type[AbstractConnection]
    host: str
    port: int
    db: int
    path: str
  1037. def parse_url(url: str) -> ConnectKwargs:
  1038. parsed: ParseResult = urlparse(url)
  1039. kwargs: ConnectKwargs = {}
  1040. for name, value_list in parse_qs(parsed.query).items():
  1041. if value_list and len(value_list) > 0:
  1042. value = unquote(value_list[0])
  1043. parser = URL_QUERY_ARGUMENT_PARSERS.get(name)
  1044. if parser:
  1045. try:
  1046. kwargs[name] = parser(value)
  1047. except (TypeError, ValueError):
  1048. raise ValueError(f"Invalid value for '{name}' in connection URL.")
  1049. else:
  1050. kwargs[name] = value
  1051. if parsed.username:
  1052. kwargs["username"] = unquote(parsed.username)
  1053. if parsed.password:
  1054. kwargs["password"] = unquote(parsed.password)
  1055. # We only support redis://, rediss:// and unix:// schemes.
  1056. if parsed.scheme == "unix":
  1057. if parsed.path:
  1058. kwargs["path"] = unquote(parsed.path)
  1059. kwargs["connection_class"] = UnixDomainSocketConnection
  1060. elif parsed.scheme in ("redis", "rediss"):
  1061. if parsed.hostname:
  1062. kwargs["host"] = unquote(parsed.hostname)
  1063. if parsed.port:
  1064. kwargs["port"] = int(parsed.port)
  1065. # If there's a path argument, use it as the db argument if a
  1066. # querystring value wasn't specified
  1067. if parsed.path and "db" not in kwargs:
  1068. try:
  1069. kwargs["db"] = int(unquote(parsed.path).replace("/", ""))
  1070. except (AttributeError, ValueError):
  1071. pass
  1072. if parsed.scheme == "rediss":
  1073. kwargs["connection_class"] = SSLConnection
  1074. else:
  1075. valid_schemes = "redis://, rediss://, unix://"
  1076. raise ValueError(
  1077. f"Redis URL must specify one of the following schemes ({valid_schemes})"
  1078. )
  1079. return kwargs
# Bound TypeVar so classmethods like ``from_url`` are typed as returning
# the invoking ConnectionPool subclass, not the base class.
_CP = TypeVar("_CP", bound="ConnectionPool")
class ConnectionPool:
    """
    Create a connection pool. If ``max_connections`` is set, then this
    object raises :py:class:`~redis.ConnectionError` when the pool's
    limit is reached.

    By default, TCP connections are created unless ``connection_class``
    is specified. Use :py:class:`~redis.UnixDomainSocketConnection` for
    unix sockets.

    :py:class:`~redis.SSLConnection` can be used for SSL enabled connections.

    Any additional keyword arguments are passed to the constructor of
    ``connection_class``.
    """

    @classmethod
    def from_url(cls: Type[_CP], url: str, **kwargs) -> _CP:
        """
        Return a connection pool configured from the given URL.

        For example::

            redis://[[username]:[password]]@localhost:6379/0
            rediss://[[username]:[password]]@localhost:6379/0
            unix://[username@]/path/to/socket.sock?db=0[&password=password]

        Three URL schemes are supported:

        - `redis://` creates a TCP socket connection. See more at:
          <https://www.iana.org/assignments/uri-schemes/prov/redis>
        - `rediss://` creates a SSL wrapped TCP socket connection. See more at:
          <https://www.iana.org/assignments/uri-schemes/prov/rediss>
        - ``unix://``: creates a Unix Domain Socket connection.

        The username, password, hostname, path and all querystring values
        are passed through urllib.parse.unquote in order to replace any
        percent-encoded values with their corresponding characters.

        There are several ways to specify a database number. The first value
        found will be used:

        1. A ``db`` querystring option, e.g. redis://localhost?db=0

        2. If using the redis:// or rediss:// schemes, the path argument
           of the url, e.g. redis://localhost/0

        3. A ``db`` keyword argument to this function.

        If none of these options are specified, the default db=0 is used.

        All querystring options are cast to their appropriate Python types.
        Boolean arguments can be specified with string values "True"/"False"
        or "Yes"/"No". Values that cannot be properly cast cause a
        ``ValueError`` to be raised. Once parsed, the querystring arguments
        and keyword arguments are passed to the ``ConnectionPool``'s
        class initializer. In the case of conflicting arguments, querystring
        arguments always win.
        """
        url_options = parse_url(url)
        # Querystring/URL options override caller-supplied kwargs.
        kwargs.update(url_options)
        return cls(**kwargs)

    def __init__(
        self,
        connection_class: Type[AbstractConnection] = Connection,
        max_connections: Optional[int] = None,
        **connection_kwargs,
    ):
        # Default to an effectively unbounded pool.
        max_connections = max_connections or 2**31
        if not isinstance(max_connections, int) or max_connections < 0:
            raise ValueError('"max_connections" must be a positive integer')

        self.connection_class = connection_class
        self.connection_kwargs = connection_kwargs
        self.max_connections = max_connections

        # Idle connections ready to hand out.
        self._available_connections: List[AbstractConnection] = []
        # Connections currently checked out by callers.
        self._in_use_connections: Set[AbstractConnection] = set()
        self.encoder_class = self.connection_kwargs.get("encoder_class", Encoder)
        self._lock = asyncio.Lock()
        self._event_dispatcher = self.connection_kwargs.get("event_dispatcher", None)
        if self._event_dispatcher is None:
            self._event_dispatcher = EventDispatcher()

    # Keys that should be redacted in __repr__ to avoid exposing sensitive information
    SENSITIVE_REPR_KEYS = frozenset(
        {
            "password",
            "username",
            "ssl_password",
            "credential_provider",
        }
    )

    def __repr__(self):
        """Repr showing the connection class and (redacted) kwargs."""
        conn_kwargs = ",".join(
            [
                f"{k}={'<REDACTED>' if k in self.SENSITIVE_REPR_KEYS else v}"
                for k, v in self.connection_kwargs.items()
            ]
        )
        return (
            f"<{self.__class__.__module__}.{self.__class__.__name__}"
            f"(<{self.connection_class.__module__}.{self.connection_class.__name__}"
            f"({conn_kwargs})>)>"
        )

    def reset(self):
        """Drop all tracked connections, recording observability metrics."""
        # Record metrics for connections being removed before clearing
        # (only if attributes exist - they won't during __init__)
        if hasattr(self, "_available_connections") and hasattr(
            self, "_in_use_connections"
        ):
            idle_count = len(self._available_connections)
            in_use_count = len(self._in_use_connections)
            if idle_count > 0 or in_use_count > 0:
                pool_name = get_pool_name(self)
                # Note: Using sync version since reset() is sync
                from redis.observability.recorder import (
                    record_connection_count as sync_record_connection_count,
                )

                if idle_count > 0:
                    sync_record_connection_count(
                        pool_name=pool_name,
                        connection_state=ConnectionState.IDLE,
                        counter=-idle_count,
                    )
                if in_use_count > 0:
                    sync_record_connection_count(
                        pool_name=pool_name,
                        connection_state=ConnectionState.USED,
                        counter=-in_use_count,
                    )
        self._available_connections = []
        # NOTE(review): reset() uses a weakref.WeakSet here while __init__
        # uses a plain set — confirm this asymmetry is intentional.
        self._in_use_connections = weakref.WeakSet()

    def __del__(self) -> None:
        """Clean up connection pool and record metrics when garbage collected."""
        try:
            # Attributes may be missing if __init__ never completed.
            if not hasattr(self, "_available_connections") or not hasattr(
                self, "_in_use_connections"
            ):
                return
            idle_count = len(self._available_connections)
            in_use_count = len(self._in_use_connections)
            if idle_count > 0 or in_use_count > 0:
                pool_name = get_pool_name(self)
                # Note: Using sync version since __del__ is sync
                from redis.observability.recorder import (
                    record_connection_count as sync_record_connection_count,
                )

                if idle_count > 0:
                    sync_record_connection_count(
                        pool_name=pool_name,
                        connection_state=ConnectionState.IDLE,
                        counter=-idle_count,
                    )
                if in_use_count > 0:
                    sync_record_connection_count(
                        pool_name=pool_name,
                        connection_state=ConnectionState.USED,
                        counter=-in_use_count,
                    )
        except Exception:
            # Never raise from __del__.
            pass

    def can_get_connection(self) -> bool:
        """Return True if a connection can be retrieved from the pool."""
        # NOTE(review): actually returns the list itself (truthy) when idle
        # connections exist; callers rely only on truthiness (e.g. as a
        # Condition.wait_for predicate in BlockingConnectionPool).
        return (
            self._available_connections
            or len(self._in_use_connections) < self.max_connections
        )

    @deprecated_args(
        args_to_warn=["*"],
        reason="Use get_connection() without args instead",
        version="5.3.0",
    )
    async def get_connection(self, command_name=None, *keys, **options):
        """Get a connected connection from the pool"""
        # Track connection count before to detect if a new connection is created
        async with self._lock:
            connections_before = len(self._available_connections) + len(
                self._in_use_connections
            )
            start_time_created = time.monotonic()
            connection = self.get_available_connection()
            connections_after = len(self._available_connections) + len(
                self._in_use_connections
            )
            is_created = connections_after > connections_before

            # Record state transition for observability
            # This ensures counters stay balanced if ensure_connection() fails and release() is called
            pool_name = get_pool_name(self)
            if is_created:
                # New connection created and acquired: just USED +1
                await record_connection_count(
                    pool_name=pool_name,
                    connection_state=ConnectionState.USED,
                    counter=1,
                )
            else:
                # Existing connection acquired from pool: IDLE -> USED
                await record_connection_count(
                    pool_name=pool_name,
                    connection_state=ConnectionState.IDLE,
                    counter=-1,
                )
                await record_connection_count(
                    pool_name=pool_name,
                    connection_state=ConnectionState.USED,
                    counter=1,
                )

        # We now perform the connection check outside of the lock.
        try:
            await self.ensure_connection(connection)
            if is_created:
                await record_connection_create_time(
                    connection_pool=self,
                    duration_seconds=time.monotonic() - start_time_created,
                )
            return connection
        except BaseException:
            # Return the connection so the USED counter is decremented.
            await self.release(connection)
            raise

    def get_available_connection(self):
        """Get a connection from the pool, without making sure it is connected"""
        try:
            connection = self._available_connections.pop()
        except IndexError:
            # Pool is empty: create a new connection unless at the limit.
            if len(self._in_use_connections) >= self.max_connections:
                raise MaxConnectionsError("Too many connections") from None
            connection = self.make_connection()
        self._in_use_connections.add(connection)
        return connection

    def get_encoder(self):
        """Return an encoder based on encoding settings"""
        kwargs = self.connection_kwargs
        return self.encoder_class(
            encoding=kwargs.get("encoding", "utf-8"),
            encoding_errors=kwargs.get("encoding_errors", "strict"),
            decode_responses=kwargs.get("decode_responses", False),
        )

    def make_connection(self):
        """Create a new connection. Can be overridden by child classes."""
        # Note: We don't record IDLE here because async uses a sync make_connection
        # but async record_connection_count. The recording is handled in get_connection.
        return self.connection_class(**self.connection_kwargs)

    async def ensure_connection(self, connection: AbstractConnection):
        """Ensure that the connection object is connected and valid"""
        await connection.connect()
        # connections that the pool provides should be ready to send
        # a command. if not, the connection was either returned to the
        # pool before all data has been read or the socket has been
        # closed. either way, reconnect and verify everything is good.
        try:
            if await connection.can_read_destructive():
                raise ConnectionError("Connection has data") from None
        except (ConnectionError, TimeoutError, OSError):
            await connection.disconnect()
            await connection.connect()
            if await connection.can_read_destructive():
                raise ConnectionError("Connection not ready") from None

    async def release(self, connection: AbstractConnection):
        """Releases the connection back to the pool"""
        # Connections should always be returned to the correct pool,
        # not doing so is an error that will cause an exception here.
        self._in_use_connections.remove(connection)
        # Connections flagged for reconnect are closed before being reused.
        if connection.should_reconnect():
            await connection.disconnect()
        self._available_connections.append(connection)
        await self._event_dispatcher.dispatch_async(
            AsyncAfterConnectionReleasedEvent(connection)
        )
        # Record state transition: USED -> IDLE
        pool_name = get_pool_name(self)
        await record_connection_count(
            pool_name=pool_name,
            connection_state=ConnectionState.USED,
            counter=-1,
        )
        await record_connection_count(
            pool_name=pool_name,
            connection_state=ConnectionState.IDLE,
            counter=1,
        )

    async def disconnect(self, inuse_connections: bool = True):
        """
        Disconnects connections in the pool

        If ``inuse_connections`` is True, disconnect connections that are
        current in use, potentially by other tasks. Otherwise only disconnect
        connections that are idle in the pool.
        """
        if inuse_connections:
            connections: Iterable[AbstractConnection] = chain(
                self._available_connections, self._in_use_connections
            )
        else:
            connections = self._available_connections
        # Disconnect concurrently; collect exceptions rather than failing fast.
        resp = await asyncio.gather(
            *(connection.disconnect() for connection in connections),
            return_exceptions=True,
        )
        # Re-raise the first failure, if any, after all disconnects ran.
        exc = next((r for r in resp if isinstance(r, BaseException)), None)
        if exc:
            raise exc

    async def update_active_connections_for_reconnect(self):
        """
        Mark all active connections for reconnect.
        """
        async with self._lock:
            for conn in self._in_use_connections:
                conn.mark_for_reconnect()

    async def aclose(self) -> None:
        """Close the pool, disconnecting all connections"""
        await self.disconnect()

    def set_retry(self, retry: "Retry") -> None:
        """Apply a retry policy to every connection the pool tracks."""
        for conn in self._available_connections:
            conn.retry = retry
        for conn in self._in_use_connections:
            conn.retry = retry

    async def re_auth_callback(self, token: TokenInterface):
        """Re-authenticate idle connections now; defer in-use ones via token."""
        async with self._lock:
            for conn in self._available_connections:
                await conn.retry.call_with_retry(
                    lambda: conn.send_command(
                        "AUTH", token.try_get("oid"), token.get_value()
                    ),
                    lambda error: self._mock(error),
                )
                await conn.retry.call_with_retry(
                    lambda: conn.read_response(), lambda error: self._mock(error)
                )
            for conn in self._in_use_connections:
                conn.set_re_auth_token(token)

    async def _mock(self, error: RedisError):
        """
        Dummy functions, needs to be passed as error callback to retry object.
        :param error:
        :return:
        """
        pass

    def get_connection_count(self) -> List[tuple[int, dict]]:
        """
        Returns a connection count (both idle and in use).
        """
        attributes = AttributeBuilder.build_base_attributes()
        attributes[DB_CLIENT_CONNECTION_POOL_NAME] = get_pool_name(self)
        # One attribute set per connection state, sharing the base attributes.
        free_connections_attributes = attributes.copy()
        in_use_connections_attributes = attributes.copy()
        free_connections_attributes[DB_CLIENT_CONNECTION_STATE] = (
            ConnectionState.IDLE.value
        )
        in_use_connections_attributes[DB_CLIENT_CONNECTION_STATE] = (
            ConnectionState.USED.value
        )
        return [
            (len(self._available_connections), free_connections_attributes),
            (len(self._in_use_connections), in_use_connections_attributes),
        ]
class BlockingConnectionPool(ConnectionPool):
    """
    A blocking connection pool::

        >>> from redis.asyncio import Redis, BlockingConnectionPool
        >>> client = Redis.from_pool(BlockingConnectionPool())

    It performs the same function as the default
    :py:class:`~redis.asyncio.ConnectionPool` implementation, in that,
    it maintains a pool of reusable connections that can be shared by
    multiple async redis clients.

    The difference is that, in the event that a client tries to get a
    connection from the pool when all of connections are in use, rather than
    raising a :py:class:`~redis.ConnectionError` (as the default
    :py:class:`~redis.asyncio.ConnectionPool` implementation does), it
    blocks the current `Task` for a specified number of seconds until
    a connection becomes available.

    Use ``max_connections`` to increase / decrease the pool size::

        >>> pool = BlockingConnectionPool(max_connections=10)

    Use ``timeout`` to tell it either how many seconds to wait for a connection
    to become available, or to block forever:

        >>> # Block forever.
        >>> pool = BlockingConnectionPool(timeout=None)

        >>> # Raise a ``ConnectionError`` after five seconds if a connection is
        >>> # not available.
        >>> pool = BlockingConnectionPool(timeout=5)
    """

    def __init__(
        self,
        max_connections: int = 50,
        timeout: Optional[float] = 20,
        connection_class: Type[AbstractConnection] = Connection,
        queue_class: Type[asyncio.Queue] = asyncio.LifoQueue,  # deprecated
        **connection_kwargs,
    ):
        super().__init__(
            connection_class=connection_class,
            max_connections=max_connections,
            **connection_kwargs,
        )
        # Condition used to block tasks until a connection is available.
        self._condition = asyncio.Condition()
        # Seconds to wait for a free connection; None blocks forever.
        self.timeout = timeout

    @deprecated_args(
        args_to_warn=["*"],
        reason="Use get_connection() without args instead",
        version="5.3.0",
    )
    async def get_connection(self, command_name=None, *keys, **options):
        """Gets a connection from the pool, blocking until one is available"""
        # Start timing for wait time observability
        start_time_acquired = time.monotonic()
        try:
            async with self._condition:
                async with async_timeout(self.timeout):
                    # Wait (up to self.timeout) until a connection can be
                    # handed out without exceeding max_connections.
                    await self._condition.wait_for(self.can_get_connection)
                    # Track connection count before to detect if a new connection is created
                    connections_before = len(self._available_connections) + len(
                        self._in_use_connections
                    )
                    start_time_created = time.monotonic()
                    connection = super().get_available_connection()
                    connections_after = len(self._available_connections) + len(
                        self._in_use_connections
                    )
                    is_created = connections_after > connections_before
        except asyncio.TimeoutError as err:
            raise ConnectionError("No connection available.") from err

        # We now perform the connection check outside of the lock.
        try:
            await self.ensure_connection(connection)
            if is_created:
                await record_connection_create_time(
                    connection_pool=self,
                    duration_seconds=time.monotonic() - start_time_created,
                )
            await record_connection_wait_time(
                pool_name=get_pool_name(self),
                duration_seconds=time.monotonic() - start_time_acquired,
            )
            return connection
        except BaseException:
            # Return the connection and wake a waiter before re-raising.
            await self.release(connection)
            raise

    async def release(self, connection: AbstractConnection):
        """Releases the connection back to the pool."""
        async with self._condition:
            await super().release(connection)
            # Wake one task waiting in get_connection().
            self._condition.notify()