# testing/engines.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from __future__ import absolute_import

import collections
import re
import warnings
import weakref

from . import config
from .util import decorator
from .util import gc_collect
from .. import event
from .. import pool


class ConnectionKiller(object):
    def __init__(self):
        self.proxy_refs = weakref.WeakKeyDictionary()
        self.testing_engines = collections.defaultdict(set)
        self.dbapi_connections = set()

    def add_pool(self, pool):
        event.listen(pool, "checkout", self._add_conn)
        event.listen(pool, "checkin", self._remove_conn)
        event.listen(pool, "close", self._remove_conn)
        event.listen(pool, "close_detached", self._remove_conn)
        # note we are keeping "invalidated" here, as those are still
        # opened connections we would like to roll back

    def _add_conn(self, dbapi_con, con_record, con_proxy):
        self.dbapi_connections.add(dbapi_con)
        self.proxy_refs[con_proxy] = True

    def _remove_conn(self, dbapi_conn, *arg):
        self.dbapi_connections.discard(dbapi_conn)

    def add_engine(self, engine, scope):
        self.add_pool(engine.pool)
        assert scope in ("class", "global", "function", "fixture")
        self.testing_engines[scope].add(engine)

    def _safe(self, fn):
        try:
            fn()
        except Exception as e:
            warnings.warn(
                "testing_reaper couldn't rollback/close connection: %s" % e
            )

    def rollback_all(self):
        for rec in list(self.proxy_refs):
            if rec is not None and rec.is_valid:
                self._safe(rec.rollback)

    def checkin_all(self):
        # run pool.checkin() for all ConnectionFairy instances we have
        # tracked.
        for rec in list(self.proxy_refs):
            if rec is not None and rec.is_valid:
                self.dbapi_connections.discard(rec.dbapi_connection)
                self._safe(rec._checkin)

        # for fairy refs that were GCed and could not close the connection,
        # such as asyncio, roll back those remaining connections
        for con in self.dbapi_connections:
            self._safe(con.rollback)
        self.dbapi_connections.clear()

    def close_all(self):
        self.checkin_all()

    def prepare_for_drop_tables(self, connection):
        # don't do aggressive checks for third party test suites
        if not config.bootstrapped_as_sqlalchemy:
            return

        from . import provision

        provision.prepare_for_drop_tables(connection.engine.url, connection)

    def _drop_testing_engines(self, scope):
        eng = self.testing_engines[scope]
        for rec in list(eng):
            for proxy_ref in list(self.proxy_refs):
                if proxy_ref is not None and proxy_ref.is_valid:
                    if (
                        proxy_ref._pool is not None
                        and proxy_ref._pool is rec.pool
                    ):
                        self._safe(proxy_ref._checkin)
            rec.dispose()
        eng.clear()

    def after_test(self):
        self._drop_testing_engines("function")

    def after_test_outside_fixtures(self, test):
        # don't do aggressive checks for third party test suites
        if not config.bootstrapped_as_sqlalchemy:
            return

        if test.__class__.__leave_connections_for_teardown__:
            return

        self.checkin_all()

        # on PostgreSQL, this will test for any "idle in transaction"
        # connections.  useful to identify tests with unusual patterns
        # that can't be cleaned up correctly.
        from . import provision

        with config.db.connect() as conn:
            provision.prepare_for_drop_tables(conn.engine.url, conn)

    def stop_test_class_inside_fixtures(self):
        self.checkin_all()
        self._drop_testing_engines("function")
        self._drop_testing_engines("class")

    def stop_test_class_outside_fixtures(self):
        # ensure no refs to checked out connections at all.
        if pool.base._strong_ref_connection_records:
            gc_collect()

            if pool.base._strong_ref_connection_records:
                ln = len(pool.base._strong_ref_connection_records)
                pool.base._strong_ref_connection_records.clear()
                assert (
                    False
                ), "%d connection recs not cleared after test suite" % (ln)

    def final_cleanup(self):
        self.checkin_all()
        for scope in self.testing_engines:
            self._drop_testing_engines(scope)

    def assert_all_closed(self):
        for rec in self.proxy_refs:
            if rec.is_valid:
                assert False


testing_reaper = ConnectionKiller()


@decorator
def assert_conns_closed(fn, *args, **kw):
    try:
        fn(*args, **kw)
    finally:
        testing_reaper.assert_all_closed()


@decorator
def rollback_open_connections(fn, *args, **kw):
    """Decorator that rolls back all open connections after fn execution."""

    try:
        fn(*args, **kw)
    finally:
        testing_reaper.rollback_all()


@decorator
def close_first(fn, *args, **kw):
    """Decorator that closes all connections before fn execution."""

    testing_reaper.checkin_all()
    fn(*args, **kw)


@decorator
def close_open_connections(fn, *args, **kw):
    """Decorator that closes all connections after fn execution."""

    try:
        fn(*args, **kw)
    finally:
        testing_reaper.checkin_all()


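# Illustrative sketch only, not part of the original module: a hypothetical
# test function showing how the decorators above are applied.  The helper is
# defined here purely as an example and is never called at import time.
def _example_close_open_connections_usage():
    from sqlalchemy import text

    @close_open_connections
    def test_connection_left_open():
        # the connection is deliberately not closed; the decorator's
        # ``finally`` block returns it to the pool via
        # testing_reaper.checkin_all()
        conn = config.db.connect()
        conn.execute(text("select 1"))

    test_connection_left_open()

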
def all_dialects(exclude=None):
    import sqlalchemy.dialects as d

    for name in d.__all__:
        # TEMPORARY
        if exclude and name in exclude:
            continue
        mod = getattr(d, name, None)
        if not mod:
            mod = getattr(
                __import__("sqlalchemy.dialects.%s" % name).dialects, name
            )
        yield mod.dialect()


class ReconnectFixture(object):
    def __init__(self, dbapi):
        self.dbapi = dbapi
        self.connections = []
        self.is_stopped = False

    def __getattr__(self, key):
        return getattr(self.dbapi, key)

    def connect(self, *args, **kwargs):
        conn = self.dbapi.connect(*args, **kwargs)
        if self.is_stopped:
            self._safe(conn.close)
            curs = conn.cursor()  # should fail on Oracle etc.
            # should fail for everything that didn't fail
            # above, connection is closed
            curs.execute("select 1")
            assert False, "simulated connect failure didn't work"
        else:
            self.connections.append(conn)
            return conn

    def _safe(self, fn):
        try:
            fn()
        except Exception as e:
            warnings.warn("ReconnectFixture couldn't close connection: %s" % e)

    def shutdown(self, stop=False):
        # TODO: this doesn't cover all cases
        # as nicely as we'd like, namely MySQLdb.
        # would need to implement R. Brewer's
        # proxy server idea to get better
        # coverage.
        self.is_stopped = stop
        for c in list(self.connections):
            self._safe(c.close)
        self.connections = []

    def restart(self):
        self.is_stopped = False


def reconnecting_engine(url=None, options=None):
    url = url or config.db.url
    dbapi = config.db.dialect.dbapi
    if not options:
        options = {}
    options["module"] = ReconnectFixture(dbapi)
    engine = testing_engine(url, options)
    _dispose = engine.dispose

    def dispose():
        engine.dialect.dbapi.shutdown()
        engine.dialect.dbapi.is_stopped = False
        _dispose()

    engine.test_shutdown = engine.dialect.dbapi.shutdown
    engine.test_restart = engine.dialect.dbapi.restart
    engine.dispose = dispose
    return engine


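# Illustrative sketch (not part of the original module) of how
# ``reconnecting_engine`` is typically exercised: ``test_shutdown()`` closes
# the DBAPI connections behind the pool's back, so the next statement on a
# pooled connection should fail with a disconnect-style error, after which
# the pool can recover.  Defined only as an example; never called here.
def _example_reconnecting_engine_usage():
    from sqlalchemy import text

    engine = reconnecting_engine()
    with engine.connect() as conn:
        conn.execute(text("select 1"))

    engine.test_shutdown()
    try:
        with engine.connect() as conn:
            conn.execute(text("select 1"))
    except Exception:
        # expected: the stale pooled connection raises and is invalidated
        pass

    engine.dispose()

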
def testing_engine(
    url=None,
    options=None,
    future=None,
    asyncio=False,
    transfer_staticpool=False,
):
    """Produce an engine configured by --options with optional overrides."""

    if asyncio:
        from sqlalchemy.ext.asyncio import (
            create_async_engine as create_engine,
        )
    elif future or (
        config.db and config.db._is_future and future is not False
    ):
        from sqlalchemy.future import create_engine
    else:
        from sqlalchemy import create_engine
    from sqlalchemy.engine.url import make_url

    if not options:
        use_reaper = True
        scope = "function"
    else:
        use_reaper = options.pop("use_reaper", True)
        scope = options.pop("scope", "function")

    url = url or config.db.url

    url = make_url(url)
    if options is None:
        if config.db is None or url.drivername == config.db.url.drivername:
            options = config.db_opts
        else:
            options = {}
    elif config.db is not None and url.drivername == config.db.url.drivername:
        default_opt = config.db_opts.copy()
        default_opt.update(options)

    engine = create_engine(url, **options)

    if transfer_staticpool:
        from sqlalchemy.pool import StaticPool

        if config.db is not None and isinstance(config.db.pool, StaticPool):
            engine.pool._transfer_from(config.db.pool)

    if scope == "global":
        if asyncio:
            engine.sync_engine._has_events = True
        else:
            engine._has_events = (
                True  # enable event blocks, helps with profiling
            )

    if isinstance(engine.pool, pool.QueuePool):
        engine.pool._timeout = 0
        engine.pool._max_overflow = 0
    if use_reaper:
        testing_reaper.add_engine(engine, scope)

    return engine


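# Illustrative sketch (not part of the original module) of creating an
# ad-hoc engine via ``testing_engine``; the ``echo`` override and explicit
# "function" scope are just example options.  Defined only as an example;
# never called here.
def _example_testing_engine_usage():
    eng = testing_engine(options={"echo": True, "scope": "function"})
    conn = eng.connect()
    conn.close()
    # with use_reaper left at its default, the engine is also registered
    # with testing_reaper and dropped in after_test(); an explicit dispose()
    # is harmless
    eng.dispose()

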
def mock_engine(dialect_name=None):
    """Provides a mocking engine based on the current testing.db.

    This is normally used to test DDL generation flow as emitted
    by an Engine.

    It should not be used in other cases, as assert_compile() and
    assert_sql_execution() are much better choices with fewer
    moving parts.

    """

    from sqlalchemy import create_mock_engine

    if not dialect_name:
        dialect_name = config.db.name

    buffer = []

    def executor(sql, *a, **kw):
        buffer.append(sql)

    def assert_sql(stmts):
        recv = [re.sub(r"[\n\t]", "", str(s)) for s in buffer]
        assert recv == stmts, recv

    def print_sql():
        d = engine.dialect
        return "\n".join(str(s.compile(dialect=d)) for s in engine.mock)

    engine = create_mock_engine(dialect_name + "://", executor)
    assert not hasattr(engine, "mock")
    engine.mock = buffer
    engine.assert_sql = assert_sql
    engine.print_sql = print_sql
    return engine


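# Illustrative sketch (not part of the original module) of the DDL-generation
# flow ``mock_engine`` is intended for; the table "t" is hypothetical.
# ``checkfirst=False`` is needed because a mock engine cannot query the
# database.  Defined only as an example; never called here.
def _example_mock_engine_usage():
    from sqlalchemy import Column, Integer, MetaData, Table

    m = MetaData()
    Table("t", m, Column("id", Integer, primary_key=True))

    engine = mock_engine()
    m.create_all(engine, checkfirst=False)

    # the DDL constructs "executed" by the mock engine are collected in
    # engine.mock; engine.assert_sql() / engine.print_sql() inspect them
    assert len(engine.mock) > 0

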
class DBAPIProxyCursor(object):
    """Proxy a DBAPI cursor.

    Tests can provide subclasses of this to intercept
    DBAPI-level cursor operations.

    """

    def __init__(self, engine, conn, *args, **kwargs):
        self.engine = engine
        self.connection = conn
        self.cursor = conn.cursor(*args, **kwargs)

    def execute(self, stmt, parameters=None, **kw):
        if parameters:
            return self.cursor.execute(stmt, parameters, **kw)
        else:
            return self.cursor.execute(stmt, **kw)

    def executemany(self, stmt, params, **kw):
        return self.cursor.executemany(stmt, params, **kw)

    def __iter__(self):
        return iter(self.cursor)

    def __getattr__(self, key):
        return getattr(self.cursor, key)


class DBAPIProxyConnection(object):
    """Proxy a DBAPI connection.

    Tests can provide subclasses of this to intercept
    DBAPI-level connection operations.

    """

    def __init__(self, engine, cursor_cls):
        self.conn = engine.pool._creator()
        self.engine = engine
        self.cursor_cls = cursor_cls

    def cursor(self, *args, **kwargs):
        return self.cursor_cls(self.engine, self.conn, *args, **kwargs)

    def close(self):
        self.conn.close()

    def __getattr__(self, key):
        return getattr(self.conn, key)


def proxying_engine(
    conn_cls=DBAPIProxyConnection, cursor_cls=DBAPIProxyCursor
):
    """Produce an engine that provides proxy hooks for
    common methods.

    """

    def mock_conn():
        return conn_cls(config.db, cursor_cls)

    def _wrap_do_on_connect(do_on_connect):
        def go(dbapi_conn):
            return do_on_connect(dbapi_conn.conn)

        return go

    return testing_engine(
        options={
            "creator": mock_conn,
            "_wrap_do_on_connect": _wrap_do_on_connect,
        }
    )


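# Illustrative sketch (not part of the original module) of intercepting
# cursor operations through ``proxying_engine``; the counting cursor and the
# helper function are hypothetical and never invoked here.
class _ExampleCountingCursor(DBAPIProxyCursor):
    """Example subclass that counts DBAPI-level execute() calls."""

    execute_count = 0

    def execute(self, stmt, parameters=None, **kw):
        _ExampleCountingCursor.execute_count += 1
        return super(_ExampleCountingCursor, self).execute(
            stmt, parameters, **kw
        )


def _example_proxying_engine_usage():
    from sqlalchemy import text

    eng = proxying_engine(cursor_cls=_ExampleCountingCursor)
    with eng.connect() as conn:
        conn.execute(text("select 1"))
    assert _ExampleCountingCursor.execute_count >= 1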