util.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458
  1. # testing/util.py
  2. # Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
  3. # <see AUTHORS file>
  4. #
  5. # This module is part of SQLAlchemy and is released under
  6. # the MIT License: https://www.opensource.org/licenses/mit-license.php
  7. import decimal
  8. import gc
  9. import random
  10. import sys
  11. import types
  12. from . import config
  13. from . import mock
  14. from .. import inspect
  15. from ..engine import Connection
  16. from ..schema import Column
  17. from ..schema import DropConstraint
  18. from ..schema import DropTable
  19. from ..schema import ForeignKeyConstraint
  20. from ..schema import MetaData
  21. from ..schema import Table
  22. from ..sql import schema
  23. from ..sql.sqltypes import Integer
  24. from ..util import decorator
  25. from ..util import defaultdict
  26. from ..util import has_refcount_gc
  27. from ..util import inspect_getfullargspec
  28. from ..util import py2k
# Pick gc strategies based on the interpreter's garbage collector.
# When the gc is not reference-counting based (presumably targeting
# PyPy and similar — confirm), a single gc.collect() may not free
# everything, so collect twice, and make lazy_gc() do a real
# collection as well.
if not has_refcount_gc:

    def non_refcount_gc_collect(*args):
        # two passes: the second collects objects released by the first
        gc.collect()
        gc.collect()

    gc_collect = lazy_gc = non_refcount_gc_collect
else:
    # assume CPython - straight gc.collect, lazy_gc() is a pass
    gc_collect = gc.collect

    def lazy_gc():
        pass
  39. def picklers():
  40. picklers = set()
  41. if py2k:
  42. try:
  43. import cPickle
  44. picklers.add(cPickle)
  45. except ImportError:
  46. pass
  47. import pickle
  48. picklers.add(pickle)
  49. # yes, this thing needs this much testing
  50. for pickle_ in picklers:
  51. for protocol in range(-2, pickle.HIGHEST_PROTOCOL):
  52. yield pickle_.loads, lambda d: pickle_.dumps(d, protocol)
  53. if py2k:
  54. def random_choices(population, k=1):
  55. pop = list(population)
  56. # lame but works :)
  57. random.shuffle(pop)
  58. return pop[0:k]
  59. else:
  60. def random_choices(population, k=1):
  61. return random.choices(population, k=k)
  62. def round_decimal(value, prec):
  63. if isinstance(value, float):
  64. return round(value, prec)
  65. # can also use shift() here but that is 2.6 only
  66. return (value * decimal.Decimal("1" + "0" * prec)).to_integral(
  67. decimal.ROUND_FLOOR
  68. ) / pow(10, prec)
  69. class RandomSet(set):
  70. def __iter__(self):
  71. l = list(set.__iter__(self))
  72. random.shuffle(l)
  73. return iter(l)
  74. def pop(self):
  75. index = random.randint(0, len(self) - 1)
  76. item = list(set.__iter__(self))[index]
  77. self.remove(item)
  78. return item
  79. def union(self, other):
  80. return RandomSet(set.union(self, other))
  81. def difference(self, other):
  82. return RandomSet(set.difference(self, other))
  83. def intersection(self, other):
  84. return RandomSet(set.intersection(self, other))
  85. def copy(self):
  86. return RandomSet(self)
  87. def conforms_partial_ordering(tuples, sorted_elements):
  88. """True if the given sorting conforms to the given partial ordering."""
  89. deps = defaultdict(set)
  90. for parent, child in tuples:
  91. deps[parent].add(child)
  92. for i, node in enumerate(sorted_elements):
  93. for n in sorted_elements[i:]:
  94. if node in deps[n]:
  95. return False
  96. else:
  97. return True
  98. def all_partial_orderings(tuples, elements):
  99. edges = defaultdict(set)
  100. for parent, child in tuples:
  101. edges[child].add(parent)
  102. def _all_orderings(elements):
  103. if len(elements) == 1:
  104. yield list(elements)
  105. else:
  106. for elem in elements:
  107. subset = set(elements).difference([elem])
  108. if not subset.intersection(edges[elem]):
  109. for sub_ordering in _all_orderings(subset):
  110. yield [elem] + sub_ordering
  111. return iter(_all_orderings(elements))
  112. def function_named(fn, name):
  113. """Return a function with a given __name__.
  114. Will assign to __name__ and return the original function if possible on
  115. the Python implementation, otherwise a new function will be constructed.
  116. This function should be phased out as much as possible
  117. in favor of @decorator. Tests that "generate" many named tests
  118. should be modernized.
  119. """
  120. try:
  121. fn.__name__ = name
  122. except TypeError:
  123. fn = types.FunctionType(
  124. fn.__code__, fn.__globals__, name, fn.__defaults__, fn.__closure__
  125. )
  126. return fn
  127. def run_as_contextmanager(ctx, fn, *arg, **kw):
  128. """Run the given function under the given contextmanager,
  129. simulating the behavior of 'with' to support older
  130. Python versions.
  131. This is not necessary anymore as we have placed 2.6
  132. as minimum Python version, however some tests are still using
  133. this structure.
  134. """
  135. obj = ctx.__enter__()
  136. try:
  137. result = fn(obj, *arg, **kw)
  138. ctx.__exit__(None, None, None)
  139. return result
  140. except:
  141. exc_info = sys.exc_info()
  142. raise_ = ctx.__exit__(*exc_info)
  143. if not raise_:
  144. raise
  145. else:
  146. return raise_
  147. def rowset(results):
  148. """Converts the results of sql execution into a plain set of column tuples.
  149. Useful for asserting the results of an unordered query.
  150. """
  151. return {tuple(row) for row in results}
  152. def fail(msg):
  153. assert False, msg
@decorator
def provide_metadata(fn, *args, **kw):
    """Provide bound MetaData for a single test, dropping afterwards.

    Legacy; use the "metadata" pytest fixture.

    """

    from . import fixtures

    metadata = schema.MetaData()
    # assumes the decorated callable is a test method; args[0] is the
    # test instance
    self = args[0]
    prev_meta = getattr(self, "metadata", None)
    self.metadata = metadata
    try:
        return fn(*args, **kw)
    finally:
        # close out some things that get in the way of dropping tables.
        # when using the "metadata" fixture, there is a set ordering
        # of things that makes sure things are cleaned up in order, however
        # the simple "decorator" nature of this legacy function means
        # we have to hardcode some of that cleanup ahead of time.

        # close ORM sessions
        fixtures._close_all_sessions()

        # integrate with the "connection" fixture as there are many
        # tests where it is used along with provide_metadata
        if fixtures._connection_fixture_connection:
            # TODO: this warning can be used to find all the places
            # this is used with connection fixture
            # warn("mixing legacy provide metadata with connection fixture")
            drop_all_tables_from_metadata(
                metadata, fixtures._connection_fixture_connection
            )
            # as the provide_metadata fixture is often used with "testing.db",
            # when we do the drop we have to commit the transaction so that
            # the DB is actually updated as the CREATE would have been
            # committed
            fixtures._connection_fixture_connection.get_transaction().commit()
        else:
            drop_all_tables_from_metadata(metadata, config.db)

        # restore whatever .metadata the test instance had before
        self.metadata = prev_meta
  191. def flag_combinations(*combinations):
  192. """A facade around @testing.combinations() oriented towards boolean
  193. keyword-based arguments.
  194. Basically generates a nice looking identifier based on the keywords
  195. and also sets up the argument names.
  196. E.g.::
  197. @testing.flag_combinations(
  198. dict(lazy=False, passive=False),
  199. dict(lazy=True, passive=False),
  200. dict(lazy=False, passive=True),
  201. dict(lazy=False, passive=True, raiseload=True),
  202. )
  203. would result in::
  204. @testing.combinations(
  205. ('', False, False, False),
  206. ('lazy', True, False, False),
  207. ('lazy_passive', True, True, False),
  208. ('lazy_passive', True, True, True),
  209. id_='iaaa',
  210. argnames='lazy,passive,raiseload'
  211. )
  212. """
  213. keys = set()
  214. for d in combinations:
  215. keys.update(d)
  216. keys = sorted(keys)
  217. return config.combinations(
  218. *[
  219. ("_".join(k for k in keys if d.get(k, False)),)
  220. + tuple(d.get(k, False) for k in keys)
  221. for d in combinations
  222. ],
  223. id_="i" + ("a" * len(keys)),
  224. argnames=",".join(keys)
  225. )
  226. def lambda_combinations(lambda_arg_sets, **kw):
  227. args = inspect_getfullargspec(lambda_arg_sets)
  228. arg_sets = lambda_arg_sets(*[mock.Mock() for arg in args[0]])
  229. def create_fixture(pos):
  230. def fixture(**kw):
  231. return lambda_arg_sets(**kw)[pos]
  232. fixture.__name__ = "fixture_%3.3d" % pos
  233. return fixture
  234. return config.combinations(
  235. *[(create_fixture(i),) for i in range(len(arg_sets))], **kw
  236. )
  237. def resolve_lambda(__fn, **kw):
  238. """Given a no-arg lambda and a namespace, return a new lambda that
  239. has all the values filled in.
  240. This is used so that we can have module-level fixtures that
  241. refer to instance-level variables using lambdas.
  242. """
  243. pos_args = inspect_getfullargspec(__fn)[0]
  244. pass_pos_args = {arg: kw.pop(arg) for arg in pos_args}
  245. glb = dict(__fn.__globals__)
  246. glb.update(kw)
  247. new_fn = types.FunctionType(__fn.__code__, glb)
  248. return new_fn(**pass_pos_args)
def metadata_fixture(ddl="function"):
    """Provide MetaData for a pytest fixture.

    ``ddl`` is the pytest fixture scope at which the tables are created
    and dropped (e.g. "function", "class").
    """

    def decorate(fn):
        def run_ddl(self):
            # a fresh MetaData per fixture run, also assigned onto the
            # test instance as .metadata
            metadata = self.metadata = schema.MetaData()
            try:
                result = fn(self, metadata)
                metadata.create_all(config.db)
                # TODO:
                # somehow get a per-function dml erase fixture here
                yield result
            finally:
                # tables are dropped even if the fixture body fails
                metadata.drop_all(config.db)

        return config.fixture(scope=ddl)(run_ddl)

    return decorate
  264. def force_drop_names(*names):
  265. """Force the given table names to be dropped after test complete,
  266. isolating for foreign key cycles
  267. """
  268. @decorator
  269. def go(fn, *args, **kw):
  270. try:
  271. return fn(*args, **kw)
  272. finally:
  273. drop_all_tables(config.db, inspect(config.db), include_names=names)
  274. return go
  275. class adict(dict):
  276. """Dict keys available as attributes. Shadows."""
  277. def __getattribute__(self, key):
  278. try:
  279. return self[key]
  280. except KeyError:
  281. return dict.__getattribute__(self, key)
  282. def __call__(self, *keys):
  283. return tuple([self[key] for key in keys])
  284. get_all = __call__
  285. def drop_all_tables_from_metadata(metadata, engine_or_connection):
  286. from . import engines
  287. def go(connection):
  288. engines.testing_reaper.prepare_for_drop_tables(connection)
  289. if not connection.dialect.supports_alter:
  290. from . import assertions
  291. with assertions.expect_warnings(
  292. "Can't sort tables", assert_=False
  293. ):
  294. metadata.drop_all(connection)
  295. else:
  296. metadata.drop_all(connection)
  297. if not isinstance(engine_or_connection, Connection):
  298. with engine_or_connection.begin() as connection:
  299. go(connection)
  300. else:
  301. go(engine_or_connection)
def drop_all_tables(engine, inspector, schema=None, include_names=None):
    """Drop tables (and, where needed, foreign key constraints) reported
    by *inspector*, optionally restricted to *include_names*.

    NOTE: the ``schema`` parameter shadows the module-level
    ``..sql.schema`` import inside this function.
    """
    if include_names is not None:
        include_names = set(include_names)

    with engine.begin() as conn:
        # reversed sort order: drop dependents before the tables they
        # reference
        for tname, fkcs in reversed(
            inspector.get_sorted_table_and_fkc_names(schema=schema)
        ):
            if tname:
                if include_names is not None and tname not in include_names:
                    continue
                conn.execute(
                    DropTable(Table(tname, MetaData(), schema=schema))
                )
            elif fkcs:
                # a (None, fkcs) entry means these constraints must be
                # dropped first to break a fk cycle; requires ALTER
                if not engine.dialect.supports_alter:
                    continue
                # NOTE: this inner tname shadows the outer loop variable
                for tname, fkc in fkcs:
                    if (
                        include_names is not None
                        and tname not in include_names
                    ):
                        continue
                    # placeholder columns only; DropConstraint needs a
                    # Table/constraint by name, not the real definition
                    tb = Table(
                        tname,
                        MetaData(),
                        Column("x", Integer),
                        Column("y", Integer),
                        schema=schema,
                    )
                    conn.execute(
                        DropConstraint(
                            ForeignKeyConstraint([tb.c.x], [tb.c.y], name=fkc)
                        )
                    )
  336. def teardown_events(event_cls):
  337. @decorator
  338. def decorate(fn, *arg, **kw):
  339. try:
  340. return fn(*arg, **kw)
  341. finally:
  342. event_cls._clear()
  343. return decorate