This file is indexed.

/usr/lib/python3/dist-packages/sqlalchemy/testing/util.py is in python3-sqlalchemy 1.0.15+ds1-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# testing/util.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

from ..util import jython, pypy, defaultdict, decorator, py2k
import decimal
import gc
import time
import random
import sys
import types

if jython:
    def jython_gc_collect(*args):
        """aggressive gc.collect for tests."""
        gc.collect()
        time.sleep(0.1)
        gc.collect()
        gc.collect()
        return 0

    # "lazy" gc, for VM's that don't GC on refcount == 0
    gc_collect = lazy_gc = jython_gc_collect
elif pypy:
    def pypy_gc_collect(*args):
        gc.collect()
        gc.collect()
    gc_collect = lazy_gc = pypy_gc_collect
else:
    # assume CPython - straight gc.collect, lazy_gc() is a pass
    gc_collect = gc.collect

    def lazy_gc():
        pass


def picklers():
    picklers = set()
    if py2k:
        try:
            import cPickle
            picklers.add(cPickle)
        except ImportError:
            pass

    import pickle
    picklers.add(pickle)

    # yes, this thing needs this much testing
    for pickle_ in picklers:
        for protocol in -1, 0, 1, 2:
            yield pickle_.loads, lambda d: pickle_.dumps(d, protocol)
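

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): every loads/dumps pair produced by picklers() should round-trip
# a value unchanged, across both pickler modules and all tested protocols.
def _example_pickler_roundtrip(value=("spam", 1)):
    for loads, dumps in picklers():
        assert loads(dumps(value)) == value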


def round_decimal(value, prec):
    if isinstance(value, float):
        return round(value, prec)

    # can also use shift() here but that is 2.6 only
    return (value * decimal.Decimal("1" + "0" * prec)
            ).to_integral(decimal.ROUND_FLOOR) / \
        pow(10, prec)
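

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): floats go through the builtin round(), while Decimals are
# truncated toward negative infinity at the requested precision.
def _example_round_decimal():
    assert round_decimal(
        decimal.Decimal("1.2399"), 2) == decimal.Decimal("1.23")
    assert round_decimal(1.25, 1) == round(1.25, 1)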


class RandomSet(set):
    def __iter__(self):
        l = list(set.__iter__(self))
        random.shuffle(l)
        return iter(l)

    def pop(self):
        index = random.randint(0, len(self) - 1)
        item = list(set.__iter__(self))[index]
        self.remove(item)
        return item

    def union(self, other):
        return RandomSet(set.union(self, other))

    def difference(self, other):
        return RandomSet(set.difference(self, other))

    def intersection(self, other):
        return RandomSet(set.intersection(self, other))

    def copy(self):
        return RandomSet(self)
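

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): a RandomSet has the same membership as a plain set but iterates
# in shuffled order, which helps flush out order-dependent assumptions.
def _example_random_set():
    rs = RandomSet(range(10))
    assert sorted(rs) == list(range(10))
    popped = rs.pop()
    assert popped not in rs and len(rs) == 9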


def conforms_partial_ordering(tuples, sorted_elements):
    """True if the given sorting conforms to the given partial ordering."""

    deps = defaultdict(set)
    for parent, child in tuples:
        deps[parent].add(child)
    for i, node in enumerate(sorted_elements):
        for n in sorted_elements[i:]:
            if node in deps[n]:
                return False
    else:
        return True
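

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): each (parent, child) tuple requires the parent to sort before
# the child.
def _example_conforms_partial_ordering():
    tuples = [(1, 2), (2, 3)]
    assert conforms_partial_ordering(tuples, [1, 2, 3])
    assert not conforms_partial_ordering(tuples, [2, 1, 3])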


def all_partial_orderings(tuples, elements):
    edges = defaultdict(set)
    for parent, child in tuples:
        edges[child].add(parent)

    def _all_orderings(elements):

        if len(elements) == 1:
            yield list(elements)
        else:
            for elem in elements:
                subset = set(elements).difference([elem])
                if not subset.intersection(edges[elem]):
                    for sub_ordering in _all_orderings(subset):
                        yield [elem] + sub_ordering

    return iter(_all_orderings(elements))
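

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): all_partial_orderings() yields every ordering of the elements
# that satisfies the given (parent, child) constraints.
def _example_all_partial_orderings():
    orderings = list(all_partial_orderings([(1, 2), (1, 3)], {1, 2, 3}))
    assert sorted(orderings) == [[1, 2, 3], [1, 3, 2]]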


def function_named(fn, name):
    """Return a function with a given __name__.

    Will assign to __name__ and return the original function if possible on
    the Python implementation, otherwise a new function will be constructed.

    This function should be phased out as much as possible
    in favor of @decorator.   Tests that "generate" many named tests
    should be modernized.

    """
    try:
        fn.__name__ = name
    except TypeError:
        fn = types.FunctionType(fn.__code__, fn.__globals__, name,
                                fn.__defaults__, fn.__closure__)
    return fn
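

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): the returned function behaves like the original but reports the
# new name.
def _example_function_named():
    renamed = function_named(lambda: 42, "answer")
    assert renamed.__name__ == "answer"
    assert renamed() == 42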


def run_as_contextmanager(ctx, fn, *arg, **kw):
    """Run the given function under the given contextmanager,
    simulating the behavior of 'with' to support older
    Python versions.

    This is no longer necessary now that 2.6 is the minimum Python
    version; however, some tests still use this structure.

    """

    obj = ctx.__enter__()
    try:
        result = fn(obj, *arg, **kw)
        ctx.__exit__(None, None, None)
        return result
    except:
        exc_info = sys.exc_info()
        raise_ = ctx.__exit__(*exc_info)
        if raise_ is None:
            raise
        else:
            return raise_
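

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): the function receives whatever __enter__ returns, and __exit__
# is invoked whether the function returns normally or raises.
def _example_run_as_contextmanager():
    import contextlib

    @contextlib.contextmanager
    def acquire():
        yield "resource"

    result = run_as_contextmanager(acquire(), lambda obj: obj.upper())
    assert result == "RESOURCE"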


def rowset(results):
    """Converts the results of SQL execution into a plain set of
    column tuples.

    Useful for asserting the results of an unordered query.
    """

    return set([tuple(row) for row in results])
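

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): any iterable of row-like sequences works, e.g. a list of tuples.
def _example_rowset():
    assert rowset([(1, "a"), (2, "b")]) == set([(1, "a"), (2, "b")])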


def fail(msg):
    assert False, msg


@decorator
def provide_metadata(fn, *args, **kw):
    """Provide bound MetaData for a single test, dropping afterwards."""

    from . import config
    from . import engines
    from sqlalchemy import schema

    metadata = schema.MetaData(config.db)
    self = args[0]
    prev_meta = getattr(self, 'metadata', None)
    self.metadata = metadata
    try:
        return fn(*args, **kw)
    finally:
        engines.drop_all_tables(metadata, config.db)
        self.metadata = prev_meta
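

# Illustrative sketch (hypothetical test method, not part of the packaged
# module): the decorated test sees ``self.metadata`` bound to ``config.db``,
# and tables created against it are dropped once the test returns:
#
#     @provide_metadata
#     def test_round_trip(self):
#         t = Table('t', self.metadata, Column('x', Integer))
#         t.create()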


def force_drop_names(*names):
    """Force the given table names to be dropped after the test completes,
    accounting for foreign key cycles.

    """
    from . import config
    from sqlalchemy import inspect

    @decorator
    def go(fn, *args, **kw):

        try:
            return fn(*args, **kw)
        finally:
            drop_all_tables(
                config.db, inspect(config.db), include_names=names)
    return go
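

# Illustrative sketch (hypothetical test method, not part of the packaged
# module): the named tables are unconditionally dropped once the test
# completes, which helps when foreign key cycles block an ordinary drop:
#
#     @force_drop_names("node", "node_to_node")
#     def test_self_referential_cycle(self):
#         ...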


class adict(dict):
    """Dict keys available as attributes; keys shadow attributes of the
    same name."""

    def __getattribute__(self, key):
        try:
            return self[key]
        except KeyError:
            return dict.__getattribute__(self, key)

    def __call__(self, *keys):
        return tuple([self[key] for key in keys])

    get_all = __call__
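

# Illustrative sketch (hypothetical helper, not part of the packaged
# module): keys double as attributes, and calling the dict pulls several
# keys at once.
def _example_adict():
    d = adict(users=1, addresses=2)
    assert d.users == 1
    assert d("users", "addresses") == (1, 2)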


def drop_all_tables(engine, inspector, schema=None, include_names=None):
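    """Drop all tables reflected by ``inspector`` from ``engine``.

    Foreign key constraints involved in cycles are dropped first (when the
    dialect supports ALTER), then each table is dropped in reverse
    dependency order; ``include_names``, if given, restricts the operation
    to those table names.
    """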
    from sqlalchemy import Column, Table, Integer, MetaData, \
        ForeignKeyConstraint
    from sqlalchemy.schema import DropTable, DropConstraint

    if include_names is not None:
        include_names = set(include_names)

    with engine.connect() as conn:
        for tname, fkcs in reversed(
                inspector.get_sorted_table_and_fkc_names(schema=schema)):
            if tname:
                if include_names is not None and tname not in include_names:
                    continue
                conn.execute(DropTable(
                    Table(tname, MetaData(), schema=schema)
                ))
            elif fkcs:
                if not engine.dialect.supports_alter:
                    continue
                for tname, fkc in fkcs:
                    if include_names is not None and \
                            tname not in include_names:
                        continue
                    tb = Table(
                        tname, MetaData(),
                        Column('x', Integer),
                        Column('y', Integer),
                        schema=schema
                    )
                    conn.execute(DropConstraint(
                        ForeignKeyConstraint(
                            [tb.c.x], [tb.c.y], name=fkc)
                    ))


def teardown_events(event_cls):
    @decorator
    def decorate(fn, *arg, **kw):
        try:
            return fn(*arg, **kw)
        finally:
            event_cls._clear()
    return decorate
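

# Illustrative sketch (hypothetical usage, not part of the packaged
# module): listeners registered against the given event class are cleared
# after the decorated test completes, whether or not it raised:
#
#     @teardown_events(events.PoolEvents)
#     def test_checkout_listener(self):
#         ...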