Skip to content

Commit

Permalink
Isolate memory tests in forks
Browse files Browse the repository at this point in the history
Swing the biggest hammer, run multiprocessing.Process() for
each memusage test individually so that they are fully isolated
from the parent process and any side effects of pytest-xdist

Also add --nomemory as a shortcut to exclude_tags=memory-intensive
and add this to the setup.py test runner as the memory tests
should not be running for quick runs

Change-Id: I3c16c781e21b33deb939a64e77a6e0e41fb86922
  • Loading branch information
zzzeek committed Aug 14, 2017
1 parent bb9d511 commit 2c594da
Show file tree
Hide file tree
Showing 4 changed files with 94 additions and 32 deletions.
8 changes: 8 additions & 0 deletions lib/sqlalchemy/testing/plugin/plugin_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ def setup_options(make_option):
help="Drop all tables in the target database first")
make_option("--backend-only", action="store_true", dest="backend_only",
help="Run only tests marked with __backend__")
make_option("--nomemory", action="store_true", dest="nomemory",
help="Don't run memory profiling tests")
make_option("--low-connections", action="store_true",
dest="low_connections",
help="Use a low number of distinct connections - "
Expand Down Expand Up @@ -228,6 +230,12 @@ def _setup_options(opt, file_config):
options = opt


@pre
def _set_nomemory(opt, file_config):
    """Translate the --nomemory flag into the standard tag exclusion."""
    if not opt.nomemory:
        return
    # memory profiling tests are tagged "memory_intensive"; excluding
    # that tag keeps them out of quick test runs
    exclude_tags.add("memory_intensive")


@pre
def _monkeypatch_cdecimal(options, file_config):
if options.cdecimal:
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ class PyTest(TestCommand):
# not working at the time of this comment.
user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]

default_options = ["-n", "4", "-q"]
default_options = ["-n", "4", "-q", "--nomemory"]

def initialize_options(self):
TestCommand.initialize_options(self)
Expand Down
111 changes: 83 additions & 28 deletions test/aaa_profiling/test_memusage.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import weakref
import itertools

import multiprocessing

class A(fixtures.ComparableEntity):
    # minimal placeholder entity used by the memusage tests
    pass
Expand Down Expand Up @@ -52,7 +53,12 @@ def get_objects_skipping_sqlite_issue():
else:
return gc.get_objects()

def profile(*args):
def profile(queue, func_args):
# give testing.db a brand new pool and don't
# touch the existing pool, since closing a socket
# in the subprocess can affect the parent
testing.db.pool = testing.db.pool.recreate()

gc_collect()
samples = []
max_ = 0
Expand All @@ -63,52 +69,85 @@ def profile(*args):
if until_maxtimes >= maxtimes // 5:
break
for x in range(5):
func(*args)
func(*func_args)
gc_collect()
samples.append(
get_num_objects() if get_num_objects is not None
else len(get_objects_skipping_sqlite_issue())
)

# note: this prints lots of text, and when using pytest-xdist,
# actually interferes with memory itself from just sending
# the stdout between processes :).
# need to figure out a "conditional print" that doesn't send
# any stdout when we have pytest-xdist happening
# print("sample gc sizes:", samples)

if assert_no_sessions:
assert len(_sessions) == 0

# queue.put(('samples', samples))

latest_max = max(samples[-5:])
if latest_max > max_:
print(
"Max grew from %s to %s, max has "
"grown for %s samples" % (
max_, latest_max, max_grew_for
queue.put(
(
'status',
"Max grew from %s to %s, max has "
"grown for %s samples" % (
max_, latest_max, max_grew_for
)
)
)
max_ = latest_max
max_grew_for += 1
until_maxtimes += 1
continue
else:
print("Max remained at %s, %s more attempts left" %
(max_, max_grew_for))
queue.put(
(
'status',
"Max remained at %s, %s more attempts left" %
(max_, max_grew_for)
)
)
max_grew_for -= 1
if max_grew_for == 0:
success = True
break

if not success:
assert False, \
"Ran for a total of %d times, memory kept growing: %r" % (
maxtimes,
samples
queue.put(
(
'result',
False,
"Ran for a total of %d times, memory kept "
"growing: %r" % (
maxtimes,
samples
)
)
)

else:
queue.put(
('result', True, 'success')
)

def run_in_process(*func_args):
    # Run profile() in a fully isolated child process so that memory
    # measurements are not affected by the parent process or by
    # pytest-xdist side effects; results come back over a Queue as
    # (type, ...) tuples.
    queue = multiprocessing.Queue()
    proc = multiprocessing.Process(
        target=profile, args=(queue, func_args))
    proc.start()
    while True:
        row = queue.get()
        typ = row[0]
        if typ == 'samples':
            # raw gc sample sizes forwarded from the child
            print("sample gc sizes:", row[1])
        elif typ == 'status':
            # progress message from the child's sampling loop
            print(row[1])
        elif typ == 'result':
            # ('result', success_bool, message) ends the stream
            break
        else:
            assert False, "can't parse row"
    proc.join()
    # re-raise the child's pass/fail verdict in the parent process
    assert row[1], row[2]

return run_in_process

assert success
return profile
return decorate


Expand Down Expand Up @@ -190,6 +229,19 @@ def go():

assert not eng.dialect._type_memos

@testing.fails()
def test_fixture_failure(self):
    # self-test of the profile_memory fixture: grow an unbounded
    # list of objects on every invocation so the profiler must
    # report growth; the test is therefore expected to fail.
    class Plain(object):
        pass

    retained = []

    @profile_memory(maxtimes=20)
    def go():
        retained.extend(Plain() for _ in range(100))

    go()


class MemUsageWBackendTest(EnsureZeroed):

Expand Down Expand Up @@ -416,19 +468,22 @@ class SomeClass(object):
target_strings = session.connection().\
dialect.identifier_preparer._strings

with session.transaction:
@profile_memory(
assert_no_sessions=False,
get_num_objects=lambda: len(target_strings))
def go():
session.close()

@profile_memory(
assert_no_sessions=False,
get_num_objects=lambda: len(target_strings)
)
def go():
session = Session(testing.db)
with session.transaction:

sc = SomeClass()
session.add(sc)

with session.begin_nested():
session.query(SomeClass).first()

go()
go()

@testing.crashes('mysql+cymysql', 'blocking')
def test_unicode_warnings(self):
Expand Down
5 changes: 2 additions & 3 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -50,12 +50,11 @@ setenv=
WORKERS={env:WORKERS:-n4}
oracle: WORKERS={env:WORKERS:-n2}
nocext: DISABLE_SQLALCHEMY_CEXT=1
nomemory: NOMEMORY=--exclude-tag memory-intensive
cov: COVERAGE={[testenv]cov_args}
sqlite: SQLITE={env:SQLITE:--db sqlite}
postgresql: POSTGRESQL={env:POSTGRESQL:--db postgresql}
mysql: MYSQL={env:MYSQL:--db mysql --db pymysql}
oracle: ORACLE={env:ORACLE:--db oracle} --write-idents oracle_idents.txt --exclude-tag memory-intensive
oracle: ORACLE={env:ORACLE:--db oracle} --write-idents oracle_idents.txt --nomemory
mssql: MSSQL={env:MSSQL:--db pyodbc --db pymssql}
backendonly: BACKENDONLY=--backend-only

Expand All @@ -67,7 +66,7 @@ passenv=ORACLE_HOME NLS_LANG POSTGRESQL MYSQL ORACLE MSSQL SQLITE WORKERS
# for nocext, we rm *.so in lib in case we are doing usedevelop=True
commands=
{nocext}: sh -c "rm -f lib/sqlalchemy/*.so"
{env:BASECOMMAND} {env:WORKERS} {env:SQLITE:} {env:POSTGRESQL:} {env:MYSQL:} {env:ORACLE:} {env:MSSQL:} {env:BACKENDONLY:} {env:NOMEMORY:} {env:COVERAGE:} {posargs}
{env:BASECOMMAND} {env:WORKERS} {env:SQLITE:} {env:POSTGRESQL:} {env:MYSQL:} {env:ORACLE:} {env:MSSQL:} {env:BACKENDONLY:} {env:COVERAGE:} {posargs}
{oracle}: python reap_oracle_dbs.py oracle_idents.txt


Expand Down

0 comments on commit 2c594da

Please sign in to comment.