Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Test removed recycle pool warns, no longer errors #345

Open
wants to merge 6 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions q2cli/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -582,8 +582,14 @@ def __call__(self, **kwargs):
# succeeded, then we need to clean up the pool. Make sure to do this at
# the very end so if a failure happens during writing results we still
# have them
if recycle_pool == default_pool:
cache.remove(recycle_pool)
#
# It is possible that another run of the same action already finished
# and removed the pool we were using in which case we do not want to
# attempt to delete it again and get an error
with cache.lock:
if recycle_pool == default_pool \
and os.path.exists(cache.pools / recycle_pool):
cache.remove(recycle_pool)

# Set the USED_ARTIFACT_CACHE back to the default cache. This is mostly
# useful for the tests that invoke actions back to back to back without
Expand Down
61 changes: 61 additions & 0 deletions q2cli/tests/test_cache_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ def setUp(self):
self.ints1 = {'1': self.art4, '2': self.art5}
self.ints2 = {'1': self.art1, '2': self.art2}
self.mapping = Artifact.import_data(Mapping, {'a': '1', 'b': '2'})
self.mapping2 = Artifact.import_data(Mapping, {'a': '42'})

self.metadata = os.path.join(self.tempdir, 'metadata.tsv')
with open(self.metadata, 'w') as fh:
Expand Down Expand Up @@ -1016,6 +1017,66 @@ def test_cache_arg_invalid(self):
f"received '{art_path}' which is not a path to an existing cache",
result.output)

# NOTE: This test may fail stochastically. By launching 5 concurrent runs of
# the same action, we are almost guaranteed the warning is raised at least
# once, but unlucky scheduling could still prevent it
def test_multiple_of_same_action_at_once_default_pool(self):
    """Run the same pipeline 5 times concurrently against one cache.

    All runs share the same default recycle pool, so whichever run
    finishes last finds the pool already removed and should warn
    (rather than error) about the missing pool path.
    """
    import subprocess
    import re

    art1_path = os.path.join(self.tempdir, 'art1.qza')
    self.art1.save(art1_path)

    mapping_path = os.path.join(self.tempdir, 'mapping.qza')
    self.mapping2.save(mapping_path)

    # Five identical invocations differing only in the numbered output
    # dir, so they race over the same default recycle pool
    commands = [
        'qiime dummy-plugin typical-pipeline --i-int-sequence'
        f' {art1_path} --i-mapping {mapping_path}'
        ' --p-do-extra-thing False --output-dir'
        f' {os.path.join(self.tempdir, "out")}{idx}.qza'
        ' --verbose'
        for idx in range(1, 6)
    ]

    processes = [
        subprocess.Popen(cmd, shell=True,
                         stderr=subprocess.PIPE) for cmd in commands]

    warned = False
    regex = 'UserWarning: The named pool path.*does not exist'

    for process in processes:
        # communicate() waits for the process while draining the stderr
        # pipe; wait() alone can deadlock if the child fills the pipe
        _, err_bytes = process.communicate()
        err = err_bytes.decode() if err_bytes is not None else ''

        # Any run hard-failing (not merely warning) is a test failure
        if process.returncode != 0:
            raise ValueError(err)

        if re.search(regex, err):
            warned = True

    # self.assertTrue survives python -O, unlike a bare assert
    self.assertTrue(warned)

def _load_alias_execution_contexts(self, collection):
execution_contexts = []

Expand Down
Loading