This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch jmac/googlecas_and_virtual_directories_1
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 369bf1e77394c805697c26a501ba183b6deb1896
Author: Jürg Billeter <[email protected]>
AuthorDate: Sun May 13 20:20:57 2018 +0200

    tests: Use context manager for ArtifactShare
---
 tests/artifactcache/junctions.py |  90 +++++------
 tests/frontend/pull.py           | 323 ++++++++++++++++++++-------------------
 tests/frontend/push.py           | 234 ++++++++++++++--------------
 tests/testutils/artifactshare.py |  18 ++-
 4 files changed, 341 insertions(+), 324 deletions(-)

diff --git a/tests/artifactcache/junctions.py b/tests/artifactcache/junctions.py
index 0457d46..3fcea97 100644
--- a/tests/artifactcache/junctions.py
+++ b/tests/artifactcache/junctions.py
@@ -43,48 +43,48 @@ def test_push_pull(cli, tmpdir, datafiles):
     project = os.path.join(str(datafiles), 'foo')
     base_project = os.path.join(str(project), 'base')
 
-    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-foo'))
-    base_share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-base'))
-
-    # First build it without the artifact cache configured
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    assert result.exit_code == 0
-
-    # Assert that we are now cached locally
-    state = cli.get_element_state(project, 'target.bst')
-    assert state == 'cached'
-    state = cli.get_element_state(base_project, 'target.bst')
-    assert state == 'cached'
-
-    project_set_artifacts(project, share.repo)
-    project_set_artifacts(base_project, base_share.repo)
-
-    # Now try bst push
-    result = cli.run(project=project, args=['push', '--deps', 'all', 'target.bst'])
-    assert result.exit_code == 0
-
-    # And finally assert that the artifacts are in the right shares
-    assert_shared(cli, share, 'foo', project, 'target.bst')
-    assert_shared(cli, base_share, 'base', base_project, 'target.bst')
-
-    # Now we've pushed, delete the user's local artifact cache
-    # directory and try to redownload it from the share
-    #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
-
-    # Assert that nothing is cached locally anymore
-    state = cli.get_element_state(project, 'target.bst')
-    assert state != 'cached'
-    state = cli.get_element_state(base_project, 'target.bst')
-    assert state != 'cached'
-
-    # Now try bst pull
-    result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
-    assert result.exit_code == 0
-
-    # And assert that they are again in the local cache, without having built
-    state = cli.get_element_state(project, 'target.bst')
-    assert state == 'cached'
-    state = cli.get_element_state(base_project, 'target.bst')
-    assert state == 'cached'
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-foo')) as share,\
+        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare-base')) as base_share:
+
+        # First build it without the artifact cache configured
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        assert result.exit_code == 0
+
+        # Assert that we are now cached locally
+        state = cli.get_element_state(project, 'target.bst')
+        assert state == 'cached'
+        state = cli.get_element_state(base_project, 'target.bst')
+        assert state == 'cached'
+
+        project_set_artifacts(project, share.repo)
+        project_set_artifacts(base_project, base_share.repo)
+
+        # Now try bst push
+        result = cli.run(project=project, args=['push', '--deps', 'all', 'target.bst'])
+        assert result.exit_code == 0
+
+        # And finally assert that the artifacts are in the right shares
+        assert_shared(cli, share, 'foo', project, 'target.bst')
+        assert_shared(cli, base_share, 'base', base_project, 'target.bst')
+
+        # Now we've pushed, delete the user's local artifact cache
+        # directory and try to redownload it from the share
+        #
+        artifacts = os.path.join(cli.directory, 'artifacts')
+        shutil.rmtree(artifacts)
+
+        # Assert that nothing is cached locally anymore
+        state = cli.get_element_state(project, 'target.bst')
+        assert state != 'cached'
+        state = cli.get_element_state(base_project, 'target.bst')
+        assert state != 'cached'
+
+        # Now try bst pull
+        result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+        assert result.exit_code == 0
+
+        # And assert that they are again in the local cache, without having built
+        state = cli.get_element_state(project, 'target.bst')
+        assert state == 'cached'
+        state = cli.get_element_state(base_project, 'target.bst')
+        assert state == 'cached'
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 68a4ca2..8c30d01 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -44,38 +44,39 @@ def assert_not_shared(cli, share, project, element_name):
 @pytest.mark.datafiles(DATA_DIR)
 def test_push_pull_all(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
-    # First build the target element and push to the remote.
-    cli.configure({
-        'artifacts': {'url': share.repo, 'push': True}
-    })
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
-
-    # Assert that everything is now cached in the remote.
-    all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
-    for element_name in all_elements:
-        assert_shared(cli, share, project, element_name)
-
-    # Now we've pushed, delete the user's local artifact cache
-    # directory and try to redownload it from the share
-    #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
 
-    # Assert that nothing is cached locally anymore
-    for element_name in all_elements:
-        assert cli.get_element_state(project, element_name) != 'cached'
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build the target element and push to the remote.
+        cli.configure({
+            'artifacts': {'url': share.repo, 'push': True}
+        })
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+        # Assert that everything is now cached in the remote.
+        all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 'compose-all.bst']
+        for element_name in all_elements:
+            assert_shared(cli, share, project, element_name)
+
+        # Now we've pushed, delete the user's local artifact cache
+        # directory and try to redownload it from the share
+        #
+        artifacts = os.path.join(cli.directory, 'artifacts')
+        shutil.rmtree(artifacts)
 
-    # Now try bst pull
-    result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
-    result.assert_success()
+        # Assert that nothing is cached locally anymore
+        for element_name in all_elements:
+            assert cli.get_element_state(project, element_name) != 'cached'
 
-    # And assert that it's again in the local cache, without having built
-    for element_name in all_elements:
-        assert cli.get_element_state(project, element_name) == 'cached'
+        # Now try bst pull
+        result = cli.run(project=project, args=['pull', '--deps', 'all', 'target.bst'])
+        result.assert_success()
+
+        # And assert that it's again in the local cache, without having built
+        for element_name in all_elements:
+            assert cli.get_element_state(project, element_name) == 'cached'
 
 
 # Tests that:
@@ -88,36 +89,36 @@ def test_push_pull_all(cli, tmpdir, datafiles):
 def test_pull_secondary_cache(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
 
-    share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
-    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
+        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
 
-    # Build the target and push it to share2 only.
-    cli.configure({
-        'artifacts': [
-            {'url': share1.repo, 'push': False},
-            {'url': share2.repo, 'push': True},
-        ]
-    })
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
+        # Build the target and push it to share2 only.
+        cli.configure({
+            'artifacts': [
+                {'url': share1.repo, 'push': False},
+                {'url': share2.repo, 'push': True},
+            ]
+        })
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
 
-    assert_not_shared(cli, share1, project, 'target.bst')
-    assert_shared(cli, share2, project, 'target.bst')
+        assert_not_shared(cli, share1, project, 'target.bst')
+        assert_shared(cli, share2, project, 'target.bst')
 
-    # Delete the user's local artifact cache.
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+        # Delete the user's local artifact cache.
+        artifacts = os.path.join(cli.directory, 'artifacts')
+        shutil.rmtree(artifacts)
 
-    # Assert that the element is not cached anymore.
-    assert cli.get_element_state(project, 'target.bst') != 'cached'
+        # Assert that the element is not cached anymore.
+        assert cli.get_element_state(project, 'target.bst') != 'cached'
 
-    # Now try bst pull
-    result = cli.run(project=project, args=['pull', 'target.bst'])
-    result.assert_success()
+        # Now try bst pull
+        result = cli.run(project=project, args=['pull', 'target.bst'])
+        result.assert_success()
 
-    # And assert that it's again in the local cache, without having built,
-    # i.e. we found it in share2.
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
+        # And assert that it's again in the local cache, without having built,
+        # i.e. we found it in share2.
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
 
 
 # Tests that:
@@ -130,45 +131,45 @@ def test_pull_secondary_cache(cli, tmpdir, datafiles):
 def test_push_pull_specific_remote(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
 
-    good_share = create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare'))
-    bad_share = create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare'))
+    with create_artifact_share(os.path.join(str(tmpdir), 'goodartifactshare')) as good_share,\
+        create_artifact_share(os.path.join(str(tmpdir), 'badartifactshare')) as bad_share:
 
-    # Build the target so we have it cached locally only.
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
+        # Build the target so we have it cached locally only.
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
 
-    state = cli.get_element_state(project, 'target.bst')
-    assert state == 'cached'
+        state = cli.get_element_state(project, 'target.bst')
+        assert state == 'cached'
 
-    # Configure the default push location to be bad_share; we will assert that
-    # nothing actually gets pushed there.
-    cli.configure({
-        'artifacts': {'url': bad_share.repo, 'push': True},
-    })
+        # Configure the default push location to be bad_share; we will assert 
that
+        # nothing actually gets pushed there.
+        cli.configure({
+            'artifacts': {'url': bad_share.repo, 'push': True},
+        })
 
-    # Now try `bst push` to the good_share.
-    result = cli.run(project=project, args=[
-        'push', 'target.bst', '--remote', good_share.repo
-    ])
-    result.assert_success()
+        # Now try `bst push` to the good_share.
+        result = cli.run(project=project, args=[
+            'push', 'target.bst', '--remote', good_share.repo
+        ])
+        result.assert_success()
 
-    # Assert that all the artifacts are in the share we pushed
-    # to, and not the other.
-    assert_shared(cli, good_share, project, 'target.bst')
-    assert_not_shared(cli, bad_share, project, 'target.bst')
+        # Assert that all the artifacts are in the share we pushed
+        # to, and not the other.
+        assert_shared(cli, good_share, project, 'target.bst')
+        assert_not_shared(cli, bad_share, project, 'target.bst')
 
-    # Now we've pushed, delete the user's local artifact cache
-    # directory and try to redownload it from the good_share.
-    #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
+        # Now we've pushed, delete the user's local artifact cache
+        # directory and try to redownload it from the good_share.
+        #
+        artifacts = os.path.join(cli.directory, 'artifacts')
+        shutil.rmtree(artifacts)
 
-    result = cli.run(project=project, args=['pull', 'target.bst', '--remote',
-                                            good_share.repo])
-    result.assert_success()
+        result = cli.run(project=project, args=['pull', 'target.bst', 
'--remote',
+                                                good_share.repo])
+        result.assert_success()
 
-    # And assert that it's again in the local cache, without having built
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
+        # And assert that it's again in the local cache, without having built
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
 
 
 # Tests that:
@@ -179,50 +180,51 @@ def test_push_pull_specific_remote(cli, tmpdir, datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_push_pull_non_strict(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-    workspace = os.path.join(str(tmpdir), 'workspace')
-
-    # First build the target element and push to the remote.
-    cli.configure({
-        'artifacts': {'url': share.repo, 'push': True},
-        'projects': {
-            'test': {'strict': False}
-        }
-    })
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
-
-    # Assert that everything is now cached in the remote.
-    all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 
'compose-all.bst']
-    for element_name in all_elements:
-        assert_shared(cli, share, project, element_name)
-
-    # Now we've pushed, delete the user's local artifact cache
-    # directory and try to redownload it from the share
-    #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
 
-    # Assert that nothing is cached locally anymore
-    for element_name in all_elements:
-        assert cli.get_element_state(project, element_name) != 'cached'
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+        workspace = os.path.join(str(tmpdir), 'workspace')
 
-    # Add a file to force change in strict cache key of import-bin.bst
-    with open(os.path.join(str(project), 'files', 'bin-files', 'usr', 'bin', 
'world'), 'w') as f:
-        f.write('world')
+        # First build the target element and push to the remote.
+        cli.configure({
+            'artifacts': {'url': share.repo, 'push': True},
+            'projects': {
+                'test': {'strict': False}
+            }
+        })
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
 
-    # Assert that the workspaced element requires a rebuild
-    assert cli.get_element_state(project, 'import-bin.bst') == 'buildable'
-    # Assert that the target is still waiting due to --no-strict
-    assert cli.get_element_state(project, 'target.bst') == 'waiting'
+        # Assert that everything is now cached in the remote.
+        all_elements = ['target.bst', 'import-bin.bst', 'import-dev.bst', 
'compose-all.bst']
+        for element_name in all_elements:
+            assert_shared(cli, share, project, element_name)
 
-    # Now try bst pull
-    result = cli.run(project=project, args=['pull', '--deps', 'all', 
'target.bst'])
-    result.assert_success()
+        # Now we've pushed, delete the user's local artifact cache
+        # directory and try to redownload it from the share
+        #
+        artifacts = os.path.join(cli.directory, 'artifacts')
+        shutil.rmtree(artifacts)
 
-    # And assert that the target is again in the local cache, without having 
built
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
+        # Assert that nothing is cached locally anymore
+        for element_name in all_elements:
+            assert cli.get_element_state(project, element_name) != 'cached'
+
+        # Add a file to force change in strict cache key of import-bin.bst
+        with open(os.path.join(str(project), 'files', 'bin-files', 'usr', 
'bin', 'world'), 'w') as f:
+            f.write('world')
+
+        # Assert that the workspaced element requires a rebuild
+        assert cli.get_element_state(project, 'import-bin.bst') == 'buildable'
+        # Assert that the target is still waiting due to --no-strict
+        assert cli.get_element_state(project, 'target.bst') == 'waiting'
+
+        # Now try bst pull
+        result = cli.run(project=project, args=['pull', '--deps', 'all', 
'target.bst'])
+        result.assert_success()
+
+        # And assert that the target is again in the local cache, without 
having built
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
 
 
 # Regression test for https://gitlab.com/BuildStream/buildstream/issues/202
@@ -230,40 +232,41 @@ def test_push_pull_non_strict(cli, tmpdir, datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_push_pull_track_non_strict(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
-
-    # First build the target element and push to the remote.
-    cli.configure({
-        'artifacts': {'url': share.repo, 'push': True},
-        'projects': {
-            'test': {'strict': False}
-        }
-    })
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
-
-    # Assert that everything is now cached in the remote.
-    all_elements = {'target.bst', 'import-bin.bst', 'import-dev.bst', 
'compose-all.bst'}
-    for element_name in all_elements:
-        assert_shared(cli, share, project, element_name)
-
-    # Now we've pushed, delete the user's local artifact cache
-    # directory and try to redownload it from the share
-    #
-    artifacts = os.path.join(cli.directory, 'artifacts')
-    shutil.rmtree(artifacts)
-
-    # Assert that nothing is cached locally anymore
-    for element_name in all_elements:
-        assert cli.get_element_state(project, element_name) != 'cached'
-
-    # Now try bst build with tracking and pulling.
-    # Tracking will be skipped for target.bst as it doesn't have any sources.
-    # With the non-strict build plan target.bst immediately enters the pull 
queue.
-    # However, pulling has to be deferred until the dependencies have been
-    # tracked as the strict cache key needs to be calculated before querying
-    # the caches.
-    result = cli.run(project=project, args=['build', '--track-all', '--all', 
'target.bst'])
-    result.assert_success()
-    assert set(result.get_pulled_elements()) == all_elements
+
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build the target element and push to the remote.
+        cli.configure({
+            'artifacts': {'url': share.repo, 'push': True},
+            'projects': {
+                'test': {'strict': False}
+            }
+        })
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+        # Assert that everything is now cached in the remote.
+        all_elements = {'target.bst', 'import-bin.bst', 'import-dev.bst', 
'compose-all.bst'}
+        for element_name in all_elements:
+            assert_shared(cli, share, project, element_name)
+
+        # Now we've pushed, delete the user's local artifact cache
+        # directory and try to redownload it from the share
+        #
+        artifacts = os.path.join(cli.directory, 'artifacts')
+        shutil.rmtree(artifacts)
+
+        # Assert that nothing is cached locally anymore
+        for element_name in all_elements:
+            assert cli.get_element_state(project, element_name) != 'cached'
+
+        # Now try bst build with tracking and pulling.
+        # Tracking will be skipped for target.bst as it doesn't have any 
sources.
+        # With the non-strict build plan target.bst immediately enters the 
pull queue.
+        # However, pulling has to be deferred until the dependencies have been
+        # tracked as the strict cache key needs to be calculated before 
querying
+        # the caches.
+        result = cli.run(project=project, args=['build', '--track-all', 
'--all', 'target.bst'])
+        result.assert_success()
+        assert set(result.get_pulled_elements()) == all_elements
diff --git a/tests/frontend/push.py b/tests/frontend/push.py
index 80f06e0..721cd63 100644
--- a/tests/frontend/push.py
+++ b/tests/frontend/push.py
@@ -1,5 +1,4 @@
 import os
-import shutil
 import pytest
 from buildstream._exceptions import ErrorDomain
 from tests.testutils import cli, create_artifact_share
@@ -54,47 +53,47 @@ def test_push(cli, tmpdir, datafiles):
     assert cli.get_element_state(project, 'target.bst') == 'cached'
 
     # Set up two artifact shares.
-    share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
-    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
-
-    # Try pushing with no remotes configured. This should fail.
-    result = cli.run(project=project, args=['push', 'target.bst'])
-    result.assert_main_error(ErrorDomain.STREAM, None)
-
-    # Configure bst to pull but not push from a cache and run `bst push`.
-    # This should also fail.
-    cli.configure({
-        'artifacts': {'url': share1.repo, 'push': False},
-    })
-    result = cli.run(project=project, args=['push', 'target.bst'])
-    result.assert_main_error(ErrorDomain.STREAM, None)
-
-    # Configure bst to push to one of the caches and run `bst push`. This 
works.
-    cli.configure({
-        'artifacts': [
-            {'url': share1.repo, 'push': False},
-            {'url': share2.repo, 'push': True},
-        ]
-    })
-    result = cli.run(project=project, args=['push', 'target.bst'])
-
-    assert_not_shared(cli, share1, project, 'target.bst')
-    assert_shared(cli, share2, project, 'target.bst')
-
-    # Now try pushing to both (making sure to empty the cache we just pushed
-    # to).
-    shutil.rmtree(share2.directory)
-    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
-    cli.configure({
-        'artifacts': [
-            {'url': share1.repo, 'push': True},
-            {'url': share2.repo, 'push': True},
-        ]
-    })
-    result = cli.run(project=project, args=['push', 'target.bst'])
-
-    assert_shared(cli, share1, project, 'target.bst')
-    assert_shared(cli, share2, project, 'target.bst')
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1:
+
+        with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+
+            # Try pushing with no remotes configured. This should fail.
+            result = cli.run(project=project, args=['push', 'target.bst'])
+            result.assert_main_error(ErrorDomain.STREAM, None)
+
+            # Configure bst to pull but not push from a cache and run `bst 
push`.
+            # This should also fail.
+            cli.configure({
+                'artifacts': {'url': share1.repo, 'push': False},
+            })
+            result = cli.run(project=project, args=['push', 'target.bst'])
+            result.assert_main_error(ErrorDomain.STREAM, None)
+
+            # Configure bst to push to one of the caches and run `bst push`. 
This works.
+            cli.configure({
+                'artifacts': [
+                    {'url': share1.repo, 'push': False},
+                    {'url': share2.repo, 'push': True},
+                ]
+            })
+            result = cli.run(project=project, args=['push', 'target.bst'])
+
+            assert_not_shared(cli, share1, project, 'target.bst')
+            assert_shared(cli, share2, project, 'target.bst')
+
+        # Now try pushing to both
+
+        with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
+            cli.configure({
+                'artifacts': [
+                    {'url': share1.repo, 'push': True},
+                    {'url': share2.repo, 'push': True},
+                ]
+            })
+            result = cli.run(project=project, args=['push', 'target.bst'])
+
+            assert_shared(cli, share1, project, 'target.bst')
+            assert_shared(cli, share2, project, 'target.bst')
 
 
 # Tests that `bst push --deps all` pushes all dependencies of the given 
element.
@@ -103,46 +102,47 @@ def test_push(cli, tmpdir, datafiles):
 @pytest.mark.datafiles(DATA_DIR)
 def test_push_all(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
-    share = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare'))
 
-    # First build it without the artifact cache configured
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
-
-    # Assert that we are now cached locally
-    assert cli.get_element_state(project, 'target.bst') == 'cached'
-
-    # Configure artifact share
-    cli.configure({
-        #
-        # FIXME: This test hangs "sometimes" if we allow
-        #        concurrent push.
-        #
-        #        It's not too bad to ignore since we're
-        #        using the local artifact cache functionality
-        #        only, but it should probably be fixed.
-        #
-        'scheduler': {
-            'pushers': 1
-        },
-        'artifacts': {
-            'url': share.repo,
-            'push': True,
-        }
-    })
-
-    # Now try bst push all the deps
-    result = cli.run(project=project, args=[
-        'push', 'target.bst',
-        '--deps', 'all'
-    ])
-    result.assert_success()
-
-    # And finally assert that all the artifacts are in the share
-    assert_shared(cli, share, project, 'target.bst')
-    assert_shared(cli, share, project, 'import-bin.bst')
-    assert_shared(cli, share, project, 'import-dev.bst')
-    assert_shared(cli, share, project, 'compose-all.bst')
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare')) as share:
+
+        # First build it without the artifact cache configured
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
+
+        # Assert that we are now cached locally
+        assert cli.get_element_state(project, 'target.bst') == 'cached'
+
+        # Configure artifact share
+        cli.configure({
+            #
+            # FIXME: This test hangs "sometimes" if we allow
+            #        concurrent push.
+            #
+            #        It's not too bad to ignore since we're
+            #        using the local artifact cache functionality
+            #        only, but it should probably be fixed.
+            #
+            'scheduler': {
+                'pushers': 1
+            },
+            'artifacts': {
+                'url': share.repo,
+                'push': True,
+            }
+        })
+
+        # Now try bst push all the deps
+        result = cli.run(project=project, args=[
+            'push', 'target.bst',
+            '--deps', 'all'
+        ])
+        result.assert_success()
+
+        # And finally assert that all the artifacts are in the share
+        assert_shared(cli, share, project, 'target.bst')
+        assert_shared(cli, share, project, 'import-bin.bst')
+        assert_shared(cli, share, project, 'import-dev.bst')
+        assert_shared(cli, share, project, 'compose-all.bst')
 
 
 # Tests that `bst build` won't push artifacts to the cache it just pulled from.
@@ -154,44 +154,44 @@ def test_push_after_pull(cli, tmpdir, datafiles):
     project = os.path.join(datafiles.dirname, datafiles.basename)
 
     # Set up two artifact shares.
-    share1 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1'))
-    share2 = create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2'))
+    with create_artifact_share(os.path.join(str(tmpdir), 'artifactshare1')) as share1,\
+        create_artifact_share(os.path.join(str(tmpdir), 'artifactshare2')) as share2:
 
-    # Set the scene: share1 has the artifact, share2 does not.
-    #
-    cli.configure({
-        'artifacts': {'url': share1.repo, 'push': True},
-    })
+        # Set the scene: share1 has the artifact, share2 does not.
+        #
+        cli.configure({
+            'artifacts': {'url': share1.repo, 'push': True},
+        })
 
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
 
-    cli.remove_artifact_from_cache(project, 'target.bst')
+        cli.remove_artifact_from_cache(project, 'target.bst')
 
-    assert_shared(cli, share1, project, 'target.bst')
-    assert_not_shared(cli, share2, project, 'target.bst')
-    assert cli.get_element_state(project, 'target.bst') != 'cached'
+        assert_shared(cli, share1, project, 'target.bst')
+        assert_not_shared(cli, share2, project, 'target.bst')
+        assert cli.get_element_state(project, 'target.bst') != 'cached'
 
-    # Now run the build again. Correct `bst build` behaviour is to download the
-    # artifact from share1 but not push it back again.
-    #
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
-    assert result.get_pulled_elements() == ['target.bst']
-    assert result.get_pushed_elements() == []
-
-    # Delete the artifact locally again.
-    cli.remove_artifact_from_cache(project, 'target.bst')
-
-    # Now we add share2 into the mix as a second push remote. This time,
-    # `bst build` should push to share2 after pulling from share1.
-    cli.configure({
-        'artifacts': [
-            {'url': share1.repo, 'push': True},
-            {'url': share2.repo, 'push': True},
-        ]
-    })
-    result = cli.run(project=project, args=['build', 'target.bst'])
-    result.assert_success()
-    assert result.get_pulled_elements() == ['target.bst']
-    assert result.get_pushed_elements() == ['target.bst']
+        # Now run the build again. Correct `bst build` behaviour is to 
download the
+        # artifact from share1 but not push it back again.
+        #
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
+        assert result.get_pulled_elements() == ['target.bst']
+        assert result.get_pushed_elements() == []
+
+        # Delete the artifact locally again.
+        cli.remove_artifact_from_cache(project, 'target.bst')
+
+        # Now we add share2 into the mix as a second push remote. This time,
+        # `bst build` should push to share2 after pulling from share1.
+        cli.configure({
+            'artifacts': [
+                {'url': share1.repo, 'push': True},
+                {'url': share2.repo, 'push': True},
+            ]
+        })
+        result = cli.run(project=project, args=['build', 'target.bst'])
+        result.assert_success()
+        assert result.get_pulled_elements() == ['target.bst']
+        assert result.get_pushed_elements() == ['target.bst']
diff --git a/tests/testutils/artifactshare.py b/tests/testutils/artifactshare.py
index 57ed598..b7cb5d3 100644
--- a/tests/testutils/artifactshare.py
+++ b/tests/testutils/artifactshare.py
@@ -2,6 +2,9 @@ import string
 import pytest
 import subprocess
 import os
+import shutil
+
+from contextlib import contextmanager
 
 from buildstream import _yaml
 
@@ -90,11 +93,22 @@ class ArtifactShare():
 
         return False
 
+    # close():
+    #
+    # Remove the artifact share.
+    #
+    def close(self):
+        shutil.rmtree(self.directory)
+
 
 # create_artifact_share()
 #
 # Create an ArtifactShare for use in a test case
 #
+@contextmanager
 def create_artifact_share(directory):
-
-    return ArtifactShare(directory)
+    share = ArtifactShare(directory)
+    try:
+        yield share
+    finally:
+        share.close()

Reply via email to