This is an automated email from the ASF dual-hosted git repository.

tvb pushed a commit to branch tristan/remote-cli-options
in repository https://gitbox.apache.org/repos/asf/buildstream.git

commit 2937f6b7ac3dcd6be3ec7f64bede2a265b096f71
Author: Tristan van Berkom <[email protected]>
AuthorDate: Fri Feb 5 16:06:44 2021 +0900

    Support specifying remotes on the command line.
    
    Previously, only a remote URL could be specified on the command line,
    which is of limited use considering that remotes normally require
    credentials, and that remote configuration can be rather complex, as
    one may need to split indexes and storage, etc.
    
    Also, some commands which use remotes did not support specifying a
    remote URL at all, and the build command supported only an artifact
    cache URL but no source cache URL.
    
    With this patch, multiple fully specified remotes can be provided on the
    command line for every command which may end up using remotes. These
    commands also gain an additional --ignore-project-remotes option, allowing
    the user to decide on a per invocation basis whether to observe project
    suggested remotes or not (see the example invocation below).
    
    Summary of changes:
    
      * cli.py: A new click ParamType uses RemoteSpec.new_from_string() to
        parse fully configurable RemoteSpecs from the command line.
    
        Commands which use remotes can now specify one or more remotes using
        this new param type.
    
      * _stream.py: Expose API for remote lists for all commands which may use
        remotes.
    
      * _context.py: Support the command line remote lists and command line
        ignore-project-remotes switches when resolving which remotes shall
        be used for which projects.
    
      * tests: Fixed for CLI changes
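    
    As a rough sketch of the intended usage (the server URLs below are
    hypothetical), a build might now be invoked as:
    
        bst build \
            --artifact-remote https://artifacts.example.com \
            --source-remote https://sources.example.com \
            --ignore-project-artifact-remotes \
            target.bst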
---
 src/buildstream/_context.py      | 157 +++++++++------
 src/buildstream/_frontend/cli.py | 191 +++++++++++++++---
 src/buildstream/_stream.py       | 413 +++++++++++++++++++++++++--------------
 tests/frontend/completions.py    |  13 +-
 tests/frontend/pull.py           |   4 +-
 5 files changed, 545 insertions(+), 233 deletions(-)

diff --git a/src/buildstream/_context.py b/src/buildstream/_context.py
index 9422a28..2466650 100644
--- a/src/buildstream/_context.py
+++ b/src/buildstream/_context.py
@@ -46,6 +46,26 @@ if TYPE_CHECKING:
     # pylint: enable=cyclic-import
 
 
+# _CacheConfig
+#
+# A convenience object for parsing artifact/source cache configurations
+#
+class _CacheConfig:
+    def __init__(self, override_projects: bool, remote_specs: List[RemoteSpec]):
+        self.override_projects: bool = override_projects
+        self.remote_specs: List[RemoteSpec] = remote_specs
+
+    @classmethod
+    def new_from_node(cls, node: MappingNode) -> "_CacheConfig":
+        node.validate_keys(["override-project-caches", "servers"])
+        servers = node.get_sequence("servers", default=[], allowed_types=[MappingNode])
+
+        override_projects: bool = node.get_bool("push", default=False)
+        remote_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(node) for node in servers]
+
+        return cls(override_projects, remote_specs)
+
+
 # Context()
 #
 # The Context object holds all of the user preferences
@@ -502,15 +522,19 @@ class Context:
     # Args:
     #    connect_artifact_cache: Whether to try to contact remote artifact caches
     #    connect_source_cache: Whether to try to contact remote source caches
-    #    artifact_remote: An overriding artifact cache remote, or None
-    #    source_remote: An overriding source cache remote, or None
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
     def initialize_remotes(
         self,
         connect_artifact_cache: bool,
         connect_source_cache: bool,
-        artifact_remote: Optional[RemoteSpec],
-        source_remote: Optional[RemoteSpec],
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
+        ignore_project_source_remotes: bool = False,
     ) -> None:
 
         # Ensure all projects are fully loaded.
@@ -528,34 +552,6 @@ class Context:
             if remote_execution:
                 self.pull_artifact_files, self.remote_execution_specs = self._load_remote_execution(remote_execution)
 
-        cli_artifact_remotes = [artifact_remote] if artifact_remote else []
-        cli_source_remotes = [source_remote] if source_remote else []
-
-        #
-        # Helper function to resolve which remote specs apply for a given project
-        #
-        def resolve_specs_for_project(
-            project: "Project", global_config: _CacheConfig, override_key: 
str, project_attribute: str,
-        ) -> List[RemoteSpec]:
-
-            # Obtain the overrides
-            override_node = self.get_overrides(project.name)
-            override_config_node = override_node.get_mapping(override_key, default={})
-            override_config = _CacheConfig.new_from_node(override_config_node)
-            if override_config.override_projects:
-                return override_config.remote_specs
-            elif global_config.override_projects:
-                return global_config.remote_specs
-
-            # If there were no explicit overrides, then take either the project specific
-            # config or fallback to the global config, and tack on the project recommended
-            # remotes at the end.
-            #
-            config_specs = override_config.remote_specs or global_config.remote_specs
-            project_specs = getattr(project, project_attribute)
-            all_specs = config_specs + project_specs
-            return list(utils._deduplicate(all_specs))
-
         #
         # Maintain our list of remote specs for artifact and source caches
         #
@@ -564,13 +560,22 @@ class Context:
             source_specs: List[RemoteSpec] = []
 
             if connect_artifact_cache:
-                artifact_specs = cli_artifact_remotes or resolve_specs_for_project(
-                    project, self._global_artifact_cache_config, "artifacts", "artifact_cache_specs",
+                artifact_specs = self._resolve_specs_for_project(
+                    project,
+                    artifact_remotes,
+                    ignore_project_artifact_remotes,
+                    self._global_artifact_cache_config,
+                    "artifacts",
+                    "artifact_cache_specs",
                 )
-
             if connect_source_cache:
-                source_specs = cli_source_remotes or resolve_specs_for_project(
-                    project, self._global_source_cache_config, "source-caches", "source_cache_specs",
+                source_specs = self._resolve_specs_for_project(
+                    project,
+                    source_remotes,
+                    ignore_project_source_remotes,
+                    self._global_source_cache_config,
+                    "source-caches",
+                    "source_cache_specs",
                 )
 
             # Advertize the per project remote specs publicly for the frontend
@@ -683,6 +688,64 @@ class Context:
     #                  Private methods                   #
     ######################################################
 
+    # _resolve_specs_for_project()
+    #
+    # Helper function to resolve which remote specs apply for a given project
+    #
+    # Args:
+    #    project: The project
+    #    cli_remotes: The remotes specified in the CLI
+    #    cli_override: Whether the CLI decided to override project suggestions
+    #    global_config: The global user configuration for this remote type
+    #    override_key: The key to lookup project overrides for this remote type
+    #    project_attribute: The Project attribute for project suggestions
+    #
+    # Returns:
+    #    The resolved remotes for this project.
+    #
+    def _resolve_specs_for_project(
+        self,
+        project: "Project",
+        cli_remotes: Iterable[RemoteSpec],
+        cli_override: bool,
+        global_config: _CacheConfig,
+        override_key: str,
+        project_attribute: str,
+    ) -> List[RemoteSpec]:
+
+        # Early return if the CLI is taking full control
+        if cli_override and cli_remotes:
+            return list(cli_remotes)
+
+        # Obtain the overrides
+        override_node = self.get_overrides(project.name)
+        override_config_node = override_node.get_mapping(override_key, default={})
+        override_config = _CacheConfig.new_from_node(override_config_node)
+
+        #
+        # Decide on what remotes to use from user config, if any
+        #
+        # Priority CLI -> Project overrides -> Global config
+        #
+        remotes: List[RemoteSpec]
+        if cli_remotes:
+            remotes = list(cli_remotes)
+        elif override_config.remote_specs:
+            remotes = override_config.remote_specs
+        else:
+            remotes = global_config.remote_specs
+
+        # If any of the configs have disabled project remotes, return now
+        #
+        if cli_override or override_config.override_projects or global_config.override_projects:
+            return remotes
+
+        # If there are any project recommendations, append them at the end
+        project_remotes = getattr(project, project_attribute)
+        remotes = list(utils._deduplicate(remotes + project_remotes))
+
+        return remotes
+
     # Force the resolved XDG variables into the environment,
     # this is so that they can be used directly to specify
     # preferred locations of things from user configuration
@@ -710,23 +773,3 @@ class Context:
             remote_execution_specs = None
 
         return pull_artifact_files, remote_execution_specs
-
-
-# _CacheConfig
-#
-# A convenience object for parsing artifact/source cache configurations
-#
-class _CacheConfig:
-    def __init__(self, override_projects: bool, remote_specs: List[RemoteSpec]):
-        self.override_projects: bool = override_projects
-        self.remote_specs: List[RemoteSpec] = remote_specs
-
-    @classmethod
-    def new_from_node(cls, node: MappingNode) -> "_CacheConfig":
-        node.validate_keys(["override-project-caches", "servers"])
-        servers = node.get_sequence("servers", default=[], allowed_types=[MappingNode])
-
-        override_projects: bool = node.get_bool("push", default=False)
-        remote_specs: List[RemoteSpec] = [RemoteSpec.new_from_node(node) for node in servers]
-
-        return cls(override_projects, remote_specs)
diff --git a/src/buildstream/_frontend/cli.py b/src/buildstream/_frontend/cli.py
index fbe16d3..8221936 100644
--- a/src/buildstream/_frontend/cli.py
+++ b/src/buildstream/_frontend/cli.py
@@ -5,9 +5,10 @@ from functools import partial
 import shutil
 import click
 from .. import _yaml
-from .._exceptions import BstError, LoadError, AppError
+from .._exceptions import BstError, LoadError, AppError, RemoteError
 from .complete import main_bashcomplete, complete_path, CompleteUnhandled
 from ..types import _CacheBuildTrees, _SchedulerErrorAction, _PipelineSelection, _HostMount, _Scope
+from .._remotespec import RemoteSpec, RemoteSpecPurpose
 from ..utils import UtilError
 
 
@@ -36,6 +37,25 @@ class FastEnumType(click.Choice):
         return self._enum(super().convert(value, param, ctx))
 
 
+class RemoteSpecType(click.ParamType):
+    name = "remote"
+
+    def __init__(self, purpose=RemoteSpecPurpose.ALL):
+        self.purpose = purpose
+
+    def convert(self, value, param, ctx):
+        spec = None
+        try:
+            spec = RemoteSpec.new_from_string(value, self.purpose)
+        except RemoteError as e:
+            self.fail("Failed to interpret remote: {}".format(e))
+
+        return spec
+
+    def __repr__(self):
+        return "REMOTE"
+
+
 ##################################################################
 #            Override of click's main entry point                #
 ##################################################################
@@ -412,11 +432,38 @@ def init(app, project_name, min_version, element_path, force, target_directory):
     help="The dependencies to build",
 )
 @click.option(
-    "--remote", "-r", default=None, help="The URL of the remote cache 
(defaults to the first configured cache)"
+    "--artifact-remote",
+    "artifact_remotes",
+    type=RemoteSpecType(),
+    multiple=True,
+    help="A remote for uploading and downloading artifacts",
+)
+@click.option(
+    "--source-remote",
+    "source_remotes",
+    type=RemoteSpecType(),
+    multiple=True,
+    help="A remote for uploading and downloading cached sources",
+)
+@click.option(
+    "--ignore-project-artifact-remotes",
+    is_flag=True,
+    help="Ignore remote artifact cache servers recommended by projects",
+)
+@click.option(
+    "--ignore-project-source-remotes", is_flag=True, help="Ignore remote source cache servers recommended by projects"
 )
 @click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
-def build(app, elements, deps, remote):
+def build(
+    app,
+    elements,
+    deps,
+    artifact_remotes,
+    source_remotes,
+    ignore_project_artifact_remotes,
+    ignore_project_source_remotes,
+):
     """Build elements in a pipeline
 
     Specifying no elements will result in building the default targets
@@ -444,7 +491,15 @@ def build(app, elements, deps, remote):
             # Junction elements cannot be built, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.build(elements, selection=deps, ignore_junction_targets=ignore_junction_targets, remote=remote)
+        app.stream.build(
+            elements,
+            selection=deps,
+            ignore_junction_targets=ignore_junction_targets,
+            artifact_remotes=artifact_remotes,
+            source_remotes=source_remotes,
+            ignore_project_artifact_remotes=ignore_project_artifact_remotes,
+            ignore_project_source_remotes=ignore_project_source_remotes,
+        )
 
 
 ##################################################################
@@ -586,10 +641,18 @@ def show(app, elements, deps, except_, order, format_):
     ),
 )
 @click.option("--pull", "pull_", is_flag=True, help="Attempt to pull missing 
or incomplete artifacts")
+@click.option(
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PULL),
+    multiple=True,
+    help="A remote for downloading artifacts",
+)
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("element", required=False, type=click.Path(readable=False))
 @click.argument("command", type=click.STRING, nargs=-1)
 @click.pass_obj
-def shell(app, element, mount, isolate, build_, cli_buildtree, pull_, command):
+def shell(app, element, mount, isolate, build_, cli_buildtree, pull_, remotes, ignore_project_remotes, command):
     """Run a command in the target element's sandbox environment
 
     When this command is executed from a workspace directory, the default
@@ -636,6 +699,8 @@ def shell(app, element, mount, isolate, build_, cli_buildtree, pull_, command):
                 command=command,
                 usebuildtree=cli_buildtree,
                 pull_=pull_,
+                artifact_remotes=remotes,
+                ignore_project_artifact_remotes=ignore_project_remotes,
             )
         except BstError as e:
             raise AppError("Error launching shell: {}".format(e), 
detail=e.detail, reason=e.reason) from e
@@ -681,11 +746,16 @@ def source():
     help="The dependencies to fetch",
 )
 @click.option(
-    "--remote", "-r", default=None, help="The URL of the remote source cache 
(defaults to the first configured cache)"
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PULL),
+    multiple=True,
+    help="A remote for downloading sources",
 )
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
-def source_fetch(app, elements, deps, except_, remote):
+def source_fetch(app, elements, deps, except_, remotes, ignore_project_remotes):
     """Fetch sources required to build the pipeline
 
     Specifying no elements will result in fetching the default targets
@@ -713,7 +783,13 @@ def source_fetch(app, elements, deps, except_, remote):
         if not elements:
             elements = app.project.get_default_targets()
 
-        app.stream.fetch(elements, selection=deps, except_targets=except_, remote=remote)
+        app.stream.fetch(
+            elements,
+            selection=deps,
+            except_targets=except_,
+            source_remotes=remotes,
+            ignore_project_source_remotes=ignore_project_remotes,
+        )
 
 
 ##################################################################
@@ -738,11 +814,16 @@ def source_fetch(app, elements, deps, except_, remote):
     help="The dependencies to push",
 )
 @click.option(
-    "--remote", "-r", default=None, help="The URL of the remote source cache 
(defaults to the first configured cache)"
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PUSH),
+    multiple=True,
+    help="A remote for uploading sources",
 )
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("elements", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
-def source_push(app, elements, deps, remote):
+def source_push(app, elements, deps, remotes, ignore_project_remotes):
     """Push sources required to build the pipeline
 
     Specifying no elements will result in pushing the sources of the default
@@ -765,7 +846,9 @@ def source_push(app, elements, deps, remote):
         if not elements:
             elements = app.project.get_default_targets()
 
-        app.stream.source_push(elements, selection=deps, remote=remote)
+        app.stream.source_push(
+            elements, selection=deps, source_remotes=remotes, ignore_project_source_remotes=ignore_project_remotes
+        )
 
 
 ##################################################################
@@ -865,9 +948,19 @@ def source_track(app, elements, deps, except_, cross_junctions):
     type=click.Path(file_okay=False),
     help="The directory to checkout the sources to",
 )
+@click.option(
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PULL),
+    multiple=True,
+    help="A remote for downloading cached sources",
+)
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("element", required=False, type=click.Path(readable=False))
 @click.pass_obj
-def source_checkout(app, element, directory, force, deps, except_, tar, compression, build_scripts):
+def source_checkout(
+    app, element, directory, force, deps, except_, tar, compression, build_scripts, remotes, ignore_project_remotes
+):
     """Checkout sources of an element to the specified location
 
     When this command is executed from a workspace directory, the default
@@ -901,6 +994,8 @@ def source_checkout(app, element, directory, force, deps, except_, tar, compress
             tar=bool(tar),
             compression=compression,
             include_build_scripts=build_scripts,
+            source_remotes=remotes,
+            ignore_project_source_remotes=ignore_project_remotes,
         )
 
 
@@ -930,13 +1025,28 @@ def workspace():
     default=None,
     help="Only for use when a single Element is given: Set the directory to 
use to create the workspace",
 )
+@click.option(
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PULL),
+    multiple=True,
+    help="A remote for downloading cached sources",
+)
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("elements", nargs=-1, type=click.Path(readable=False), 
required=True)
 @click.pass_obj
-def workspace_open(app, no_checkout, force, directory, elements):
+def workspace_open(app, no_checkout, force, directory, remotes, ignore_project_remotes, elements):
     """Open a workspace for manual source modification"""
 
     with app.initialized():
-        app.stream.workspace_open(elements, no_checkout=no_checkout, force=force, custom_dir=directory)
+        app.stream.workspace_open(
+            elements,
+            no_checkout=no_checkout,
+            force=force,
+            custom_dir=directory,
+            source_remotes=remotes,
+            ignore_project_source_remotes=ignore_project_remotes,
+        )
 
 
 ##################################################################
@@ -1124,9 +1234,19 @@ def artifact_show(app, deps, artifacts):
 @click.option(
     "--directory", default=None, type=click.Path(file_okay=False), help="The 
directory to checkout the artifact to"
 )
+@click.option(
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PULL),
+    multiple=True,
+    help="A remote for downloading artifacts",
+)
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("target", required=False, type=click.Path(readable=False))
 @click.pass_obj
-def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, target):
+def artifact_checkout(
+    app, force, deps, integrate, hardlinks, tar, compression, pull_, directory, remotes, ignore_project_remotes, target
+):
     """Checkout contents of an artifact
 
     When this command is executed from a workspace directory, the default
@@ -1186,6 +1306,8 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
             pull=pull_,
             compression=compression,
             tar=bool(tar),
+            artifact_remotes=remotes,
+            ignore_project_artifact_remotes=ignore_project_remotes,
         )
 
 
@@ -1205,11 +1327,16 @@ def artifact_checkout(app, force, deps, integrate, hardlinks, tar, compression,
     help="The dependency artifacts to pull",
 )
 @click.option(
-    "--remote", "-r", default=None, help="The URL of the remote cache 
(defaults to the first configured cache)"
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PULL),
+    multiple=True,
+    help="A remote for downloading artifacts",
 )
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
-def artifact_pull(app, artifacts, deps, remote):
+def artifact_pull(app, deps, remotes, ignore_project_remotes, artifacts):
     """Pull a built artifact from the configured remote artifact cache.
 
     Specifying no elements will result in pulling the default targets
@@ -1240,7 +1367,13 @@ def artifact_pull(app, artifacts, deps, remote):
             # Junction elements cannot be pulled, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.pull(artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets)
+        app.stream.pull(
+            artifacts,
+            selection=deps,
+            ignore_junction_targets=ignore_junction_targets,
+            artifact_remotes=remotes,
+            ignore_project_artifact_remotes=ignore_project_remotes,
+        )
 
 
 ##################################################################
@@ -1259,12 +1392,17 @@ def artifact_pull(app, artifacts, deps, remote):
     help="The dependencies to push",
 )
 @click.option(
-    "--remote", "-r", default=None, help="The URL of the remote cache 
(defaults to the first configured cache)"
+    "--remote",
+    "remotes",
+    type=RemoteSpecType(RemoteSpecPurpose.PUSH),
+    multiple=True,
+    help="A remote for uploading artifacts",
 )
[email protected]("--ignore-project-remotes", is_flag=True, help="Ignore remotes 
recommended by projects")
 @click.argument("artifacts", nargs=-1, type=click.Path(readable=False))
 @click.pass_obj
-def artifact_push(app, artifacts, deps, remote):
-    """Push a built artifact to a remote artifact cache.
+def artifact_push(app, deps, remotes, ignore_project_remotes, artifacts):
+    """Push built artifacts to a remote artifact cache, possibly pulling them 
first.
 
     Specifying no elements will result in pushing the default targets
     of the project. If no default targets are configured, all project
@@ -1273,9 +1411,6 @@ def artifact_push(app, artifacts, deps, remote):
     When this command is executed from a workspace directory, the default
     is to push the workspace element.
 
-    The default destination is the highest priority configured cache. You can
-    override this by passing a different cache URL with the `--remote` flag.
-
     If bst has been configured to include build trees on artifact pulls,
     an attempt will be made to pull any required build trees to avoid the
     skipping of partial artifacts being pushed.
@@ -1296,7 +1431,13 @@ def artifact_push(app, artifacts, deps, remote):
             # Junction elements cannot be pushed, exclude them from default targets
             ignore_junction_targets = True
 
-        app.stream.push(artifacts, selection=deps, remote=remote, ignore_junction_targets=ignore_junction_targets)
+        app.stream.push(
+            artifacts,
+            selection=deps,
+            ignore_junction_targets=ignore_junction_targets,
+            artifact_remotes=remotes,
+            ignore_project_artifact_remotes=ignore_project_remotes,
+        )
 
 
 ################################################################
diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py
index 1728bd7..57233bf 100644
--- a/src/buildstream/_stream.py
+++ b/src/buildstream/_stream.py
@@ -29,7 +29,7 @@ import tarfile
 import tempfile
 from contextlib import contextmanager, suppress
 from collections import deque
-from typing import List, Tuple
+from typing import List, Tuple, Optional, Iterable, Callable
 
 from ._artifactelement import verify_artifact_ref, ArtifactElement
 from ._artifactproject import ArtifactProject
@@ -47,9 +47,9 @@ from ._scheduler import (
 from .element import Element
 from ._profile import Topics, PROFILER
 from ._project import ProjectRefStorage
-from ._remotespec import RemoteType, RemoteSpec
+from ._remotespec import RemoteSpec
 from ._state import State
-from .types import _KeyStrength, _PipelineSelection, _Scope
+from .types import _KeyStrength, _PipelineSelection, _Scope, _HostMount
 from .plugin import Plugin
 from . import utils, _yaml, _site, _pipeline
 
@@ -135,32 +135,46 @@ class Stream:
     # and `bst shell`.
     #
     # Args:
-    #    targets (list of str): Targets to pull
-    #    selection (_PipelineSelection): The selection mode for the specified targets
-    #    except_targets (list of str): Specified targets to except from fetching
-    #    connect_artifact_cache (bool): Whether to try to contact remote artifact caches
+    #    targets: Targets to pull
+    #    selection: The selection mode for the specified targets (_PipelineSelection)
+    #    except_targets: Specified targets to except from fetching
     #    load_artifacts (bool): Whether to load artifacts with artifact names
+    #    connect_artifact_cache: Whether to try to contact remote artifact caches
+    #    connect_source_cache: Whether to try to contact remote source caches
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
     # Returns:
     #    (list of Element): The selected elements
     def load_selection(
         self,
-        targets,
+        targets: Iterable[str],
         *,
-        selection=_PipelineSelection.NONE,
-        except_targets=(),
-        connect_artifact_cache=False,
-        load_artifacts=False,
+        selection: str = _PipelineSelection.NONE,
+        except_targets: Iterable[str] = (),
+        load_artifacts: bool = False,
+        connect_artifact_cache: bool = False,
+        connect_source_cache: bool = False,
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
+        ignore_project_source_remotes: bool = False,
     ):
         with PROFILER.profile(Topics.LOAD_SELECTION, "_".join(t.replace(os.sep, "-") for t in targets)):
             target_objects = self._load(
                 targets,
                 selection=selection,
                 except_targets=except_targets,
-                connect_artifact_cache=connect_artifact_cache,
                 load_artifacts=load_artifacts,
+                connect_artifact_cache=connect_artifact_cache,
+                connect_source_cache=connect_source_cache,
+                artifact_remotes=artifact_remotes,
+                source_remotes=source_remotes,
+                ignore_project_artifact_remotes=ignore_project_artifact_remotes,
+                ignore_project_source_remotes=ignore_project_source_remotes,
             )
-
             return target_objects
 
     # shell()
@@ -168,32 +182,37 @@ class Stream:
     # Run a shell
     #
     # Args:
-    #    element (str): The name of the element to run the shell for
-    #    scope (_Scope): The scope for the shell (_Scope.BUILD or _Scope.RUN)
-    #    prompt (function): A function to return the prompt to display in the shell
-    #    mounts (list of HostMount): Additional directories to mount into the sandbox
+    #    target: The name of the element to run the shell for
+    #    scope: The scope for the shell, only BUILD or RUN are valid (_Scope)
+    #    prompt: A function to return the prompt to display in the shell
+    #    unique_id: (str): A unique_id to use to lookup an Element instance
+    #    mounts: Additional directories to mount into the sandbox
     #    isolate (bool): Whether to isolate the environment like we do in builds
     #    command (list): An argv to launch in the sandbox, or None
     #    usebuildtree (bool): Whether to use a buildtree as the source, given cli option
     #    pull_ (bool): Whether to attempt to pull missing or incomplete artifacts
-    #    unique_id: (str): Whether to use a unique_id to load an Element instance
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
     #
     # Returns:
     #    (int): The exit code of the launched shell
     #
     def shell(
         self,
-        element,
-        scope,
-        prompt,
+        target: str,
+        scope: int,
+        prompt: Callable[[Element], str],
         *,
-        mounts=None,
-        isolate=False,
-        command=None,
-        usebuildtree=False,
-        pull_=False,
-        unique_id=None,
+        unique_id: Optional[str] = None,
+        mounts: Optional[List[_HostMount]] = None,
+        isolate: bool = False,
+        command: Optional[List[str]] = None,
+        usebuildtree: bool = False,
+        pull_: bool = False,
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
     ):
+        element: Element
 
         # Load the Element via the unique_id if given
         if unique_id and element is None:
@@ -201,7 +220,13 @@ class Stream:
         else:
             selection = _PipelineSelection.BUILD if scope == _Scope.BUILD else _PipelineSelection.RUN
 
-            elements = self.load_selection((element,), selection=selection, connect_artifact_cache=True)
+            elements = self.load_selection(
+                (target,),
+                selection=selection,
+                connect_artifact_cache=True,
+                artifact_remotes=artifact_remotes,
+                ignore_project_artifact_remotes=ignore_project_artifact_remotes,
+            )
 
             # Get element to stage from `targets` list.
             # If scope is BUILD, it will not be in the `elements` list.
@@ -260,24 +285,40 @@ class Stream:
     # Builds (assembles) elements in the pipeline.
     #
     # Args:
-    #    targets (list of str): Targets to build
-    #    selection (_PipelineSelection): The selection mode for the specified targets
-    #    ignore_junction_targets (bool): Whether junction targets should be filtered out
-    #    remote (str): The URL of a specific remote server to push to, or None
+    #    targets: Targets to build
+    #    selection: The selection mode for the specified targets (_PipelineSelection)
+    #    ignore_junction_targets: Whether junction targets should be filtered out
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
     # If `remote` specified as None, then regular configuration will be used
     # to determine where to push artifacts to.
     #
-    def build(self, targets, *, selection=_PipelineSelection.PLAN, ignore_junction_targets=False, remote=None):
+    def build(
+        self,
+        targets: Iterable[str],
+        *,
+        selection: str = _PipelineSelection.PLAN,
+        ignore_junction_targets: bool = False,
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
+        ignore_project_source_remotes: bool = False,
+    ):
 
         elements = self._load(
             targets,
             selection=selection,
             ignore_junction_targets=ignore_junction_targets,
+            dynamic_plan=True,
             connect_artifact_cache=True,
-            artifact_remote_url=remote,
             connect_source_cache=True,
-            dynamic_plan=True,
+            artifact_remotes=artifact_remotes,
+            source_remotes=source_remotes,
+            ignore_project_artifact_remotes=ignore_project_artifact_remotes,
+            ignore_project_source_remotes=ignore_project_source_remotes,
         )
 
         # Assert that the elements are consistent
@@ -320,19 +361,29 @@ class Stream:
     # Fetches sources on the pipeline.
     #
     # Args:
-    #    targets (list of str): Targets to fetch
-    #    selection (_PipelineSelection): The selection mode for the specified targets
-    #    except_targets (list of str): Specified targets to except from fetching
-    #    remote (str|None): The URL of a specific remote server to pull from.
+    #    targets: Targets to fetch
+    #    selection: The selection mode for the specified targets (_PipelineSelection)
+    #    except_targets: Specified targets to except from fetching
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
-    def fetch(self, targets, *, selection=_PipelineSelection.PLAN, except_targets=None, remote=None):
+    def fetch(
+        self,
+        targets: Iterable[str],
+        *,
+        selection: str = _PipelineSelection.PLAN,
+        except_targets: Iterable[str] = (),
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_source_remotes: bool = False,
+    ):
 
         elements = self._load(
             targets,
             selection=selection,
             except_targets=except_targets,
             connect_source_cache=True,
-            source_remote_url=remote,
+            source_remotes=source_remotes,
+            ignore_project_source_remotes=ignore_project_source_remotes,
         )
 
         # Delegated to a shared fetch method
@@ -376,7 +427,8 @@ class Stream:
     # Args:
     #    targets (list of str): Targets to push
     #    selection (_PipelineSelection): The selection mode for the specified targets
-    #    remote (str): The URL of a specific remote server to push to, or None
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
     # If `remote` specified as None, then regular configuration will be used
     # to determine where to push sources to.
@@ -385,10 +437,22 @@ class Stream:
     # a fetch queue will be created if user context and available remotes allow for
     # attempting to fetch them.
     #
-    def source_push(self, targets, *, selection=_PipelineSelection.NONE, remote=None):
+    def source_push(
+        self,
+        targets,
+        *,
+        selection=_PipelineSelection.NONE,
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_source_remotes: bool = False,
+    ):
 
         elements = self._load(
-            targets, selection=selection, connect_source_cache=True, source_remote_url=remote, load_artifacts=True,
+            targets,
+            selection=selection,
+            load_artifacts=True,
+            connect_source_cache=True,
+            source_remotes=source_remotes,
+            ignore_project_source_remotes=ignore_project_source_remotes,
         )
 
         if not self._sourcecache.has_push_remotes():
@@ -408,24 +472,31 @@ class Stream:
     # Pulls artifacts from remote artifact server(s)
     #
     # Args:
-    #    targets (list of str): Targets to pull
-    #    selection (_PipelineSelection): The selection mode for the specified targets
-    #    ignore_junction_targets (bool): Whether junction targets should be filtered out
-    #    remote (str): The URL of a specific remote server to pull from, or None
+    #    targets: Targets to pull
+    #    selection: The selection mode for the specified targets (_PipelineSelection)
+    #    ignore_junction_targets: Whether junction targets should be filtered out
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
     #
-    # If `remote` specified as None, then regular configuration will be used
-    # to determine where to pull artifacts from.
-    #
-    def pull(self, targets, *, selection=_PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
+    def pull(
+        self,
+        targets: Iterable[str],
+        *,
+        selection: str = _PipelineSelection.NONE,
+        ignore_junction_targets: bool = False,
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
+    ):
 
         elements = self._load(
             targets,
             selection=selection,
             ignore_junction_targets=ignore_junction_targets,
-            connect_artifact_cache=True,
-            artifact_remote_url=remote,
             load_artifacts=True,
             attempt_artifact_metadata=True,
+            connect_artifact_cache=True,
+            artifact_remotes=artifact_remotes,
+            ignore_project_artifact_remotes=ignore_project_artifact_remotes,
         )
 
         if not self._artifacts.has_fetch_remotes():
@@ -439,30 +510,38 @@ class Stream:
 
     # push()
     #
-    # Pulls artifacts to remote artifact server(s)
+    # Pushes artifacts to remote artifact server(s), pulling them first if necessary,
+    # possibly from different remotes.
     #
     # Args:
     #    targets (list of str): Targets to push
     #    selection (_PipelineSelection): The selection mode for the specified targets
     #    ignore_junction_targets (bool): Whether junction targets should be filtered out
-    #    remote (str): The URL of a specific remote server to push to, or None
-    #
-    # If `remote` specified as None, then regular configuration will be used
-    # to determine where to push artifacts to.
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
     #
     # If any of the given targets are missing their expected buildtree artifact,
     # a pull queue will be created if user context and available remotes allow for
     # attempting to fetch them.
     #
-    def push(self, targets, *, selection=_PipelineSelection.NONE, ignore_junction_targets=False, remote=None):
+    def push(
+        self,
+        targets: Iterable[str],
+        *,
+        selection: str = _PipelineSelection.NONE,
+        ignore_junction_targets: bool = False,
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
+    ):
 
         elements = self._load(
             targets,
             selection=selection,
             ignore_junction_targets=ignore_junction_targets,
-            connect_artifact_cache=True,
-            artifact_remote_url=remote,
             load_artifacts=True,
+            connect_artifact_cache=True,
+            artifact_remotes=artifact_remotes,
+            ignore_project_artifact_remotes=ignore_project_artifact_remotes,
         )
 
         if not self._artifacts.has_push_remotes():
@@ -481,48 +560,54 @@ class Stream:
     # Checkout target artifact to the specified location
     #
     # Args:
-    #    target (str): Target to checkout
-    #    location (str): Location to checkout the artifact to
-    #    force (bool): Whether files can be overwritten if necessary
-    #    selection (_PipelineSelection): The selection mode for the specified targets
-    #    integrate (bool): Whether to run integration commands
-    #    hardlinks (bool): Whether checking out files hardlinked to
-    #                      their artifacts is acceptable
-    #    tar (bool): If true, a tarball from the artifact contents will
-    #                be created, otherwise the file tree of the artifact
-    #                will be placed at the given location. If true and
-    #                location is '-', the tarball will be dumped on the
-    #                standard output.
-    #    pull (bool): If true will attempt to pull any missing or incomplete
-    #                 artifacts.
+    #    target: Target to checkout
+    #    location: Location to checkout the artifact to
+    #    force: Whether files can be overwritten if necessary
+    #    selection: The selection mode for the specified targets (_PipelineSelection)
+    #    integrate: Whether to run integration commands
+    #    hardlinks: Whether checking out files hardlinked to
+    #               their artifacts is acceptable
+    #    tar: If true, a tarball from the artifact contents will
+    #         be created, otherwise the file tree of the artifact
+    #         will be placed at the given location. If true and
+    #         location is '-', the tarball will be dumped on the
+    #         standard output.
+    #    pull: If true will attempt to pull any missing or incomplete
+    #          artifacts.
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
     #
     def checkout(
         self,
-        target,
+        target: str,
         *,
-        location=None,
-        force=False,
-        selection=_PipelineSelection.RUN,
-        integrate=True,
-        hardlinks=False,
-        compression="",
-        pull=False,
-        tar=False,
+        location: Optional[str] = None,
+        force: bool = False,
+        selection: str = _PipelineSelection.RUN,
+        integrate: bool = True,
+        hardlinks: bool = False,
+        compression: str = "",
+        pull: bool = False,
+        tar: bool = False,
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
     ):
 
         elements = self._load(
             (target,),
             selection=selection,
-            connect_artifact_cache=True,
             load_artifacts=True,
             attempt_artifact_metadata=True,
+            connect_artifact_cache=True,
+            artifact_remotes=artifact_remotes,
+            ignore_project_artifact_remotes=ignore_project_artifact_remotes,
         )
 
         # self.targets contains a list of the loaded target objects
         # if we specify --deps build, Stream._load() will return a list
         # of build dependency objects, however, we need to prepare a sandbox
         # with the target (which has had its appropriate dependencies loaded)
-        target = self.targets[0]
+        element: Element = self.targets[0]
 
         self._check_location_writable(location, force=force, tar=tar)
 
@@ -541,10 +626,10 @@ class Stream:
                 _PipelineSelection.NONE: _Scope.NONE,
                 _PipelineSelection.ALL: _Scope.ALL,
             }
-            with target._prepare_sandbox(scope=scope[selection], integrate=integrate) as sandbox:
+            with element._prepare_sandbox(scope=scope[selection], integrate=integrate) as sandbox:
                 # Copy or move the sandbox to the target directory
                 virdir = sandbox.get_virtual_directory()
-                self._export_artifact(tar, location, compression, target, hardlinks, virdir)
+                self._export_artifact(tar, location, compression, element, hardlinks, virdir)
         except BstError as e:
             raise StreamError(
                 "Error while staging dependencies into a sandbox" ": 
'{}'".format(e), detail=e.detail, reason=e.reason
@@ -702,31 +787,42 @@ class Stream:
     # Checkout sources of the target element to the specified location
     #
     # Args:
-    #    target (str): The target element whose sources to checkout
-    #    location (str): Location to checkout the sources to
-    #    force (bool): Whether to overwrite existing directories/tarfiles
-    #    deps (str): The dependencies to checkout
-    #    except_targets ([str]): List of targets to except from staging
-    #    tar (bool): Whether to write a tarfile holding the checkout contents
-    #    compression (str): The type of compression for tarball
-    #    include_build_scripts (bool): Whether to include build scripts in the checkout
+    #    target: The target element whose sources to checkout
+    #    location: Location to checkout the sources to
+    #    force: Whether to overwrite existing directories/tarfiles
+    #    deps: The selection mode for the specified targets (_PipelineSelection)
+    #    except_targets: List of targets to except from staging
+    #    tar: Whether to write a tarfile holding the checkout contents
+    #    compression: The type of compression for tarball
+    #    include_build_scripts: Whether to include build scripts in the checkout
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
     def source_checkout(
         self,
-        target,
+        target: str,
         *,
-        location=None,
-        force=False,
-        deps="none",
-        except_targets=(),
-        tar=False,
-        compression=None,
-        include_build_scripts=False,
+        location: Optional[str] = None,
+        force: bool = False,
+        deps=_PipelineSelection.NONE,
+        except_targets: Iterable[str] = (),
+        tar: bool = False,
+        compression: Optional[str] = None,
+        include_build_scripts: bool = False,
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_source_remotes: bool = False,
     ):
 
         self._check_location_writable(location, force=force, tar=tar)
 
-        elements = self._load((target,), selection=deps, except_targets=except_targets)
+        elements = self._load(
+            (target,),
+            selection=deps,
+            except_targets=except_targets,
+            connect_source_cache=True,
+            source_remotes=source_remotes,
+            ignore_project_source_remotes=ignore_project_source_remotes,
+        )
 
         # Assert all sources are cached in the source dir
         self._fetch(elements)
@@ -751,11 +847,28 @@ class Stream:
     #    no_checkout (bool): Whether to skip checking out the source
     #    force (bool): Whether to ignore contents in an existing directory
     #    custom_dir (str): Custom location to create a workspace or false to use default location.
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
-    def workspace_open(self, targets, *, no_checkout, force, custom_dir):
+    def workspace_open(
+        self,
+        targets: Iterable[str],
+        *,
+        no_checkout: bool = False,
+        force: bool = False,
+        custom_dir: Optional[str] = None,
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_source_remotes: bool = False,
+    ):
         # This function is a little funny but it is trying to be as atomic as possible.
 
-        elements = self._load(targets, selection=_PipelineSelection.REDIRECT)
+        elements = self._load(
+            targets,
+            selection=_PipelineSelection.REDIRECT,
+            connect_source_cache=True,
+            source_remotes=source_remotes,
+            ignore_project_source_remotes=ignore_project_source_remotes,
+        )
 
         workspaces = self._context.get_workspaces()
 
@@ -1198,8 +1311,8 @@ class Stream:
     #
     def _load_elements_from_targets(
         self,
-        targets: List[str],
-        except_targets: List[str],
+        targets: Iterable[str],
+        except_targets: Iterable[str],
         *,
         rewritable: bool = False,
         valid_artifact_names: bool = False,
@@ -1352,36 +1465,40 @@ class Stream:
     # fully loaded.
     #
     # Args:
-    #    targets (list of str): Main targets to load
-    #    selection (_PipelineSelection): The selection mode for the specified targets
-    #    except_targets (list of str): Specified targets to except from fetching
+    #    targets: Main targets to load
+    #    selection: The selection mode for the specified targets (_PipelineSelection)
+    #    except_targets: Specified targets to except from fetching
     #    ignore_junction_targets (bool): Whether junction targets should be filtered out
-    #    connect_artifact_cache (bool): Whether to try to contact remote artifact caches
-    #    connect_source_cache (bool): Whether to try to contact remote source caches
-    #    artifact_remote_url (str): A remote url for initializing the artifacts
-    #    source_remote_url (str): A remote url for initializing source caches
-    #    dynamic_plan (bool): Require artifacts as needed during the build
-    #    load_artifacts (bool): Whether to load artifacts with artifact names
-    #    attempt_artifact_metadata (bool): Whether to attempt to download artifact metadata in
+    #    dynamic_plan: Require artifacts as needed during the build
+    #    load_artifacts: Whether to load artifacts with artifact names
+    #    attempt_artifact_metadata: Whether to attempt to download artifact metadata in
     #                                      order to deduce build dependencies and reload.
+    #    connect_artifact_cache: Whether to try to contact remote artifact caches
+    #    connect_source_cache: Whether to try to contact remote source caches
+    #    artifact_remotes: Artifact cache remotes specified on the command line
+    #    source_remotes: Source cache remotes specified on the command line
+    #    ignore_project_artifact_remotes: Whether to ignore artifact remotes specified by projects
+    #    ignore_project_source_remotes: Whether to ignore source remotes specified by projects
     #
     # Returns:
     #    (list of Element): The primary element selection
     #
     def _load(
         self,
-        targets,
+        targets: Iterable[str],
         *,
-        selection=_PipelineSelection.NONE,
-        except_targets=(),
-        ignore_junction_targets=False,
-        connect_artifact_cache=False,
-        connect_source_cache=False,
-        artifact_remote_url=None,
-        source_remote_url=None,
-        dynamic_plan=False,
-        load_artifacts=False,
-        attempt_artifact_metadata=False,
+        selection: str = _PipelineSelection.NONE,
+        except_targets: Iterable[str] = (),
+        ignore_junction_targets: bool = False,
+        dynamic_plan: bool = False,
+        load_artifacts: bool = False,
+        attempt_artifact_metadata: bool = False,
+        connect_artifact_cache: bool = False,
+        connect_source_cache: bool = False,
+        artifact_remotes: Iterable[RemoteSpec] = (),
+        source_remotes: Iterable[RemoteSpec] = (),
+        ignore_project_artifact_remotes: bool = False,
+        ignore_project_source_remotes: bool = False,
     ):
         elements, except_elements, artifacts = self._load_elements_from_targets(
             targets, except_targets, rewritable=False, valid_artifact_names=load_artifacts
@@ -1390,7 +1507,7 @@ class Stream:
         if artifacts:
             if selection in (_PipelineSelection.ALL, _PipelineSelection.RUN):
                 raise StreamError(
-                    "Error: '--deps {}' is not supported for artifact 
names".format(selection.value),
+                    "Error: '--deps {}' is not supported for artifact 
names".format(selection),
                     reason="deps-not-supported",
                 )
 
@@ -1400,20 +1517,15 @@ class Stream:
         # Hold on to the targets
         self.targets = elements
 
-        # FIXME: Instead of converting the URL to a RemoteSpec here, the CLI needs to
-        #        be enhanced to parse a fully qualified RemoteSpec (including certs etc)
-        #        from the command line, the CLI should be feeding the RemoteSpec through
-        #        the Stream API directly.
-        #
-        artifact_remote = None
-        if artifact_remote_url:
-            artifact_remote = RemoteSpec(RemoteType.ALL, artifact_remote_url, push=True)
-        source_remote = None
-        if source_remote_url:
-            source_remote = RemoteSpec(RemoteType.ALL, source_remote_url, push=True)
-
         # Connect to remote caches, this needs to be done before resolving element state
-        self._context.initialize_remotes(connect_artifact_cache, connect_source_cache, artifact_remote, source_remote)
+        self._context.initialize_remotes(
+            connect_artifact_cache,
+            connect_source_cache,
+            artifact_remotes,
+            source_remotes,
+            ignore_project_artifact_remotes=ignore_project_artifact_remotes,
+            ignore_project_source_remotes=ignore_project_source_remotes,
+        )
 
         # In some cases we need to have an actualized artifact, with all of
         # it's metadata, such that we can derive attributes about the artifact
@@ -1444,7 +1556,12 @@ class Stream:
             # ensure those remotes are also initialized.
             #
             self._context.initialize_remotes(
-                connect_artifact_cache, connect_source_cache, artifact_remote, source_remote
+                connect_artifact_cache,
+                connect_source_cache,
+                artifact_remotes,
+                source_remotes,
+                ignore_project_artifact_remotes=ignore_project_artifact_remotes,
+                ignore_project_source_remotes=ignore_project_source_remotes,
             )
 
         self.targets += artifacts
@@ -1806,7 +1923,7 @@ class Stream:
     #    (list): artifact names present in the targets
     #
     def _expand_and_classify_targets(
-        self, targets: List[str], valid_artifact_names: bool = False
+        self, targets: Iterable[str], valid_artifact_names: bool = False
     ) -> Tuple[List[str], List[str]]:
         initial_targets = []
         element_targets = []
diff --git a/tests/frontend/completions.py b/tests/frontend/completions.py
index 21ef3be..0632af0 100644
--- a/tests/frontend/completions.py
+++ b/tests/frontend/completions.py
@@ -128,7 +128,18 @@ def test_commands(cli, cmd, word_idx, expected):
         ("bst -", 1, MAIN_OPTIONS),
         ("bst --l", 1, ["--log-file "]),
         # Test that options of subcommands also complete
-        ("bst --no-colors build -", 3, ["--deps ", "-d ", "--remote ", "-r "]),
+        (
+            "bst --no-colors build -",
+            3,
+            [
+                "--deps ",
+                "-d ",
+                "--artifact-remote ",
+                "--source-remote ",
+                "--ignore-project-artifact-remotes ",
+                "--ignore-project-source-remotes ",
+            ],
+        ),
         # Test the behavior of completing after an option that has a
         # parameter that cannot be completed, vs an option that has
         # no parameter
diff --git a/tests/frontend/pull.py b/tests/frontend/pull.py
index 3e50b72..6c44da0 100644
--- a/tests/frontend/pull.py
+++ b/tests/frontend/pull.py
@@ -417,7 +417,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
         # Now check that a build with cli set as sharecli results in nothing being pulled,
         # as it doesn't have them cached and shareuser should be ignored. This
         # will however result in the artifacts being built and pushed to it
-        result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
+        result = cli.run(project=project, args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
         result.assert_success()
         for element_name in all_elements:
             assert element_name not in result.get_pulled_elements()
@@ -426,7 +426,7 @@ def test_build_remote_option(caplog, cli, tmpdir, datafiles):
 
         # Now check that a clean build with cli set as sharecli should result in artifacts only
         # being pulled from it, as that was provided via the cli and is populated
-        result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])
+        result = cli.run(project=project, args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
         result.assert_success()
         for element_name in all_elements:
             assert cli.get_element_state(project, element_name) == "cached"
