This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 1e20ef215a Rename `db upgrade` to `db migrate` and add `connections 
create-default-connections` (#32810)
1e20ef215a is described below

commit 1e20ef215ab8e688dc4331513fc5df34db443e84
Author: Akash Sharma <[email protected]>
AuthorDate: Tue Aug 1 02:21:54 2023 +0530

    Rename `db upgrade` to `db migrate` and add `connections 
create-default-connections` (#32810)
---
 .github/actions/migration_tests/action.yml         |  8 ++---
 Dockerfile                                         | 12 ++++---
 LOCAL_VIRTUALENV.rst                               |  2 +-
 airflow/cli/cli_config.py                          | 40 ++++++++++++++++++++--
 airflow/cli/cli_parser.py                          | 10 +++---
 airflow/cli/commands/connection_command.py         |  5 +++
 airflow/cli/commands/db_command.py                 | 20 ++++++++---
 airflow/models/crypto.py                           |  2 +-
 .../dag-serialization.rst                          |  2 +-
 .../production-deployment.rst                      |  8 ++---
 .../howto/docker-compose/docker-compose.yaml       |  2 +-
 docs/apache-airflow/howto/set-up-database.rst      |  2 +-
 .../howto/upgrading-from-1-10/index.rst            |  4 +--
 docs/apache-airflow/howto/usage-cli.rst            |  6 ++--
 .../installation/setting-up-the-database.rst       | 12 ++++---
 docs/apache-airflow/installation/upgrading.rst     | 22 ++++++++----
 docs/apache-airflow/migrations-ref.rst             |  2 +-
 docs/apache-airflow/start.rst                      |  2 +-
 docs/apache-airflow/tutorial/fundamentals.rst      |  2 +-
 docs/docker-stack/entrypoint.rst                   | 10 +++---
 docs/helm-chart/production-guide.rst               |  2 +-
 scripts/docker/entrypoint_prod.sh                  | 12 ++++---
 scripts/in_container/check_environment.sh          |  2 +-
 tests/cli/commands/test_connection_command.py      | 12 ++++++-
 tests/cli/commands/test_db_command.py              | 14 +++++---
 tests/cli/test_cli_parser.py                       | 20 +++++++++++
 26 files changed, 173 insertions(+), 62 deletions(-)

diff --git a/.github/actions/migration_tests/action.yml 
b/.github/actions/migration_tests/action.yml
index 81ea7e8383..cd1a424099 100644
--- a/.github/actions/migration_tests/action.yml
+++ b/.github/actions/migration_tests/action.yml
@@ -25,13 +25,13 @@ runs:
       shell: bash
       run: >
         breeze shell "airflow db reset --skip-init -y &&
-        airflow db upgrade --to-revision heads &&
+        airflow db migrate --to-revision heads &&
         airflow db downgrade -r e959f08ac86c -y &&
-        airflow db upgrade"
+        airflow db migrate"
     - name: "Test downgrade ORM ${{env.BACKEND}}"
       shell: bash
       run: >
         breeze shell "airflow db reset -y &&
-        airflow db upgrade &&
+        airflow db migrate &&
         airflow db downgrade -r e959f08ac86c -y &&
-        airflow db upgrade"
+        airflow db migrate"
diff --git a/Dockerfile b/Dockerfile
index 21c17afa71..3952736871 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -943,9 +943,9 @@ function wait_for_airflow_db() {
     run_check_with_retries "airflow db check"
 }
 
-function upgrade_db() {
-    # Runs airflow db upgrade
-    airflow db upgrade || true
+function migrate_db() {
+    # Runs airflow db migrate
+    airflow db migrate || true
 }
 
 function wait_for_celery_broker() {
@@ -1023,8 +1023,12 @@ if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
     wait_for_airflow_db
 fi
 
+if [[ -n "${_AIRFLOW_DB_UPGRADE=}" ]] || [[ -n "${_AIRFLOW_DB_MIGRATE=}" ]] ; 
then
+    migrate_db
+fi
+
 if [[ -n "${_AIRFLOW_DB_UPGRADE=}" ]] ; then
-    upgrade_db
+    >&2 echo "WARNING: Environment variable '_AIRFLOW_DB_UPGRADE' is 
deprecated please use '_AIRFLOW_DB_MIGRATE' instead"
 fi
 
 if [[ -n "${_AIRFLOW_WWW_USER_CREATE=}" ]] ; then
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index 017f042f25..753f3a9f29 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -213,7 +213,7 @@ You can solve the problem by:
 
     # if necessary, start with a clean AIRFLOW_HOME, e.g.
     # rm -rf ~/airflow
-    airflow db init
+    airflow db migrate
 
 4. Select the virtualenv you created as the project's default virtualenv in 
your IDE.
 
diff --git a/airflow/cli/cli_config.py b/airflow/cli/cli_config.py
index 02908ee8f0..4058eb4f86 100644
--- a/airflow/cli/cli_config.py
+++ b/airflow/cli/cli_config.py
@@ -1056,6 +1056,7 @@ class ActionCommand(NamedTuple):
     args: Iterable[Arg]
     description: str | None = None
     epilog: str | None = None
+    hide: bool = False
 
 
 class GroupCommand(NamedTuple):
@@ -1517,9 +1518,14 @@ VARIABLES_COMMANDS = (
 DB_COMMANDS = (
     ActionCommand(
         name="init",
-        help="Initialize the metadata database",
+        help=(
+            "Deprecated -- use `migrate` instead. "
+            "To create default connections use `connections 
create-default-connections`. "
+            "Initialize the metadata database"
+        ),
         func=lazy_load_command("airflow.cli.commands.db_command.initdb"),
         args=(ARG_VERBOSE,),
+        hide=True,
     ),
     ActionCommand(
         name="check-migrations",
@@ -1536,7 +1542,7 @@ DB_COMMANDS = (
     ),
     ActionCommand(
         name="upgrade",
-        help="Upgrade the metadata database to latest version",
+        help="Deprecated -- use `migrate` instead. Upgrade the metadata 
database to latest version",
         description=(
             "Upgrade the schema of the metadata database. "
             "To print but not execute commands, use option 
``--show-sql-only``. "
@@ -1554,6 +1560,29 @@ DB_COMMANDS = (
             ARG_DB_RESERIALIZE_DAGS,
             ARG_VERBOSE,
         ),
+        hide=True,
+    ),
+    ActionCommand(
+        name="migrate",
+        help="Migrates the metadata database to the latest version",
+        description=(
+            "Migrate the schema of the metadata database. "
+            "Create the database if it does not exist "
+            "To print but not execute commands, use option 
``--show-sql-only``. "
+            "If using options ``--from-revision`` or ``--from-version``, you 
must also use "
+            "``--show-sql-only``, because if actually *running* migrations, we 
should only "
+            "migrate from the *current* Alembic revision."
+        ),
+        func=lazy_load_command("airflow.cli.commands.db_command.migratedb"),
+        args=(
+            ARG_DB_REVISION__UPGRADE,
+            ARG_DB_VERSION__UPGRADE,
+            ARG_DB_SQL_ONLY,
+            ARG_DB_FROM_REVISION,
+            ARG_DB_FROM_VERSION,
+            ARG_DB_RESERIALIZE_DAGS,
+            ARG_VERBOSE,
+        ),
     ),
     ActionCommand(
         name="downgrade",
@@ -1693,6 +1722,13 @@ CONNECTIONS_COMMANDS = (
         
func=lazy_load_command("airflow.cli.commands.connection_command.connections_test"),
         args=(ARG_CONN_ID, ARG_VERBOSE),
     ),
+    ActionCommand(
+        name="create-default-connections",
+        help="Creates all the default connections from all the providers",
+        
func=lazy_load_command("airflow.cli.commands.connection_command.create_default_connections"),
+        # 
func=lazy_load_command("airflow.utils.db.create_default_connections"),
+        args=(ARG_VERBOSE,),
+    ),
 )
 PROVIDERS_COMMANDS = (
     ActionCommand(
diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py
index 1a8ec1b844..669c77ea40 100644
--- a/airflow/cli/cli_parser.py
+++ b/airflow/cli/cli_parser.py
@@ -137,9 +137,12 @@ def _sort_args(args: Iterable[Arg]) -> Iterable[Arg]:
 
 
 def _add_command(subparsers: argparse._SubParsersAction, sub: CLICommand) -> 
None:
-    sub_proc = subparsers.add_parser(
-        sub.name, help=sub.help, description=sub.description or sub.help, 
epilog=sub.epilog
-    )
+    if isinstance(sub, ActionCommand) and sub.hide:
+        sub_proc = subparsers.add_parser(sub.name, epilog=sub.epilog)
+    else:
+        sub_proc = subparsers.add_parser(
+            sub.name, help=sub.help, description=sub.description or sub.help, 
epilog=sub.epilog
+        )
     sub_proc.formatter_class = LazyRichHelpFormatter
 
     if isinstance(sub, GroupCommand):
@@ -160,6 +163,5 @@ def _add_group_command(sub: GroupCommand, sub_proc: 
argparse.ArgumentParser) ->
     subcommands = sub.subcommands
     sub_subparsers = sub_proc.add_subparsers(dest="subcommand", 
metavar="COMMAND")
     sub_subparsers.required = True
-
     for command in sorted(subcommands, key=lambda x: x.name):
         _add_command(sub_subparsers, command)
diff --git a/airflow/cli/commands/connection_command.py 
b/airflow/cli/commands/connection_command.py
index 5db123f004..02251a70ec 100644
--- a/airflow/cli/commands/connection_command.py
+++ b/airflow/cli/commands/connection_command.py
@@ -39,6 +39,7 @@ from airflow.providers_manager import ProvidersManager
 from airflow.secrets.local_filesystem import load_connections_dict
 from airflow.utils import cli as cli_utils, helpers, yaml
 from airflow.utils.cli import suppress_logs_and_warning
+from airflow.utils.db import create_default_connections as 
db_create_default_connections
 from airflow.utils.providers_configuration_loader import 
providers_configuration_loaded
 from airflow.utils.session import create_session
 
@@ -107,6 +108,10 @@ def _connection_to_dict(conn: Connection) -> dict:
     )
 
 
+def create_default_connections(args):
+    db_create_default_connections()
+
+
 def _format_connections(conns: list[Connection], file_format: str, 
serialization_format: str) -> str:
     if serialization_format == "json":
         serializer_func = lambda x: json.dumps(_connection_to_dict(x))
diff --git a/airflow/cli/commands/db_command.py 
b/airflow/cli/commands/db_command.py
index e703f6271e..390d940fcf 100644
--- a/airflow/cli/commands/db_command.py
+++ b/airflow/cli/commands/db_command.py
@@ -20,6 +20,7 @@ from __future__ import annotations
 import logging
 import os
 import textwrap
+import warnings
 from tempfile import NamedTemporaryFile
 
 from packaging.version import parse as parse_version
@@ -39,6 +40,11 @@ log = logging.getLogger(__name__)
 @providers_configuration_loaded
 def initdb(args):
     """Initializes the metadata database."""
+    warnings.warn(
+        "`db init` is deprecated.  Use `db migrate` instead to migrate the db 
and/or "
+        "create-default-connections to create the default connections",
+        DeprecationWarning,
+    )
     print("DB: " + repr(settings.engine.url))
     db.initdb()
     print("Initialization done")
@@ -53,10 +59,16 @@ def resetdb(args):
     db.resetdb(skip_init=args.skip_init)
 
 
-@cli_utils.action_cli(check_db=False)
-@providers_configuration_loaded
 def upgradedb(args):
     """Upgrades the metadata database."""
+    warnings.warn("`db updgrade` is deprecated. Use `db migrate` instead.", 
DeprecationWarning)
+    migratedb(args)
+
+
+@cli_utils.action_cli(check_db=False)
+@providers_configuration_loaded
+def migratedb(args):
+    """Migrates the metadata database."""
     print("DB: " + repr(settings.engine.url))
     if args.to_revision and args.to_version:
         raise SystemExit("Cannot supply both `--to-revision` and 
`--to-version`.")
@@ -85,7 +97,7 @@ def upgradedb(args):
         to_revision = args.to_revision
 
     if not args.show_sql_only:
-        print("Performing upgrade with database " + repr(settings.engine.url))
+        print("Performing upgrade to the metadata database " + 
repr(settings.engine.url))
     else:
         print("Generating sql for upgrade -- upgrade commands will *not* be 
submitted.")
 
@@ -96,7 +108,7 @@ def upgradedb(args):
         reserialize_dags=args.reserialize_dags,
     )
     if not args.show_sql_only:
-        print("Upgrades done")
+        print("Database migrating done!")
 
 
 @cli_utils.action_cli(check_db=False)
diff --git a/airflow/models/crypto.py b/airflow/models/crypto.py
index 97495ea90e..74c2db042f 100644
--- a/airflow/models/crypto.py
+++ b/airflow/models/crypto.py
@@ -42,7 +42,7 @@ class NullFernet:
 
     The purpose of this is to make the rest of the code not have to know the
     difference, and to only display the message once, not 20 times when
-    `airflow db init` is ran.
+    `airflow db migrate` is run.
     """
 
     is_encrypted = False
diff --git 
a/docs/apache-airflow/administration-and-deployment/dag-serialization.rst 
b/docs/apache-airflow/administration-and-deployment/dag-serialization.rst
index 9c4550544c..385db66123 100644
--- a/docs/apache-airflow/administration-and-deployment/dag-serialization.rst
+++ b/docs/apache-airflow/administration-and-deployment/dag-serialization.rst
@@ -88,7 +88,7 @@ Add the following settings in ``airflow.cfg``:
 *   ``compress_serialized_dags``: This option controls whether to compress the 
Serialized DAG to the Database.
     It is useful when there are very large DAGs in your cluster. When 
``True``, this will disable the DAG dependencies view.
 
-If you are updating Airflow from <1.10.7, please do not forget to run 
``airflow db upgrade``.
+If you are updating Airflow from <1.10.7, please do not forget to run 
``airflow db migrate``.
 
 
 Limitations
diff --git 
a/docs/apache-airflow/administration-and-deployment/production-deployment.rst 
b/docs/apache-airflow/administration-and-deployment/production-deployment.rst
index bff0d785a0..6f414ed194 100644
--- 
a/docs/apache-airflow/administration-and-deployment/production-deployment.rst
+++ 
b/docs/apache-airflow/administration-and-deployment/production-deployment.rst
@@ -43,14 +43,14 @@ Once that is done, you can run -
 
 .. code-block:: bash
 
-    airflow db upgrade
+    airflow db migrate
 
-``upgrade`` keeps track of migrations already applied, so it's safe to run as 
often as you need.
+``migrate`` keeps track of migrations already applied, so it's safe to run as 
often as you need.
 
 .. note::
 
-    Do not use ``airflow db init`` as it can create a lot of default 
connections, charts, etc. which are not
-    required in production DB.
+    Prior to Airflow version 2.7.0, ``airflow db upgrade`` was used to apply
+    migrations; however, it has been deprecated in favor of ``airflow db migrate``.
 
 
 Multi-Node Cluster
diff --git a/docs/apache-airflow/howto/docker-compose/docker-compose.yaml 
b/docs/apache-airflow/howto/docker-compose/docker-compose.yaml
index 86b319b233..2fcdbaa407 100644
--- a/docs/apache-airflow/howto/docker-compose/docker-compose.yaml
+++ b/docs/apache-airflow/howto/docker-compose/docker-compose.yaml
@@ -252,7 +252,7 @@ services:
     # yamllint enable rule:line-length
     environment:
       <<: *airflow-common-env
-      _AIRFLOW_DB_UPGRADE: 'true'
+      _AIRFLOW_DB_MIGRATE: 'true'
       _AIRFLOW_WWW_USER_CREATE: 'true'
       _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
       _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
diff --git a/docs/apache-airflow/howto/set-up-database.rst 
b/docs/apache-airflow/howto/set-up-database.rst
index b9041c8e28..870c9edfb2 100644
--- a/docs/apache-airflow/howto/set-up-database.rst
+++ b/docs/apache-airflow/howto/set-up-database.rst
@@ -378,7 +378,7 @@ After configuring the database and connecting to it in 
Airflow configuration, yo
 
 .. code-block:: bash
 
-    airflow db init
+    airflow db migrate
 
 What's next?
 ------------
diff --git a/docs/apache-airflow/howto/upgrading-from-1-10/index.rst 
b/docs/apache-airflow/howto/upgrading-from-1-10/index.rst
index f1956d89b9..96dca4e433 100644
--- a/docs/apache-airflow/howto/upgrading-from-1-10/index.rst
+++ b/docs/apache-airflow/howto/upgrading-from-1-10/index.rst
@@ -280,7 +280,7 @@ now represented as ``can_read`` on ``DAG:example_dag_id``.
 There is a special view called ``DAGs`` (it was called ``all_dags`` in 
versions 1.10.x) which allows the role to access
 all the DAGs. The default ``Admin``, ``Viewer``, ``User``, ``Op`` roles can 
all access the ``DAGs`` view.
 
-*As part of running ``airflow db upgrade``, existing permissions will be 
migrated for you.*
+*As part of running ``airflow db migrate``, existing permissions will be 
migrated for you.*
 
 When DAGs are initialized with the ``access_control`` variable set, any usage 
of the old permission names will automatically be updated in the database, so 
this won't be a breaking change. A DeprecationWarning will be raised.
 
@@ -541,7 +541,7 @@ At this point, just follow the standard Airflow version 
upgrade process:
   * Please note that you may have to uninstall the backport providers before 
installing the new providers, if you are installing using pip. This would not 
apply if you are installing using an Airflow Docker image with a set of 
specified requirements, where the change automatically gets a fresh set of 
modules.
   * You can read more about providers at :doc:`apache-airflow-providers:index`.
 
-* Upgrade the Airflow meta database using ``airflow db upgrade``.
+* Migrate the Airflow meta database using ``airflow db migrate``.
 
   * The above command may be unfamiliar, since it is shown using the Airflow 
2.0 CLI syntax.
   * The database upgrade may modify the database schema as needed and also map 
the existing data to be compliant with the update database schema.
diff --git a/docs/apache-airflow/howto/usage-cli.rst 
b/docs/apache-airflow/howto/usage-cli.rst
index ec403350f2..db8c9faa97 100644
--- a/docs/apache-airflow/howto/usage-cli.rst
+++ b/docs/apache-airflow/howto/usage-cli.rst
@@ -257,17 +257,17 @@ Considerations for backfillable DAGs
 
 Not all DAGs are designed for use with Airflow's backfill command.  But for 
those which are, special care is warranted.  If you delete DAG runs, and if you 
run backfill over a range of dates that includes the deleted DAG runs, those 
runs will be recreated and run again.  For this reason, if you have DAGs that 
fall into this category you may want to refrain from deleting DAG runs and only 
clean other large tables such as task instance and log etc.
 
-.. _cli-db-upgrade:
+.. _cli-db-migrate:
 
 Upgrading Airflow
 -----------------
 
-Run ``airflow db upgrade --help`` for usage details.
+Run ``airflow db migrate --help`` for usage details.
 
 Running migrations manually
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-If desired, you can generate the sql statements for an upgrade and apply each 
upgrade migration manually, one at a time.  To do so you may use either the 
``--range`` (for Airflow version) or ``--revision-range`` (for Alembic 
revision) option with ``db upgrade``.  Do *not* skip running the Alembic 
revision id update commands; this is how Airflow will know where you are 
upgrading from the next time you need to.  See :doc:`/migrations-ref` for a 
mapping between revision and version.
+If desired, you can generate the sql statements for an upgrade and apply each 
upgrade migration manually, one at a time.  To do so you may use either the 
``--range`` (for Airflow version) or ``--revision-range`` (for Alembic 
revision) option with ``db migrate``.  Do *not* skip running the Alembic 
revision id update commands; this is how Airflow will know where you are 
upgrading from the next time you need to.  See :doc:`/migrations-ref` for a 
mapping between revision and version.
 
 
 .. _cli-db-downgrade:
diff --git a/docs/apache-airflow/installation/setting-up-the-database.rst 
b/docs/apache-airflow/installation/setting-up-the-database.rst
index 3823db7f9c..6c595563ca 100644
--- a/docs/apache-airflow/installation/setting-up-the-database.rst
+++ b/docs/apache-airflow/installation/setting-up-the-database.rst
@@ -22,11 +22,15 @@ Apache Airflow™ requires a database. If you're just 
experimenting and learning
 default SQLite option. If you don't want to use SQLite, then take a look at
 :doc:`/howto/set-up-database` to setup a different database.
 
-Usually, you need to run ``airflow db upgrade`` in order to create the 
database schema that Airflow can use.
+Usually, you need to run ``airflow db migrate`` in order to create the 
database schema if it does not exist
+or migrate to the latest version if it does. You should make sure that Airflow 
components are
+not running while the database migration is being executed.
+
+.. note::
+
+    Prior to Airflow version 2.7.0, ``airflow db upgrade`` was used to apply
+    migrations; however, it has been deprecated in favor of ``airflow db migrate``.
 
-Similarly, upgrading Airflow usually requires an extra step of upgrading the 
database. This is done
-with ``airflow db upgrade`` CLI command. You should make sure that Airflow 
components are
-not running while the upgrade is being executed.
 
 In some deployments, such as :doc:`helm-chart:index`, both initializing and 
running the database migration
 is executed automatically when Airflow is upgraded.
diff --git a/docs/apache-airflow/installation/upgrading.rst 
b/docs/apache-airflow/installation/upgrading.rst
index 715b3f5e84..6328e887b0 100644
--- a/docs/apache-airflow/installation/upgrading.rst
+++ b/docs/apache-airflow/installation/upgrading.rst
@@ -21,8 +21,8 @@ Upgrading Airflow™ to a newer version
 Why you need to upgrade
 =======================
 
-Newer Airflow versions can contain database migrations so you must run 
``airflow db upgrade``
-to upgrade your database with the schema changes in the Airflow version you 
are upgrading to.
+Newer Airflow versions can contain database migrations so you must run 
``airflow db migrate``
+to migrate your database with the schema changes in the Airflow version you 
are upgrading to.
 Don't worry, it's safe to run even if there are no migrations to perform.
 
 Upgrade preparation - make a backup of DB
@@ -41,7 +41,7 @@ When you need to upgrade
 ========================
 
 If you have a custom deployment based on virtualenv or Docker Containers, you 
usually need to run
-the DB upgrade manually as part of the upgrade process.
+the DB migration manually as part of the upgrade process.
 
 In some cases the upgrade happens automatically - it depends if in your 
deployment, the upgrade is
 built-in as post-install action. For example when you are using 
:doc:`helm-chart:index` with
@@ -52,7 +52,7 @@ when you choose to upgrade airflow via their UI.
 How to upgrade
 ==============
 
-In order to manually upgrade the database you should run the ``airflow db 
upgrade`` command in your
+In order to manually migrate the database you should run the ``airflow db 
migrate`` command in your
 environment. It can be run either in your virtual environment or in the 
containers that give
 you access to Airflow ``CLI`` :doc:`/howto/usage-cli` and the database.
 
@@ -63,8 +63,16 @@ to get the SQL statements that would be executed. This 
feature is supported in P
 from Airflow 2.0.0 onward and in MSSQL from Airflow 2.2.0 onward.
 
 Sample usage:
-   ``airflow db upgrade -r "2.0.0:2.2.0"``
-   ``airflow db upgrade --revision-range "e959f08ac86c:142555e44c17"``
+   ``airflow db migrate -r "2.0.0:2.2.0"``
+   ``airflow db migrate --revision-range "e959f08ac86c:142555e44c17"``
+
+But for Airflow version 2.7.0 or greater, please use
+    ``airflow db migrate -r "2.0.0:2.2.0"``
+    ``airflow db migrate --revision-range "e959f08ac86c:142555e44c17"``
+
+.. note::
+    ``airflow db upgrade`` has been replaced by ``airflow db migrate`` since
+    Airflow version 2.7.0, and the former has been deprecated.
 
 
 Handling migration problems
@@ -208,7 +216,7 @@ Airflow version.
 Post-upgrade warnings
 .....................
 
-Typically you just need to successfully run ``airflow db upgrade`` command and 
this is all. However, in
+Typically you just need to successfully run ``airflow db migrate`` command and 
this is all. However, in
 some cases, the migration might find some old, stale and probably wrong data 
in your database and moves it
 aside to a separate table. In this case you might get warning in your 
webserver UI about the data found.
 
diff --git a/docs/apache-airflow/migrations-ref.rst 
b/docs/apache-airflow/migrations-ref.rst
index c0f0443a9d..d218038abd 100644
--- a/docs/apache-airflow/migrations-ref.rst
+++ b/docs/apache-airflow/migrations-ref.rst
@@ -18,7 +18,7 @@
 Reference for Database Migrations
 '''''''''''''''''''''''''''''''''
 
-Here's the list of all the Database Migrations that are executed via when you 
run ``airflow db upgrade``.
+Here's the list of all the Database Migrations that are executed when you run ``airflow db migrate``.
 
 .. warning::
 
diff --git a/docs/apache-airflow/start.rst b/docs/apache-airflow/start.rst
index 3d093c69e2..5ed97ae4d7 100644
--- a/docs/apache-airflow/start.rst
+++ b/docs/apache-airflow/start.rst
@@ -119,7 +119,7 @@ the all-in-one ``standalone`` command, you can instead run:
 
 .. code-block:: bash
 
-    airflow db init
+    airflow db migrate
 
     airflow users create \
         --username admin \
diff --git a/docs/apache-airflow/tutorial/fundamentals.rst 
b/docs/apache-airflow/tutorial/fundamentals.rst
index 695d748c00..d7d3acf2e8 100644
--- a/docs/apache-airflow/tutorial/fundamentals.rst
+++ b/docs/apache-airflow/tutorial/fundamentals.rst
@@ -298,7 +298,7 @@ Let's run a few commands to validate this script further.
 .. code-block:: bash
 
     # initialize the database tables
-    airflow db init
+    airflow db migrate
 
     # print the list of active DAGs
     airflow dags list
diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst
index 8b741da1b7..7ae74789b9 100644
--- a/docs/docker-stack/entrypoint.rst
+++ b/docs/docker-stack/entrypoint.rst
@@ -315,8 +315,8 @@ either as maintenance operations on the database or should 
be embedded in the cu
 Upgrading Airflow DB
 ....................
 
-If you set :envvar:`_AIRFLOW_DB_UPGRADE` variable to a non-empty value, the 
entrypoint will run
-the ``airflow db upgrade`` command right after verifying the connection. You 
can also use this
+If you set :envvar:`_AIRFLOW_DB_MIGRATE` variable to a non-empty value, the 
entrypoint will run
+the ``airflow db migrate`` command right after verifying the connection. You 
can also use this
 when you are running airflow with internal SQLite database (default) to 
upgrade the db and create
 admin users at entrypoint, so that you can start the webserver immediately. 
Note - using SQLite is
 intended only for testing purpose, never use SQLite in production as it has 
severe limitations when it
@@ -360,7 +360,7 @@ database and creating an ``admin/admin`` Admin user with 
the following command:
 .. code-block:: bash
 
   docker run -it -p 8080:8080 \
-    --env "_AIRFLOW_DB_UPGRADE=true" \
+    --env "_AIRFLOW_DB_MIGRATE=true" \
     --env "_AIRFLOW_WWW_USER_CREATE=true" \
     --env "_AIRFLOW_WWW_USER_PASSWORD=admin" \
       apache/airflow:2.7.0.dev0-python3.8 webserver
@@ -369,7 +369,7 @@ database and creating an ``admin/admin`` Admin user with 
the following command:
 .. code-block:: bash
 
   docker run -it -p 8080:8080 \
-    --env "_AIRFLOW_DB_UPGRADE=true" \
+    --env "_AIRFLOW_DB_MIGRATE=true" \
     --env "_AIRFLOW_WWW_USER_CREATE=true" \
     --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \
       apache/airflow:2.7.0.dev0-python3.8 webserver
@@ -409,7 +409,7 @@ Example:
 
   docker run -it -p 8080:8080 \
     --env "_PIP_ADDITIONAL_REQUIREMENTS=lxml==4.6.3 charset-normalizer==1.4.1" 
\
-    --env "_AIRFLOW_DB_UPGRADE=true" \
+    --env "_AIRFLOW_DB_MIGRATE=true" \
     --env "_AIRFLOW_WWW_USER_CREATE=true" \
     --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \
       apache/airflow:2.7.0.dev0-python3.8 webserver
diff --git a/docs/helm-chart/production-guide.rst 
b/docs/helm-chart/production-guide.rst
index 19a4d43689..1f5a9032d8 100644
--- a/docs/helm-chart/production-guide.rst
+++ b/docs/helm-chart/production-guide.rst
@@ -31,7 +31,7 @@ can be found at :doc:`Set up a Database Backend 
<apache-airflow:howto/set-up-dat
 
 .. note::
 
-    When using the helm chart, you do not need to initialize the db with 
``airflow db init``
+    When using the helm chart, you do not need to initialize the db with 
``airflow db migrate``
     as outlined in :doc:`Set up a Database Backend 
<apache-airflow:howto/set-up-database>`.
 
 First disable Postgres so the chart won't deploy its own Postgres container:
diff --git a/scripts/docker/entrypoint_prod.sh 
b/scripts/docker/entrypoint_prod.sh
index 1ba24ec20e..fb50e1ddb0 100755
--- a/scripts/docker/entrypoint_prod.sh
+++ b/scripts/docker/entrypoint_prod.sh
@@ -195,9 +195,9 @@ function wait_for_airflow_db() {
     run_check_with_retries "airflow db check"
 }
 
-function upgrade_db() {
-    # Runs airflow db upgrade
-    airflow db upgrade || true
+function migrate_db() {
+    # Runs airflow db migrate
+    airflow db migrate || true
 }
 
 function wait_for_celery_broker() {
@@ -283,8 +283,12 @@ if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
     wait_for_airflow_db
 fi
 
+if [[ -n "${_AIRFLOW_DB_UPGRADE=}" ]] || [[ -n "${_AIRFLOW_DB_MIGRATE=}" ]] ; 
then
+    migrate_db
+fi
+
 if [[ -n "${_AIRFLOW_DB_UPGRADE=}" ]] ; then
-    upgrade_db
+    >&2 echo "WARNING: Environment variable '_AIRFLOW_DB_UPGRADE' is 
deprecated please use '_AIRFLOW_DB_MIGRATE' instead"
 fi
 
 if [[ -n "${_AIRFLOW_WWW_USER_CREATE=}" ]] ; then
diff --git a/scripts/in_container/check_environment.sh 
b/scripts/in_container/check_environment.sh
index a81145a390..b23dc18dd2 100755
--- a/scripts/in_container/check_environment.sh
+++ b/scripts/in_container/check_environment.sh
@@ -115,7 +115,7 @@ function startairflow_if_requested() {
 
         . "$( dirname "${BASH_SOURCE[0]}" )/configure_environment.sh"
 
-        airflow db init
+        airflow db migrate
         airflow users create -u admin -p admin -f Thor -l Adminstra -r Admin 
-e [email protected]
 
         . "$( dirname "${BASH_SOURCE[0]}" )/run_init_script.sh"
diff --git a/tests/cli/commands/test_connection_command.py 
b/tests/cli/commands/test_connection_command.py
index 0c48cd099b..cad44fce81 100644
--- a/tests/cli/commands/test_connection_command.py
+++ b/tests/cli/commands/test_connection_command.py
@@ -27,7 +27,7 @@ from unittest import mock
 
 import pytest
 
-from airflow.cli import cli_parser
+from airflow.cli import cli_config, cli_parser
 from airflow.cli.commands import connection_command
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
@@ -974,3 +974,13 @@ class TestCliTestConnections:
             "Testing connections is disabled in Airflow configuration. Contact 
your deployment admin to "
             "enable it.\n\n"
         ) in stdout.getvalue()
+
+
+class TestCliCreateDefaultConnection:
+    
@mock.patch("airflow.cli.commands.connection_command.db_create_default_connections")
+    def test_cli_create_default_connections(self, 
mock_db_create_default_connections):
+        create_default_connection_fnc = dict(
+            (db_command.name, db_command.func) for db_command in 
cli_config.CONNECTIONS_COMMANDS
+        )["create-default-connections"]
+        create_default_connection_fnc(())
+        mock_db_create_default_connections.assert_called_once()
diff --git a/tests/cli/commands/test_db_command.py 
b/tests/cli/commands/test_db_command.py
index edc691e7cd..e928dc66bf 100644
--- a/tests/cli/commands/test_db_command.py
+++ b/tests/cli/commands/test_db_command.py
@@ -37,8 +37,8 @@ class TestCliDb:
 
     @mock.patch("airflow.cli.commands.db_command.db.initdb")
     def test_cli_initdb(self, mock_initdb):
-        db_command.initdb(self.parser.parse_args(["db", "init"]))
-
+        with pytest.warns(expected_warning=DeprecationWarning, match="`db 
init` is deprecated"):
+            db_command.initdb(self.parser.parse_args(["db", "init"]))
         mock_initdb.assert_called_once_with()
 
     @mock.patch("airflow.cli.commands.db_command.db.resetdb")
@@ -121,9 +121,15 @@ class TestCliDb:
         ],
     )
     @mock.patch("airflow.cli.commands.db_command.db.upgradedb")
-    def test_cli_upgrade_failure(self, mock_upgradedb, args, pattern):
+    def test_cli_sync_failure(self, mock_upgradedb, args, pattern):
         with pytest.raises(SystemExit, match=pattern):
-            db_command.upgradedb(self.parser.parse_args(["db", "upgrade", 
*args]))
+            db_command.migratedb(self.parser.parse_args(["db", "upgrade", 
*args]))
+
+    @mock.patch("airflow.cli.commands.db_command.migratedb")
+    def test_cli_upgrade(self, mock_migratedb):
+        with pytest.warns(expected_warning=DeprecationWarning, match="`db 
updgrade` is deprecated"):
+            db_command.upgradedb(self.parser.parse_args(["db", "upgrade"]))
+        mock_migratedb.assert_called_once()
 
     @mock.patch("airflow.cli.commands.db_command.execute_interactive")
     @mock.patch("airflow.cli.commands.db_command.NamedTemporaryFile")
diff --git a/tests/cli/test_cli_parser.py b/tests/cli/test_cli_parser.py
index eaec31db99..28e0eef8c3 100644
--- a/tests/cli/test_cli_parser.py
+++ b/tests/cli/test_cli_parser.py
@@ -34,6 +34,7 @@ from unittest.mock import patch
 import pytest
 
 from airflow.cli import cli_config, cli_parser
+from airflow.cli.cli_config import ActionCommand, lazy_load_command
 from airflow.configuration import AIRFLOW_HOME
 from tests.test_utils.config import conf_vars
 
@@ -311,6 +312,25 @@ class TestCli:
             "(choose from 'csv'), see help above.\n"
         )
 
+    @pytest.mark.parametrize(
+        "action_cmd",
+        [
+            ActionCommand(name="name", help="help", 
func=lazy_load_command(""), args=(), hide=True),
+            ActionCommand(name="name", help="help", 
func=lazy_load_command(""), args=(), hide=False),
+        ],
+    )
+    @patch("argparse._SubParsersAction")
+    def test_add_command_with_hide(self, mock_subparser_actions, action_cmd):
+        cli_parser._add_command(mock_subparser_actions, action_cmd)
+        if action_cmd.hide:
+            mock_subparser_actions.add_parser.assert_called_once_with(
+                action_cmd.name, epilog=action_cmd.epilog
+            )
+        else:
+            mock_subparser_actions.add_parser.assert_called_once_with(
+                action_cmd.name, help=action_cmd.help, 
description=action_cmd.help, epilog=action_cmd.epilog
+            )
+
 
 # We need to run it from sources with PYTHONPATH, not command line tool,
 # because we need to make sure that we have providers configured from source 
provider.yaml files

Reply via email to