This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new e23d53fe338 Remove old provider references and replace "new" with just 
providers (#46810)
e23d53fe338 is described below

commit e23d53fe3383f162ea402a4da266c813013154de
Author: Jarek Potiuk <[email protected]>
AuthorDate: Mon Feb 17 14:23:47 2025 +0100

    Remove old provider references and replace "new" with just providers 
(#46810)
    
    There was a lot of code and references to old provider ways of handling
    old structure of providers. Once all providers have been moved, we can
    now remove that old code and rename old the "new_providers" references
    to just "providers"
---
 .github/workflows/test-provider-packages.yml       |   2 +-
 .pre-commit-config.yaml                            |   2 +-
 airflow/new_provider.yaml.schema.json              | 519 ---------------------
 airflow/provider.yaml.schema.json                  |  39 --
 contributing-docs/11_provider_packages.rst         |   6 +-
 dev/breeze/README.md                               |   2 +-
 dev/breeze/pyproject.toml                          |   1 +
 .../commands/release_management_commands.py        |  37 +-
 .../prepare_providers/provider_documentation.py    | 208 ++-------
 .../prepare_providers/provider_packages.py         |  47 +-
 dev/breeze/src/airflow_breeze/utils/packages.py    | 323 +++++--------
 dev/breeze/src/airflow_breeze/utils/path_utils.py  |  11 +-
 .../airflow_breeze/utils/publish_docs_helpers.py   |  12 +-
 dev/breeze/src/airflow_breeze/utils/run_tests.py   |   5 +-
 .../src/airflow_breeze/utils/selective_checks.py   |  49 +-
 dev/breeze/tests/test_packages.py                  | 106 +----
 dev/breeze/tests/test_provider_documentation.py    |  14 +-
 .../tests/test_pytest_args_for_test_types.py       |  17 +-
 dev/breeze/tests/test_selective_checks.py          |   8 +-
 dev/breeze/uv.lock                                 |  11 +
 docs/.gitignore                                    |  98 +---
 docs/conf.py                                       |  18 +-
 docs/exts/docs_build/docs_builder.py               |  39 +-
 docs/exts/provider_yaml_utils.py                   |  96 +---
 providers/MANAGING_PROVIDERS_LIFECYCLE.rst         | 175 +++----
 providers/fab/docs/changelog.rst                   |  52 ++-
 scripts/ci/pre_commit/check_provider_docs.py       |   4 +-
 .../pre_commit/check_pyproject_toml_consistency.py |   7 +-
 scripts/ci/pre_commit/common_precommit_utils.py    |  11 +-
 .../ci/pre_commit/generate_volumes_for_sources.py  |   4 +-
 scripts/ci/pre_commit/mypy_folder.py               |  13 +-
 .../ci/pre_commit/update_providers_build_files.py  |   5 +-
 .../ci/pre_commit/update_providers_dependencies.py |   7 +-
 .../in_container/run_provider_yaml_files_check.py  |  22 +-
 tests/always/test_project_structure.py             |  18 +-
 35 files changed, 444 insertions(+), 1544 deletions(-)

diff --git a/.github/workflows/test-provider-packages.yml 
b/.github/workflows/test-provider-packages.yml
index ffeea11c2c4..3f1b50e9a76 100644
--- a/.github/workflows/test-provider-packages.yml
+++ b/.github/workflows/test-provider-packages.yml
@@ -99,7 +99,7 @@ jobs:
       - name: "Prepare provider documentation"
         run: >
           breeze release-management prepare-provider-documentation 
--include-not-ready-providers
-          --non-interactive
+          --non-interactive --answer yes
         if: matrix.package-format == 'wheel'
       - name: "Prepare provider packages: ${{ matrix.package-format }}"
         run: >
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d1d21d5442e..ab73a9dfe8f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1382,7 +1382,7 @@ repos:
         stages: ['manual']
         name: Run mypy for providers (manual)
         language: python
-        entry: ./scripts/ci/pre_commit/mypy_folder.py all_new_providers
+        entry: ./scripts/ci/pre_commit/mypy_folder.py all_providers
         pass_filenames: false
         files: ^.*\.py$
         require_serial: true
diff --git a/airflow/new_provider.yaml.schema.json 
b/airflow/new_provider.yaml.schema.json
deleted file mode 100644
index 4b5e16cedc0..00000000000
--- a/airflow/new_provider.yaml.schema.json
+++ /dev/null
@@ -1,519 +0,0 @@
-{
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "type": "object",
-    "properties": {
-        "package-name": {
-            "description": "Package name available under which the package is 
available in the PyPI repository.",
-            "type": "string"
-        },
-        "name": {
-            "description": "Provider name",
-            "type": "string"
-        },
-        "description": {
-            "description": "Information about the package in RST format",
-            "type": "string"
-        },
-        "versions": {
-            "description": "List of available versions in PyPI. Sorted 
descending according to release date.",
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        },
-        "state": {
-            "description": "State of provider: might be not-ready, regular, 
suspended, removed.",
-            "type:": "string",
-            "enum": [
-                "not-ready",
-                "ready",
-                "suspended",
-                "removed"
-            ]
-        },
-        "excluded-python-versions": {
-            "description": "List of python versions excluded for that 
provider",
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        },
-        "integrations": {
-            "description": "List of integrations supported by the provider.",
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "integration-name": {
-                        "type": "string",
-                        "description": "Name of the integration."
-                    },
-                    "external-doc-url": {
-                        "type": "string",
-                        "description": "URL to external documentation for the 
integration."
-                    },
-                    "how-to-guide": {
-                        "description": "List of paths to how-to-guide for the 
integration. The path must start with '/docs/'",
-                        "type": "array",
-                        "items": {
-                            "type": "string"
-                        }
-                    },
-                    "logo": {
-                        "description": "Path to the logo for the integration. 
The path must start with '/integration-logos/'",
-                        "type": "string"
-                    },
-                    "tags": {
-                        "description": "List of tags describing the 
integration. While we're using RST, only one tag is supported per integration.",
-                        "type": "array",
-                        "items": {
-                            "type": "string",
-                            "enum": [
-                                "alibaba",
-                                "apache",
-                                "aws",
-                                "azure",
-                                "dbt",
-                                "gcp",
-                                "gmp",
-                                "google",
-                                "kafka",
-                                "protocol",
-                                "service",
-                                "software",
-                                "yandex"
-                            ]
-                        },
-                        "minItems": 1,
-                        "maxItems": 1
-                    }
-                },
-                "additionalProperties": false,
-                "required": [
-                    "integration-name",
-                    "external-doc-url",
-                    "tags"
-                ]
-            }
-        },
-        "operators": {
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "integration-name": {
-                        "type": "string",
-                        "description": "Integration name. It must have a 
matching item in the 'integration' section of any provider."
-                    },
-                    "python-modules": {
-                        "description": "List of python modules containing the 
operators.",
-                        "type": "array",
-                        "items": {
-                            "type": "string"
-                        }
-                    }
-                },
-                "additionalProperties": false,
-                "required": [
-                    "integration-name",
-                    "python-modules"
-                ]
-            }
-        },
-        "sensors": {
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "integration-name": {
-                        "type": "string",
-                        "description": "Integration name. It must have a 
matching item in the 'integration' section of any provider."
-                    },
-                    "python-modules": {
-                        "description": "List of python modules containing the 
sensors.",
-                        "type": "array",
-                        "items": {
-                            "type": "string"
-                        }
-                    }
-                },
-                "required": [
-                    "integration-name",
-                    "python-modules"
-                ],
-                "additionalProperties": true
-            }
-        },
-        "dialects": {
-            "type": "array",
-            "description": "Array of dialects mapped to dialect class names",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "dialect-type": {
-                        "description": "Type of dialect defined by the 
provider",
-                        "type": "string"
-                    },
-                    "dialect-class-name": {
-                        "description": "Dialect class name that implements the 
dialect type",
-                        "type": "string"
-                    }
-                },
-                "required": [
-                    "dialect-type",
-                    "dialect-class-name"
-                ]
-            }
-        },
-        "hooks": {
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "integration-name": {
-                        "type": "string",
-                        "description": "Integration name. It must have a 
matching item in the 'integration' section of any provider."
-                    },
-                    "python-modules": {
-                        "description": "List of python modules containing the 
hooks.",
-                        "type": "array",
-                        "items": {
-                            "type": "string"
-                        }
-                    }
-                },
-                "additionalProperties": false,
-                "required": [
-                    "integration-name",
-                    "python-modules"
-                ]
-            }
-        },
-        "filesystems": {
-            "type": "array",
-            "description": "Filesystem module names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "xcom": {
-            "type": "array",
-            "description": "XCom module names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "asset-uris": {
-            "type": "array",
-            "description": "Asset URI formats",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "schemes": {
-                        "type": "array",
-                        "description": "List of supported URI schemes",
-                        "items": {
-                            "type": "string"
-                        }
-                    },
-                    "handler": {
-                        "type": ["string", "null"],
-                        "description": "Normalization function for specified 
URI schemes. Import path to a callable taking and returning a SplitResult. 
'null' specifies a no-op."
-                    },
-                    "factory": {
-                        "type": ["string", "null"],
-                        "description": "Asset factory for specified URI. 
Creates AIP-60 compliant Asset."
-                    },
-                    "to_openlineage_converter": {
-                        "type": ["string", "null"],
-                        "description": "OpenLineage converter function for 
specified URI schemes. Import path to a callable accepting an Asset and 
LineageContext and returning OpenLineage dataset."
-                    }
-                }
-            }
-        },
-        "dataset-uris": {
-            "type": "array",
-            "description": "Dataset URI formats (will be removed in Airflow 
3.0)",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "schemes": {
-                        "type": "array",
-                        "description": "List of supported URI schemes",
-                        "items": {
-                            "type": "string"
-                        }
-                    },
-                    "handler": {
-                        "type": ["string", "null"],
-                        "description": "Normalization function for specified 
URI schemes. Import path to a callable taking and returning a SplitResult. 
'null' specifies a no-op."
-                    },
-                    "factory": {
-                        "type": ["string", "null"],
-                        "description": "Dataset factory for specified URI. 
Creates AIP-60 compliant Dataset."
-                    },
-                    "to_openlineage_converter": {
-                        "type": ["string", "null"],
-                        "description": "OpenLineage converter function for 
specified URI schemes. Import path to a callable accepting a Dataset and 
LineageContext and returning OpenLineage dataset."
-                    }
-                }
-            }
-        },
-        "transfers": {
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "how-to-guide": {
-                        "description": "Path to how-to-guide for the transfer. 
The path must start with '/docs/'",
-                        "type": "string"
-                    },
-                    "source-integration-name": {
-                        "type": "string",
-                        "description": "Integration name. It must have a 
matching item in the 'integration' section of any provider."
-                    },
-                    "target-integration-name": {
-                        "type": "string",
-                        "description": "Target integration name. It must have 
a matching item in the 'integration' section of any provider."
-                    },
-                    "python-module": {
-                        "type": "string",
-                        "description": "List of python modules containing the 
transfers."
-                    }
-                },
-                "additionalProperties": false,
-                "required": [
-                    "source-integration-name",
-                    "target-integration-name",
-                    "python-module"
-                ]
-            }
-        },
-        "triggers": {
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "integration-name": {
-                        "type": "string",
-                        "description": "Integration name. It must have a 
matching item in the 'integration' section of any provider."
-                    },
-                    "python-modules": {
-                        "description": "List of Python modules containing the 
triggers.",
-                        "type": "array",
-                        "items": {
-                            "type": "string"
-                        }
-                    }
-                },
-                "additionalProperties": false,
-                "required": [
-                    "integration-name",
-                    "python-modules"
-                ]
-            }
-        },
-        "connection-types": {
-            "type": "array",
-            "description": "Array of connection types mapped to hook class 
names",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "connection-type": {
-                        "description": "Type of connection defined by the 
provider",
-                        "type": "string"
-                    },
-                    "hook-class-name": {
-                        "description": "Hook class name that implements the 
connection type",
-                        "type": "string"
-                    }
-                },
-                "required": [
-                    "connection-type",
-                    "hook-class-name"
-                ]
-            }
-        },
-        "extra-links": {
-            "type": "array",
-            "description": "Operator class names that provide extra link 
functionality",
-            "items": {
-                "type": "string"
-            }
-        },
-        "task-decorators": {
-            "type": "array",
-            "description": "Decorators to use with the TaskFlow API. Can be 
accessed by users via '@task.<name>'",
-            "items": {
-                "name": {
-                    "type": "string"
-                },
-                "path": {
-                    "type": "string"
-                }
-            }
-        },
-        "secrets-backends": {
-            "type": "array",
-            "description": "Secrets Backend class names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "auth-managers": {
-            "type": "array",
-            "description": "Auth managers class names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "logging": {
-            "type": "array",
-            "description": "Logging Task Handlers class names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "auth-backends": {
-            "type": "array",
-            "description": "API Auth Backend module names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "notifications": {
-            "type": "array",
-            "description": "Notification class names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "executors": {
-            "type": "array",
-            "description": "Executor class names",
-            "items": {
-                "type": "string"
-            }
-        },
-        "config": {
-            "type": "object",
-            "additionalProperties": {
-                "type": "object",
-                "properties": {
-                    "description": {
-                        "type": [
-                            "string",
-                            "null"
-                        ]
-                    },
-                    "options": {
-                        "type": "object",
-                        "additionalProperties": {
-                            "$ref": "#/definitions/option"
-                        }
-                    },
-                    "renamed": {
-                        "type": "object",
-                        "properties": {
-                            "previous_name": {
-                                "type": "string"
-                            },
-                            "version": {
-                                "type": "string"
-                            }
-                        }
-                    }
-                },
-                "required": [
-                    "description",
-                    "options"
-                ],
-                "additionalProperties": false
-            }
-        },
-        "plugins": {
-            "type": "array",
-            "description": "Plugins exposed by the provider",
-            "items": {
-                "name": {
-                    "type": "string"
-                },
-                "plugin-class": {
-                    "type": "string"
-                }
-            }
-        },
-        "source-date-epoch": {
-            "type": "integer",
-            "description": "Source date epoch - seconds since epoch (gmtime) 
when the release documentation was prepared. Used to generate reproducible 
package builds with flint.",
-            "minimum": 0,
-            "default": 0,
-            "examples": [
-                1609459200
-            ]
-        }
-    },
-    "additionalProperties": false,
-    "definitions": {
-        "option": {
-            "type": "object",
-            "properties": {
-                "description": {
-                    "type": [
-                        "string",
-                        "null"
-                    ]
-                },
-                "version_added": {
-                    "type": [
-                        "string",
-                        "null"
-                    ]
-                },
-                "type": {
-                    "type": "string",
-                    "enum": [
-                        "string",
-                        "boolean",
-                        "integer",
-                        "float"
-                    ]
-                },
-                "example": {
-                    "type": [
-                        "string",
-                        "null",
-                        "number"
-                    ]
-                },
-                "default": {
-                    "type": [
-                        "string",
-                        "null",
-                        "number"
-                    ]
-                },
-                "sensitive": {
-                    "type": "boolean",
-                    "description": "When true, this option is sensitive and 
can be specified using AIRFLOW__{section}___{name}__SECRET or 
AIRFLOW__{section}___{name}_CMD environment variables. See: 
airflow.configuration.AirflowConfigParser.sensitive_config_values"
-                }
-            },
-            "required": [
-                "description",
-                "version_added",
-                "type",
-                "example",
-                "default"
-            ],
-            "additional_properties": false
-        }
-    },
-    "required": [
-        "name",
-        "package-name",
-        "description",
-        "state",
-        "source-date-epoch",
-        "versions"
-    ]
-}
diff --git a/airflow/provider.yaml.schema.json 
b/airflow/provider.yaml.schema.json
index 0b770ee2b3f..4b5e16cedc0 100644
--- a/airflow/provider.yaml.schema.json
+++ b/airflow/provider.yaml.schema.json
@@ -31,20 +31,6 @@
                 "removed"
             ]
         },
-        "dependencies": {
-            "description": "Dependencies that should be added to the provider",
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        },
-        "devel-dependencies": {
-            "description": "Dependencies that should be added to development 
requirements of the provider",
-            "type": "array",
-            "items": {
-                "type": "string"
-            }
-        },
         "excluded-python-versions": {
             "description": "List of python versions excluded for that 
provider",
             "type": "array",
@@ -355,30 +341,6 @@
                 "type": "string"
             }
         },
-        "additional-extras": {
-            "type": "array",
-            "items": {
-                "type": "object",
-                "properties": {
-                    "name": {
-                        "description": "Name of the extra",
-                        "type": "string"
-                    },
-                    "dependencies": {
-                        "description": "Dependencies that should be added for 
the extra",
-                        "type": "array",
-                        "items": {
-                            "type": "string"
-                        }
-                    }
-                },
-                "required": [
-                    "name",
-                    "dependencies"
-                ]
-            },
-            "description": "Additional extras that the provider should have. 
Replaces auto-generated cross-provider extras, if matching the same prefix, so 
that you can specify boundaries for existing dependencies."
-        },
         "task-decorators": {
             "type": "array",
             "description": "Decorators to use with the TaskFlow API. Can be 
accessed by users via '@task.<name>'",
@@ -552,7 +514,6 @@
         "description",
         "state",
         "source-date-epoch",
-        "dependencies",
         "versions"
     ]
 }
diff --git a/contributing-docs/11_provider_packages.rst 
b/contributing-docs/11_provider_packages.rst
index 383f8d298eb..bdd34c9eb71 100644
--- a/contributing-docs/11_provider_packages.rst
+++ b/contributing-docs/11_provider_packages.rst
@@ -174,13 +174,9 @@ there where you should add and remove dependencies for 
providers (following by r
 ``update-providers-dependencies`` pre-commit to synchronize the dependencies 
with ``pyproject.toml``
 of Airflow).
 
-The old ``provider.yaml`` file is compliant with the schema that is available 
in
+The ``provider.yaml`` file is compliant with the schema that is available in
 `json-schema specification 
<https://github.com/apache/airflow/blob/main/airflow/provider.yaml.schema.json>`_.
 
-# TODO(potiuk) - rename when all providers are new-style
-The new ``provider.yaml`` file is compliant with the new schema that is 
available in
-`json-schema specification 
<https://github.com/apache/airflow/blob/main/airflow/new_provider.yaml.schema.json>`_.
-
 Thanks to that mechanism, you can develop community managed providers in a 
seamless way directly from
 Airflow sources, without preparing and releasing them as packages separately, 
which would be rather
 complicated.
diff --git a/dev/breeze/README.md b/dev/breeze/README.md
index 8fd7e9c0886..aee803b7469 100644
--- a/dev/breeze/README.md
+++ b/dev/breeze/README.md
@@ -128,6 +128,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY 
UPDATED BY PRE-COMMIT.
 
 
---------------------------------------------------------------------------------------------------------
 
-Package config hash: 
79fadb6850f8cd60994498d51df4f29046aab45e4bb15944afe8bbeacf76770e379d0462dced117e4dc911426dff136fef9b2d6a930957f829413e0ae2261cc9
+Package config hash: 
6b59c778b56a87c8b91951e553072e1dbca5c871dc7eeb6c010481c5ebf6388ad04571c04d89ed8fe3cc9d9e5c827d889b0d08d3cb8ef0fb2d3a6fd3457aa607
 
 
---------------------------------------------------------------------------------------------------------
diff --git a/dev/breeze/pyproject.toml b/dev/breeze/pyproject.toml
index 0207760849b..877dbd59f7b 100644
--- a/dev/breeze/pyproject.toml
+++ b/dev/breeze/pyproject.toml
@@ -72,6 +72,7 @@ dependencies = [
     "pytest>=8.2,<9",
     "pyyaml>=6.0.1",
     "requests>=2.31.0",
+    "restructuredtext-lint>=1.4.0",
     "rich-click>=1.7.1",
     "rich>=13.6.0",
     "semver>=3.0.2",
diff --git 
a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py 
b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index 0fdb5085284..f979356086f 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -101,8 +101,6 @@ from airflow_breeze.prepare_providers.provider_packages 
import (
     apply_version_suffix_to_pyproject_toml,
     build_provider_package,
     cleanup_build_remnants,
-    copy_provider_sources_to_target,
-    generate_build_files,
     get_packages_list_to_act_on,
     move_built_packages_and_cleanup,
     restore_pyproject_toml,
@@ -1054,49 +1052,29 @@ def prepare_provider_packages(
             get_console().print()
             with ci_group(f"Preparing provider package 
[special]{provider_id}"):
                 get_console().print()
-                # TODO(potiuk) - rename when all providers are new-style
-                new_provider_root_dir = 
AIRFLOW_PROVIDERS_DIR.joinpath(*provider_id.split("."))
-                if (new_provider_root_dir / "provider.yaml").exists():
+                provider_root_dir = 
AIRFLOW_PROVIDERS_DIR.joinpath(*provider_id.split("."))
+                if (provider_root_dir / "provider.yaml").exists():
                     get_console().print(
                         f"[info]Provider {provider_id} is a new-style provider 
building in-place."
                     )
-                    cleanup_build_remnants(new_provider_root_dir)
+                    cleanup_build_remnants(provider_root_dir)
                     old_content = apply_version_suffix_to_pyproject_toml(
-                        provider_id, new_provider_root_dir, 
package_version_suffix
+                        provider_id, provider_root_dir, package_version_suffix
                     )
                     try:
                         build_provider_package(
                             provider_id=provider_id,
                             package_format=package_format,
-                            
target_provider_root_sources_path=new_provider_root_dir,
+                            
target_provider_root_sources_path=provider_root_dir,
                         )
                     finally:
-                        restore_pyproject_toml(new_provider_root_dir, 
old_content)
+                        restore_pyproject_toml(provider_root_dir, old_content)
                     move_built_packages_and_cleanup(
-                        new_provider_root_dir,
+                        provider_root_dir,
                         DIST_DIR,
                         skip_cleanup=skip_deleting_generated_files,
                         delete_only_build_and_dist_folders=True,
                     )
-                else:
-                    # TODO(potiuk) - remove this once all providers are 
new-style
-                    target_provider_root_sources_path = 
copy_provider_sources_to_target(provider_id)
-                    generate_build_files(
-                        provider_id=provider_id,
-                        version_suffix=package_version_suffix,
-                        
target_provider_root_sources_path=target_provider_root_sources_path,
-                    )
-                    cleanup_build_remnants(target_provider_root_sources_path)
-                    build_provider_package(
-                        provider_id=provider_id,
-                        package_format=package_format,
-                        
target_provider_root_sources_path=target_provider_root_sources_path,
-                    )
-                    move_built_packages_and_cleanup(
-                        target_provider_root_sources_path,
-                        DIST_DIR,
-                        skip_cleanup=skip_deleting_generated_files,
-                    )
         except PrepareReleasePackageTagExistException:
             skipped_as_already_released_packages.append(provider_id)
         except PrepareReleasePackageWrongSetupException:
@@ -1927,7 +1905,6 @@ def _add_chicken_egg_providers_to_build_args(
         python_build_args["DOCKER_CONTEXT_FILES"] = "./docker-context-files"
 
 
-# TODO(potiuk) - remove when all providers are new-style
 @release_management.command(
     name="clean-old-provider-artifacts",
     help="Cleans the old provider artifacts",
diff --git 
a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py 
b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
index 98936310c3c..82e604a0d0b 100644
--- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
+++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_documentation.py
@@ -32,11 +32,8 @@ from shutil import copyfile
 from time import time
 from typing import Any, NamedTuple
 
-import jinja2
-import semver
 from rich.syntax import Syntax
 
-from airflow_breeze.global_constants import PROVIDER_DEPENDENCIES
 from airflow_breeze.utils.black_utils import black_format
 from airflow_breeze.utils.confirm import Answer, user_confirm
 from airflow_breeze.utils.console import get_console
@@ -44,11 +41,11 @@ from airflow_breeze.utils.packages import (
     HTTPS_REMOTE,
     ProviderPackageDetails,
     clear_cache_for_provider_metadata,
-    get_pip_package_name,
     get_provider_details,
     get_provider_jinja_context,
     get_provider_yaml,
     refresh_provider_metadata_from_yaml_file,
+    regenerate_pyproject_toml,
     render_template,
 )
 from airflow_breeze.utils.path_utils import AIRFLOW_SOURCES_ROOT, 
BREEZE_SOURCES_DIR
@@ -143,9 +140,11 @@ def get_most_impactful_change(changes: list[TypeOfChange]):
 
 
 def format_message_for_classification(message):
-    num = re.search(r"#(\d+)", message).group(1)
-    new_message = re.sub(r"#(\d+)", 
f"https://github.com/apache/airflow/pull/{num}", message)
-    return new_message
+    find_pr = re.search(r"#(\d+)", message)
+    if find_pr:
+        num = find_pr.group(1)
+        message = re.sub(r"#(\d+)", 
f"https://github.com/apache/airflow/pull/{num}", message)
+    return message
 
 
 class ClassifiedChanges:
@@ -268,7 +267,7 @@ def _convert_git_changes_to_table(
             (
                 f"[{change.short_hash}]({base_url}{change.full_hash})"
                 if markdown
-                else f"`{change.short_hash} <{base_url}{change.full_hash}>`_",
+                else f"`{change.short_hash} <{base_url}{change.full_hash}>`__",
                 change.date,
                 f"`{change.message_without_backticks}`"
                 if markdown
@@ -322,8 +321,8 @@ def _get_all_changes_for_package(
     )
     providers_folder_paths_for_git_commit_retrieval = [
         provider_details.root_provider_path,
-        provider_details.old_source_provider_package_path,
-        provider_details.old_documentation_provider_package_path,
+        provider_details.previous_source_provider_package_path,
+        provider_details.previous_documentation_provider_package_path,
         provider_details.original_source_provider_package_path,
     ]
     if not reapply_templates_only and result.returncode == 0:
@@ -344,14 +343,9 @@ def _get_all_changes_for_package(
         changes = result.stdout.strip()
         if changes:
             provider_details = get_provider_details(provider_package_id)
-            if provider_details.is_new_structure:
-                doc_only_change_file = (
-                    provider_details.root_provider_path / "docs" / 
".latest-doc-only-change.txt"
-                )
-            else:
-                doc_only_change_file = (
-                    provider_details.base_provider_package_path / 
".latest-doc-only-change.txt"
-                )
+            doc_only_change_file = (
+                provider_details.root_provider_path / "docs" / 
".latest-doc-only-change.txt"
+            )
             if doc_only_change_file.exists():
                 last_doc_only_hash = doc_only_change_file.read_text().strip()
                 try:
@@ -485,14 +479,7 @@ def _mark_latest_changes_as_documentation_only(
         f"[special]Marking last change: {latest_change.short_hash} and all 
above "
         f"changes since the last release as doc-only changes!"
     )
-    if provider_details.is_new_structure:
-        latest_doc_onl_change_file = (
-            provider_details.root_provider_path / "docs" / 
".latest-doc-only-change.txt"
-        )
-    else:
-        latest_doc_onl_change_file = (
-            provider_details.base_provider_package_path / 
".latest-doc-only-change.txt"
-        )
+    latest_doc_onl_change_file = provider_details.root_provider_path / "docs" 
/ ".latest-doc-only-change.txt"
 
     latest_doc_onl_change_file.write_text(latest_change.full_hash + "\n")
     raise PrepareReleaseDocsChangesOnlyException()
@@ -510,6 +497,8 @@ def _update_version_in_provider_yaml(
     """
     provider_details = get_provider_details(provider_id)
     version = provider_details.versions[0]
+    import semver
+
     v = semver.VersionInfo.parse(version)
     with_breaking_changes = False
     maybe_with_new_features = False
@@ -525,7 +514,7 @@ def _update_version_in_provider_yaml(
         v = v.bump_patch()
     elif type_of_change == TypeOfChange.MISC:
         v = v.bump_patch()
-    provider_yaml_path, is_new_structure = get_provider_yaml(provider_id)
+    provider_yaml_path = get_provider_yaml(provider_id)
     original_provider_yaml_content = provider_yaml_path.read_text()
     updated_provider_yaml_content = re.sub(
         r"^versions:", f"versions:\n  - {v}", original_provider_yaml_content, 
1, re.MULTILINE
@@ -543,7 +532,7 @@ def _update_source_date_epoch_in_provider_yaml(
 
     :param provider_id: provider package
     """
-    provider_yaml_path, is_new_structure = get_provider_yaml(provider_id)
+    provider_yaml_path = get_provider_yaml(provider_id)
     original_text = provider_yaml_path.read_text()
     source_date_epoch = int(time())
     new_text = re.sub(
@@ -556,15 +545,13 @@ def _update_source_date_epoch_in_provider_yaml(
 
 def _verify_changelog_exists(package: str) -> Path:
     provider_details = get_provider_details(package)
-    changelog_path = (
-        Path(provider_details.root_provider_path) / "docs" / "changelog.rst"
-        if provider_details.is_new_structure
-        else Path(provider_details.root_provider_path) / "CHANGELOG.rst"
-    )
+    changelog_path = Path(provider_details.root_provider_path) / "docs" / 
"changelog.rst"
     if not os.path.isfile(changelog_path):
         get_console().print(f"\n[error]ERROR: Missing {changelog_path}[/]\n")
         get_console().print("[info]Please add the file with initial content:")
         get_console().print("----- START COPYING AFTER THIS LINE ------- ")
+        import jinja2
+
         processed_changelog = jinja2.Template(INITIAL_CHANGELOG_CONTENT, 
autoescape=True).render(
             package_name=provider_details.pypi_package_name,
         )
@@ -663,47 +650,29 @@ def _update_file(
         )
         raise PrepareReleaseDocsErrorOccurredException()
 
-    # TODO: uncomment me. Linting revealed that our already generated provider 
docs have duplicate links
-    #       in the generated files, we should fix those and uncomment linting 
as separate step - so that
-    #       we do not hold current release for fixing the docs.
-    # console.print(f"Linting: {file_path}")
-    # errors = restructuredtext_lint.lint_file(file_path)
-    # real_errors = False
-    # if errors:
-    #     for error in errors:
-    #         # Skip known issue: linter with doc role similar to 
https://github.com/OCA/pylint-odoo/issues/38
-    #         if (
-    #             'No role entry for "doc"' in error.message
-    #             or 'Unknown interpreted text role "doc"' in error.message
-    #         ):
-    #             continue
-    #         real_errors = True
-    #         console.print(f"* [red] {error.message}")
-    #     if real_errors:
-    #         console.print(f"\n[red] Errors found in {file_path}")
-    #         raise PrepareReleaseDocsErrorOccurredException()
+    get_console().print(f"Linting: {target_file_path}")
+    import restructuredtext_lint
+
+    errors = restructuredtext_lint.lint_file(target_file_path.as_posix())
+    real_errors = False
+    if errors:
+        for error in errors:
+            # Skip known issue: linter with doc role similar to 
https://github.com/OCA/pylint-odoo/issues/38
+            if (
+                'No role entry for "doc"' in error.message
+                or 'Unknown interpreted text role "doc"' in error.message
+            ):
+                continue
+            real_errors = True
+            get_console().print(f"* [red] {error.message}")
+        if real_errors:
+            get_console().print(f"\n[red] Errors found in {target_file_path}")
+            raise PrepareReleaseDocsErrorOccurredException()
 
     get_console().print(f"[success]Generated {target_file_path} for 
{provider_package_id} is OK[/]")
     return
 
 
-def _update_changelog_rst(
-    context: dict[str, Any],
-    provider_package_id: str,
-    target_path: Path,
-    regenerate_missing_docs: bool,
-) -> None:
-    _update_file(
-        context=context,
-        template_name="PROVIDER_CHANGELOG",
-        extension=".rst",
-        file_name="changelog.rst",
-        provider_package_id=provider_package_id,
-        target_path=target_path,
-        regenerate_missing_docs=regenerate_missing_docs,
-    )
-
-
 def _update_commits_rst(
     context: dict[str, Any],
     provider_package_id: str,
@@ -845,7 +814,7 @@ def update_release_notes(
     else:
         answer = Answer.YES
 
-    provider_yaml_path, is_new_structure = 
get_provider_yaml(provider_package_id)
+    provider_yaml_path = get_provider_yaml(provider_package_id)
     if answer == Answer.NO:
         if original_provider_yaml_content is not None:
             # Restore original content of the provider.yaml
@@ -892,13 +861,6 @@ def update_release_notes(
     )
     jinja_context["DETAILED_CHANGES_RST"] = changes_as_table
     jinja_context["DETAILED_CHANGES_PRESENT"] = bool(changes_as_table)
-    if not provider_details.is_new_structure:
-        _update_changelog_rst(
-            jinja_context,
-            provider_package_id,
-            provider_details.documentation_provider_package_path,
-            regenerate_missing_docs,
-        )
     _update_commits_rst(
         jinja_context,
         provider_package_id,
@@ -1162,82 +1124,6 @@ def _generate_readme_rst(context: dict[str, Any], 
provider_details: ProviderPack
     )
 
 
-def _regenerate_pyproject_toml(context: dict[str, Any], provider_details: 
ProviderPackageDetails):
-    get_pyproject_toml_path = provider_details.root_provider_path / 
"pyproject.toml"
-    # we want to preserve comments in dependencies - both required and 
additional,
-    # so we should not really parse the toml file but extract dependencies "as 
is" in text form and pass
-    # them to context. While this is not "generic toml" perfect, for provider 
pyproject.toml files it is
-    # good enough, because we fully control the pyproject.toml content for 
providers as they are generated
-    # from our templates (Except the dependencies section that is manually 
updated)
-    pyproject_toml_content = get_pyproject_toml_path.read_text()
-    required_dependencies: list[str] = []
-    optional_dependencies: list[str] = []
-    dependency_groups: list[str] = []
-    in_required_dependencies = False
-    in_optional_dependencies = False
-    in_dependency_groups = False
-    for line in pyproject_toml_content.splitlines():
-        if line == "dependencies = [":
-            in_required_dependencies = True
-            continue
-        if in_required_dependencies and line == "]":
-            in_required_dependencies = False
-            continue
-        if line == "[dependency-groups]":
-            in_dependency_groups = True
-            continue
-        if in_dependency_groups and line == "":
-            in_dependency_groups = False
-            continue
-        if in_dependency_groups and line.startswith("["):
-            in_dependency_groups = False
-        if line == "[project.optional-dependencies]":
-            in_optional_dependencies = True
-            continue
-        if in_optional_dependencies and line == "":
-            in_optional_dependencies = False
-            continue
-        if in_optional_dependencies and line.startswith("["):
-            in_optional_dependencies = False
-        if in_required_dependencies:
-            required_dependencies.append(line)
-        if in_optional_dependencies:
-            optional_dependencies.append(line)
-        if in_dependency_groups:
-            dependency_groups.append(line)
-
-    # For additional providers we want to load the dependencies and see if 
cross-provider-dependencies are
-    # present and if not, add them to the optional dependencies
-
-    context["INSTALL_REQUIREMENTS"] = "\n".join(required_dependencies)
-
-    # Add cross-provider dependencies to the optional dependencies if they are 
missing
-    for module in 
PROVIDER_DEPENDENCIES.get(provider_details.provider_id)["cross-providers-deps"]:
-        if f'"{module}" = [' not in optional_dependencies and 
get_pip_package_name(module) not in "\n".join(
-            required_dependencies
-        ):
-            optional_dependencies.append(f'"{module}" = [')
-            optional_dependencies.append(f'    
"{get_pip_package_name(module)}"')
-            optional_dependencies.append("]")
-    context["EXTRAS_REQUIREMENTS"] = "\n".join(optional_dependencies)
-    context["DEPENDENCY_GROUPS"] = "\n".join(dependency_groups)
-
-    get_pyproject_toml_content = render_template(
-        template_name="pyproject",
-        context=context,
-        extension=".toml",
-        autoescape=False,
-        lstrip_blocks=True,
-        trim_blocks=True,
-        keep_trailing_newline=True,
-    )
-
-    get_pyproject_toml_path.write_text(get_pyproject_toml_content)
-    get_console().print(
-        f"[info]Generated {get_pyproject_toml_path} for the 
{provider_details.provider_id} provider\n"
-    )
-
-
 def _generate_build_files_for_provider(
     context: dict[str, Any],
     provider_details: ProviderPackageDetails,
@@ -1252,15 +1138,13 @@ def _generate_build_files_for_provider(
     )
     init_py_path = provider_details.base_provider_package_path / "__init__.py"
     init_py_path.write_text(init_py_content)
-    # TODO(potiuk) - remove this if when we move all providers to new structure
-    if provider_details.is_new_structure:
-        _generate_readme_rst(context, provider_details)
-        _regenerate_pyproject_toml(context, provider_details)
-        _generate_get_provider_info_py(context, provider_details)
-        shutil.copy(
-            BREEZE_SOURCES_DIR / "airflow_breeze" / "templates" / 
"PROVIDER_LICENSE.txt",
-            provider_details.base_provider_package_path / "LICENSE",
-        )
+    _generate_readme_rst(context, provider_details)
+    regenerate_pyproject_toml(context, provider_details, version_suffix=None)
+    _generate_get_provider_info_py(context, provider_details)
+    shutil.copy(
+        BREEZE_SOURCES_DIR / "airflow_breeze" / "templates" / 
"PROVIDER_LICENSE.txt",
+        provider_details.base_provider_package_path / "LICENSE",
+    )
 
 
 def _replace_min_airflow_version_in_provider_yaml(
diff --git 
a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py 
b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py
index 57ca5942f67..4a2d8599f6d 100644
--- a/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py
+++ b/dev/breeze/src/airflow_breeze/prepare_providers/provider_packages.py
@@ -21,24 +21,20 @@ import shutil
 import subprocess
 import sys
 from pathlib import Path
-from shutil import copytree, rmtree
 from typing import Any, TextIO
 
 from airflow_breeze.utils.console import get_console
 from airflow_breeze.utils.packages import (
-    apply_version_suffix,
     get_available_packages,
     get_latest_provider_tag,
     get_not_ready_provider_ids,
-    get_old_source_providers_package_path,
     get_provider_details,
     get_provider_jinja_context,
     get_removed_provider_ids,
-    get_target_root_for_copied_provider_sources,
+    regenerate_pyproject_toml,
     render_template,
     tag_exists_for_provider,
 )
-from airflow_breeze.utils.path_utils import BREEZE_SOURCES_DIR, 
OLD_AIRFLOW_PROVIDERS_SRC_DIR
 from airflow_breeze.utils.run_utils import run_command
 from airflow_breeze.utils.version_utils import is_local_package_version
 
@@ -74,37 +70,6 @@ class 
PrepareReleasePackageErrorBuildingPackageException(Exception):
     """Error when building the package."""
 
 
-def copy_provider_sources_to_target(provider_id: str) -> Path:
-    target_provider_root_path = 
get_target_root_for_copied_provider_sources(provider_id)
-
-    if target_provider_root_path.exists() and not 
target_provider_root_path.is_dir():
-        get_console().print(
-            f"[error]Target folder for {provider_id} sources is not a 
directory "
-            f"please delete {target_provider_root_path} and try again!"
-        )
-    rmtree(target_provider_root_path, ignore_errors=True)
-    target_provider_root_path.mkdir(parents=True)
-    source_provider_sources_path = 
get_old_source_providers_package_path(provider_id)
-    relative_provider_path = 
source_provider_sources_path.relative_to(OLD_AIRFLOW_PROVIDERS_SRC_DIR)
-    target_providers_sub_folder = target_provider_root_path / 
relative_provider_path
-    get_console().print(
-        f"[info]Copying provider sources: {source_provider_sources_path} -> 
{target_providers_sub_folder}"
-    )
-    copytree(source_provider_sources_path, target_providers_sub_folder)
-    shutil.copy(
-        BREEZE_SOURCES_DIR / "airflow_breeze" / "templates" / 
"PROVIDER_LICENSE.txt",
-        target_providers_sub_folder / "LICENSE",
-    )
-    # We do not copy NOTICE from the top level source of Airflow because 
NOTICE only refers to
-    # Airflow sources - not to providers. If any of the providers is going to 
have a code that
-    # requires NOTICE, then it should be stored in the provider sources 
(airflow/providers/PROVIDER_ID)
-    # And it will be copied from there.
-    (target_providers_sub_folder / 
".latest-doc-only-change.txt").unlink(missing_ok=True)
-    (target_providers_sub_folder / "CHANGELOG.rst").unlink(missing_ok=True)
-    (target_providers_sub_folder / "provider.yaml").unlink(missing_ok=True)
-    return target_provider_root_path
-
-
 def get_provider_package_jinja_context(provider_id: str, version_suffix: str) 
-> dict[str, Any]:
     provider_details = get_provider_details(provider_id)
     jinja_context = get_provider_jinja_context(
@@ -212,15 +177,9 @@ def apply_version_suffix_to_pyproject_toml(
     if not version_suffix:
         return original_pyproject_toml_content
     get_console().print(f"\n[info]Applying version suffix {version_suffix} to 
{pyproject_toml_path}")
-    try:
-        import tomllib
-    except ImportError:
-        import tomli as tomllib
-    pyproject_toml_data = tomllib.loads(original_pyproject_toml_content)
-    new_dependencies = []
-    for dependency in pyproject_toml_data["project"].get("dependencies", []):
-        new_dependencies.append(apply_version_suffix(dependency, 
version_suffix))
     jinja_context = 
get_provider_package_jinja_context(provider_id=provider_id, 
version_suffix=version_suffix)
+    provider_details = get_provider_details(provider_id)
+    regenerate_pyproject_toml(jinja_context, provider_details, version_suffix)
     _prepare_pyproject_toml_file(jinja_context, 
target_provider_root_sources_path)
     return original_pyproject_toml_content
 
diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py 
b/dev/breeze/src/airflow_breeze/utils/packages.py
index 022f093871e..2ffc88d354c 100644
--- a/dev/breeze/src/airflow_breeze/utils/packages.py
+++ b/dev/breeze/src/airflow_breeze/utils/packages.py
@@ -20,6 +20,7 @@ from __future__ import annotations
 import fnmatch
 import json
 import os
+import re
 import subprocess
 import sys
 from collections.abc import Iterable
@@ -44,16 +45,11 @@ from airflow_breeze.utils.path_utils import (
     AIRFLOW_PROVIDERS_DIR,
     BREEZE_SOURCES_DIR,
     DOCS_ROOT,
-    GENERATED_PROVIDER_PACKAGES_DIR,
-    OLD_AIRFLOW_PROVIDERS_NS_PACKAGE,
-    OLD_AIRFLOW_PROVIDERS_SRC_DIR,
+    PREVIOUS_AIRFLOW_PROVIDERS_NS_PACKAGE,
     PROVIDER_DEPENDENCIES_JSON_FILE_PATH,
 )
 from airflow_breeze.utils.publish_docs_helpers import (
-    # TODO(potiuk) - rename when all providers are new-style
-    NEW_PROVIDER_DATA_SCHEMA_PATH,
-    # TODO(potiuk) - remove when all providers are new-style
-    OLD_PROVIDER_DATA_SCHEMA_PATH,
+    PROVIDER_DATA_SCHEMA_PATH,
 )
 from airflow_breeze.utils.run_utils import run_command
 from airflow_breeze.utils.version_utils import remove_local_version_suffix
@@ -82,16 +78,15 @@ class PluginInfo(NamedTuple):
 class ProviderPackageDetails(NamedTuple):
     provider_id: str
     provider_yaml_path: Path
-    is_new_structure: bool
     source_date_epoch: int
     full_package_name: str
     pypi_package_name: str
     original_source_provider_package_path: Path
-    old_source_provider_package_path: Path
     root_provider_path: Path
     base_provider_package_path: Path
     documentation_provider_package_path: Path
-    old_documentation_provider_package_path: Path
+    previous_documentation_provider_package_path: Path
+    previous_source_provider_package_path: Path
     changelog_path: Path
     provider_description: str
     dependencies: list[str]
@@ -135,17 +130,9 @@ class PipRequirements(NamedTuple):
         return cls(package=package, version_required=version_required.strip())
 
 
-# TODO(potiuk) - remove when all providers are new-style
 @clearable_cache
-def old_provider_yaml_schema() -> dict[str, Any]:
-    with open(OLD_PROVIDER_DATA_SCHEMA_PATH) as schema_file:
-        return json.load(schema_file)
-
-
-# TODO(potiuk) - rename when all providers are new-style
-@clearable_cache
-def new_provider_yaml_schema() -> dict[str, Any]:
-    with open(NEW_PROVIDER_DATA_SCHEMA_PATH) as schema_file:
+def provider_yaml_schema() -> dict[str, Any]:
+    with open(PROVIDER_DATA_SCHEMA_PATH) as schema_file:
         return json.load(schema_file)
 
 
@@ -153,44 +140,23 @@ PROVIDER_METADATA: dict[str, dict[str, Any]] = {}
 
 
 def refresh_provider_metadata_from_yaml_file(provider_yaml_path: Path):
-    # TODO(potiuk) - this should be removed once we have all providers in the 
new structure
-    is_old_provider_structure = 
provider_yaml_path.is_relative_to(OLD_AIRFLOW_PROVIDERS_SRC_DIR)
     import yaml
 
-    if is_old_provider_structure:
-        schema = old_provider_yaml_schema()
-    else:
-        schema = new_provider_yaml_schema()
     with open(provider_yaml_path) as yaml_file:
         provider_yaml_content = yaml.safe_load(yaml_file)
-    try:
-        import jsonschema
-
-        try:
-            if 
provider_yaml_path.is_relative_to(OLD_AIRFLOW_PROVIDERS_SRC_DIR):
-                jsonschema.validate(provider_yaml_content, schema=schema)
-        except jsonschema.ValidationError as ex:
-            msg = f"Unable to parse: {provider_yaml_path}. Original error 
{type(ex).__name__}: {ex}"
-            raise RuntimeError(msg)
-    except ImportError:
-        # we only validate the schema if jsonschema is available. This is 
needed for autocomplete
-        # to not fail with import error if jsonschema is not installed
-        pass
     provider_id = get_short_package_name(provider_yaml_content["package-name"])
     PROVIDER_METADATA[provider_id] = provider_yaml_content
-    # TODO(potiuk) - remove if when all providers are new-style
-    if not is_old_provider_structure:
-        toml_content = load_pyproject_toml(provider_yaml_path.parent / 
"pyproject.toml")
-        dependencies = toml_content["project"].get("dependencies")
-        if dependencies:
-            PROVIDER_METADATA[provider_id]["dependencies"] = dependencies
-        optional_dependencies = 
toml_content["project"].get("optional-dependencies")
-        if optional_dependencies:
-            PROVIDER_METADATA[provider_id]["optional-dependencies"] = 
optional_dependencies
-        dependency_groups = toml_content.get("dependency-groups")
-        if dependency_groups and dependency_groups.get("dev"):
-            devel_dependencies = dependency_groups.get("dev")
-            PROVIDER_METADATA[provider_id]["devel-dependencies"] = 
devel_dependencies
+    toml_content = load_pyproject_toml(provider_yaml_path.parent / 
"pyproject.toml")
+    dependencies = toml_content["project"].get("dependencies")
+    if dependencies:
+        PROVIDER_METADATA[provider_id]["dependencies"] = dependencies
+    optional_dependencies = 
toml_content["project"].get("optional-dependencies")
+    if optional_dependencies:
+        PROVIDER_METADATA[provider_id]["optional-dependencies"] = 
optional_dependencies
+    dependency_groups = toml_content.get("dependency-groups")
+    if dependency_groups and dependency_groups.get("dev"):
+        devel_dependencies = dependency_groups.get("dev")
+        PROVIDER_METADATA[provider_id]["devel-dependencies"] = 
devel_dependencies
 
 
 def clear_cache_for_provider_metadata(provider_yaml_path: Path):
@@ -450,28 +416,25 @@ def find_matching_long_package_names(
     )
 
 
-# We should not remove those old/original package paths as they are used to 
get changes
-# When documentation is generated
+# !!!! We should not remove those old/original package paths as they are used 
to get changes
+# When documentation is generated using git_log
 def get_original_source_package_path(provider_id: str) -> Path:
     return AIRFLOW_ORIGINAL_PROVIDERS_DIR.joinpath(*provider_id.split("."))
 
 
-def get_old_source_providers_package_path(provider_id: str) -> Path:
-    return OLD_AIRFLOW_PROVIDERS_NS_PACKAGE.joinpath(*provider_id.split("."))
+def get_previous_source_providers_package_path(provider_id: str) -> Path:
+    return 
PREVIOUS_AIRFLOW_PROVIDERS_NS_PACKAGE.joinpath(*provider_id.split("."))
 
 
-# TODO(potiuk) - this should be removed once we have all providers in the new 
structure
-def get_old_documentation_package_path(provider_id: str) -> Path:
+def get_previous_documentation_package_path(provider_id: str) -> Path:
     return DOCS_ROOT / f"apache-airflow-providers-{provider_id.replace('.', 
'-')}"
 
 
-def get_new_documentation_package_path(provider_id: str) -> Path:
-    return AIRFLOW_PROVIDERS_DIR.joinpath(*provider_id.split(".")) / "docs"
+# End of do not remove those package paths.
 
 
-# TODO(potiuk) - this should be removed once we have all providers in the new 
structure
-def get_target_root_for_copied_provider_sources(provider_id: str) -> Path:
-    return GENERATED_PROVIDER_PACKAGES_DIR.joinpath(*provider_id.split("."))
+def get_documentation_package_path(provider_id: str) -> Path:
+    return AIRFLOW_PROVIDERS_DIR.joinpath(*provider_id.split(".")) / "docs"
 
 
 def get_pip_package_name(provider_id: str) -> str:
@@ -523,111 +486,8 @@ def apply_version_suffix(install_clause: str, 
version_suffix: str) -> str:
     return install_clause
 
 
-# TODO(potiuk): remove this function once we have all providers in the new 
structure
-def get_install_requirements_for_old_providers(provider_id: str, 
version_suffix: str) -> str:
-    """
-    Returns install requirements for the package.
-
-    :param provider_id: id of the provider package
-    :param version_suffix: optional version suffix for packages
-
-    :return: install requirements of the package
-    """
-    if provider_id in get_removed_provider_ids():
-        dependencies = get_provider_requirements(provider_id)
-    else:
-        dependencies = PROVIDER_DEPENDENCIES.get(provider_id)["deps"]
-    install_requires = [
-        apply_version_suffix(clause, version_suffix).replace('"', '\\"') for 
clause in dependencies
-    ]
-    return "\n".join(f'    "{ir}",' for ir in install_requires)
-
-
-# TODO(potiuk): remove this function once we have all providers in the new 
structure
-def get_package_extras_for_old_providers(provider_id: str, version_suffix: 
str) -> str:
-    """
-    Finds extras for the package specified.
-
-    :param provider_id: id of the package
-    """
-
-    if provider_id == "providers":
-        return ""
-    if provider_id in get_removed_provider_ids():
-        return ""
-
-    from packaging.requirements import Requirement
-
-    deps_list = list(
-        map(
-            lambda x: Requirement(x).name,
-            PROVIDER_DEPENDENCIES.get(provider_id)["deps"],
-        )
-    )
-    deps = list(filter(lambda x: x.startswith("apache-airflow-providers"), 
deps_list))
-    extras_dict: dict[str, list[str]] = {
-        module: [get_pip_package_name(module)]
-        for module in 
PROVIDER_DEPENDENCIES.get(provider_id)["cross-providers-deps"]
-    }
-
-    to_pop_extras = []
-    # remove the keys from extras_dict if the provider is already a required 
dependency
-    for k, v in extras_dict.items():
-        if v and v[0] in deps:
-            to_pop_extras.append(k)
-
-    for k in to_pop_extras:
-        get_console().print(f"[warning]Removing {k} from extras as it is 
already a required dependency")
-        del extras_dict[k]
-
-    provider_yaml_dict = get_provider_packages_metadata().get(provider_id)
-    additional_extras = provider_yaml_dict.get("additional-extras") if 
provider_yaml_dict else None
-    if additional_extras:
-        for entry in additional_extras:
-            name = entry["name"]
-            dependencies = entry["dependencies"]
-            if name in extras_dict:
-                # remove non-versioned dependencies if versioned ones are 
coming
-                existing_dependencies = set(extras_dict[name])
-                for new_dependency in dependencies:
-                    for dependency in existing_dependencies:
-                        # remove extra if exists as non-versioned one
-                        if new_dependency.startswith(dependency):
-                            extras_dict[name].remove(dependency)
-                            break
-                    extras_dict[name].append(new_dependency)
-            else:
-                extras_dict[name] = dependencies
-    for extra, dependencies in extras_dict.items():
-        extras_dict[extra] = [apply_version_suffix(clause, version_suffix) for 
clause in dependencies]
-
-    extras_requirements_list = []
-    for extra_name in sorted(extras_dict.keys()):
-        dependencies_list = extras_dict[extra_name]
-        if not dependencies_list:
-            continue
-        extras_requirements_list.append(f'"{extra_name}" = [')
-        for dependency in dependencies_list:
-            escaped_dependency = dependency.replace('"', '\\"')
-            extras_requirements_list.append(f'     "{escaped_dependency}",')
-        extras_requirements_list.append("]")
-    return "\n".join(extras_requirements_list)
-
-
-# TODO(potiuk) - this should be simplified once we have all providers in the 
new structure
-def get_provider_yaml(provider_id: str) -> tuple[Path, bool]:
-    new_structure_provider_path = AIRFLOW_PROVIDERS_DIR / 
provider_id.replace(".", "/") / "provider.yaml"
-    if new_structure_provider_path.exists():
-        return new_structure_provider_path, True
-    else:
-        return (
-            OLD_AIRFLOW_PROVIDERS_SRC_DIR
-            / "airflow"
-            / "providers"
-            / provider_id.replace(".", "/")
-            / "provider.yaml",
-            False,
-        )
+def get_provider_yaml(provider_id: str) -> Path:
+    return AIRFLOW_PROVIDERS_DIR / provider_id.replace(".", "/") / 
"provider.yaml"
 
 
 def load_pyproject_toml(pyproject_toml_file_path: Path) -> dict[str, Any]:
@@ -660,35 +520,27 @@ def get_provider_details(provider_id: str) -> 
ProviderPackageDetails:
                     class_name=class_name,
                 )
             )
-    provider_yaml_path, is_new_structure = get_provider_yaml(provider_id)
-    if is_new_structure:
-        pyproject_toml = load_pyproject_toml(provider_yaml_path.parent / 
"pyproject.toml")
-        dependencies = pyproject_toml["project"]["dependencies"]
-        changelog_path = provider_yaml_path.parent / "docs" / "changelog.rst"
-        documentation_provider_package_path = 
get_new_documentation_package_path(provider_id)
-        root_provider_path = provider_yaml_path.parent
-        base_provider_package_path = (provider_yaml_path.parent / "src" / 
"airflow" / "providers").joinpath(
-            *provider_id.split(".")
-        )
-    else:
-        dependencies = provider_info["dependencies"]
-        changelog_path = get_old_source_providers_package_path(provider_id) / 
"CHANGELOG.rst"
-        documentation_provider_package_path = 
get_old_documentation_package_path(provider_id)
-        root_provider_path = get_old_source_providers_package_path(provider_id)
-        base_provider_package_path = 
get_old_source_providers_package_path(provider_id)
+    provider_yaml_path = get_provider_yaml(provider_id)
+    pyproject_toml = load_pyproject_toml(provider_yaml_path.parent / 
"pyproject.toml")
+    dependencies = pyproject_toml["project"]["dependencies"]
+    changelog_path = provider_yaml_path.parent / "docs" / "changelog.rst"
+    documentation_provider_package_path = 
get_documentation_package_path(provider_id)
+    root_provider_path = provider_yaml_path.parent
+    base_provider_package_path = (provider_yaml_path.parent / "src" / 
"airflow" / "providers").joinpath(
+        *provider_id.split(".")
+    )
     return ProviderPackageDetails(
         provider_id=provider_id,
         provider_yaml_path=provider_yaml_path,
-        is_new_structure=is_new_structure,
         source_date_epoch=provider_info["source-date-epoch"],
         full_package_name=f"airflow.providers.{provider_id}",
         pypi_package_name=f"apache-airflow-providers-{provider_id.replace('.', 
'-')}",
         root_provider_path=root_provider_path,
         base_provider_package_path=base_provider_package_path,
-        
old_source_provider_package_path=get_old_source_providers_package_path(provider_id),
         
original_source_provider_package_path=get_original_source_package_path(provider_id),
+        
previous_documentation_provider_package_path=get_previous_documentation_package_path(provider_id),
+        
previous_source_provider_package_path=get_previous_source_providers_package_path(provider_id),
         
documentation_provider_package_path=documentation_provider_package_path,
-        
old_documentation_provider_package_path=get_old_documentation_package_path(provider_id),
         changelog_path=changelog_path,
         provider_description=provider_info["description"],
         dependencies=dependencies,
@@ -805,16 +657,6 @@ def get_provider_jinja_context(
         "VERSION_SUFFIX": format_version_suffix(version_suffix),
         "PIP_REQUIREMENTS": 
get_provider_requirements(provider_details.provider_id),
         "PROVIDER_DESCRIPTION": provider_details.provider_description,
-        # TODO(potiuk) - remove when all providers are new-style
-        "INSTALL_REQUIREMENTS": get_install_requirements_for_old_providers(
-            provider_id=provider_details.provider_id, 
version_suffix=version_suffix
-        ),
-        # TODO(potiuk) - remove when all providers are new-style
-        "EXTRAS_REQUIREMENTS": get_package_extras_for_old_providers(
-            provider_id=provider_details.provider_id, 
version_suffix=version_suffix
-        ),
-        # TODO(potiuk) - remove when all providers are new-style
-        "DEPENDENCY_GROUPS": {},
         "CHANGELOG_RELATIVE_PATH": os.path.relpath(
             provider_details.root_provider_path,
             provider_details.documentation_provider_package_path,
@@ -969,3 +811,90 @@ def get_latest_provider_tag(provider_id: str, suffix: str) 
-> str:
     provider_details = get_provider_details(provider_id)
     current_version = provider_details.versions[0]
     return get_version_tag(current_version, provider_id, suffix)
+
+
+def regenerate_pyproject_toml(
+    context: dict[str, Any], provider_details: ProviderPackageDetails, 
version_suffix: str | None
+):
+    get_pyproject_toml_path = provider_details.root_provider_path / 
"pyproject.toml"
+    # We want to preserve comments in dependencies - both required and optional -
+    # so we do not parse the toml file; instead we extract the dependencies "as is"
+    # in text form and pass them to the context. While not perfect for generic toml,
+    # this is good enough for provider pyproject.toml files, because we fully control
+    # their content: they are generated from our templates (except the dependencies
+    pyproject_toml_content = get_pyproject_toml_path.read_text()
+    required_dependencies: list[str] = []
+    optional_dependencies: list[str] = []
+    dependency_groups: list[str] = []
+    in_required_dependencies = False
+    in_optional_dependencies = False
+    in_dependency_groups = False
+    for line in pyproject_toml_content.splitlines():
+        if line == "dependencies = [":
+            in_required_dependencies = True
+            continue
+        if in_required_dependencies and line == "]":
+            in_required_dependencies = False
+            continue
+        if line == "[dependency-groups]":
+            in_dependency_groups = True
+            continue
+        if in_dependency_groups and line == "":
+            in_dependency_groups = False
+            continue
+        if in_dependency_groups and line.startswith("["):
+            in_dependency_groups = False
+        if line == "[project.optional-dependencies]":
+            in_optional_dependencies = True
+            continue
+        if in_optional_dependencies and line == "":
+            in_optional_dependencies = False
+            continue
+        if in_optional_dependencies and line.startswith("["):
+            in_optional_dependencies = False
+        if in_required_dependencies:
+            required_dependencies.append(line)
+        if in_optional_dependencies:
+            optional_dependencies.append(line)
+        if in_dependency_groups:
+            dependency_groups.append(line)
+    matcher = re.compile(r"(^.*\")(apache-airflow.*>=[\d.]*)(\".*)$")
+    # When a version suffix is set (pre-release build), apply it to the minimum
+    # versions of the "apache-airflow*>=X.Y.Z" required-dependency specifiers
+    if version_suffix:
+        new_dependencies = []
+        for dependency in required_dependencies:
+            match = matcher.match(dependency)
+            if match:
+                specifier_with_version_suffix = 
apply_version_suffix(match.group(2), version_suffix)
+                new_dependencies.append(match.group(1) + 
specifier_with_version_suffix + match.group(3))
+            else:
+                new_dependencies.append(dependency)
+        required_dependencies = new_dependencies
+    context["INSTALL_REQUIREMENTS"] = "\n".join(required_dependencies)
+
+    # Add cross-provider dependencies to the optional dependencies if they are 
missing
+    for module in 
PROVIDER_DEPENDENCIES.get(provider_details.provider_id)["cross-providers-deps"]:
+        if f'"{module}" = [' not in optional_dependencies and 
get_pip_package_name(module) not in "\n".join(
+            required_dependencies
+        ):
+            optional_dependencies.append(f'"{module}" = [')
+            optional_dependencies.append(f'    
"{get_pip_package_name(module)}"')
+            optional_dependencies.append("]")
+    context["EXTRAS_REQUIREMENTS"] = "\n".join(optional_dependencies)
+    context["DEPENDENCY_GROUPS"] = "\n".join(dependency_groups)
+
+    get_pyproject_toml_content = render_template(
+        template_name="pyproject",
+        context=context,
+        extension=".toml",
+        autoescape=False,
+        lstrip_blocks=True,
+        trim_blocks=True,
+        keep_trailing_newline=True,
+    )
+
+    get_pyproject_toml_path.write_text(get_pyproject_toml_content)
+    get_console().print(
+        f"[info]Generated {get_pyproject_toml_path} for the 
{provider_details.provider_id} provider\n"
+    )
diff --git a/dev/breeze/src/airflow_breeze/utils/path_utils.py 
b/dev/breeze/src/airflow_breeze/utils/path_utils.py
index 7861b102109..676194280b6 100644
--- a/dev/breeze/src/airflow_breeze/utils/path_utils.py
+++ b/dev/breeze/src/airflow_breeze/utils/path_utils.py
@@ -288,10 +288,13 @@ AIRFLOW_WWW_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "www"
 AIRFLOW_UI_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "ui"
 AIRFLOW_ORIGINAL_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "airflow" / "providers"
 AIRFLOW_PROVIDERS_DIR = AIRFLOW_SOURCES_ROOT / "providers"
-OLD_AIRFLOW_PROVIDERS_SRC_DIR = AIRFLOW_PROVIDERS_DIR / "src"
-OLD_AIRFLOW_PROVIDERS_NS_PACKAGE = OLD_AIRFLOW_PROVIDERS_SRC_DIR / "airflow" / 
"providers"
-OLD_TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_DIR / "tests"
-OLD_SYSTEM_TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_DIR / "tests" / "system"
+
+PREVIOUS_AIRFLOW_PROVIDERS_SRC_DIR = AIRFLOW_PROVIDERS_DIR / "src"
+PREVIOUS_AIRFLOW_PROVIDERS_NS_PACKAGE = PREVIOUS_AIRFLOW_PROVIDERS_SRC_DIR / 
"airflow" / "providers"
+PREVIOUS_TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_DIR / "tests"
+PREVIOUS_SYSTEM_TESTS_PROVIDERS_ROOT = AIRFLOW_PROVIDERS_DIR / "tests" / 
"system"
+
+AIRFLOW_TEST_COMMON_DIR = AIRFLOW_SOURCES_ROOT / "tests_common"
 DOCS_ROOT = AIRFLOW_SOURCES_ROOT / "docs"
 BUILD_CACHE_DIR = AIRFLOW_SOURCES_ROOT / ".build"
 GENERATED_DIR = AIRFLOW_SOURCES_ROOT / "generated"
diff --git a/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py 
b/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py
index c2c16880892..2fe5cd3525d 100644
--- a/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py
+++ b/dev/breeze/src/airflow_breeze/utils/publish_docs_helpers.py
@@ -22,27 +22,17 @@ from pathlib import Path
 
 from airflow_breeze.utils.path_utils import (
     AIRFLOW_SOURCES_ROOT,
-    OLD_AIRFLOW_PROVIDERS_NS_PACKAGE,
-    OLD_SYSTEM_TESTS_PROVIDERS_ROOT,
 )
 
 CONSOLE_WIDTH = 180
 
-# TODO(potiuk): remove it when we move all providers to the new structure
-OLD_PROVIDER_DATA_SCHEMA_PATH = AIRFLOW_SOURCES_ROOT / "airflow" / 
"provider.yaml.schema.json"
-# TODO(potiuk) - rename when all providers are new-style
-NEW_PROVIDER_DATA_SCHEMA_PATH = AIRFLOW_SOURCES_ROOT / "airflow" / 
"new_provider.yaml.schema.json"
+PROVIDER_DATA_SCHEMA_PATH = AIRFLOW_SOURCES_ROOT / "airflow" / 
"provider.yaml.schema.json"
 
 
 def _filepath_to_module(filepath: str):
-    # TODO: handle relative to providers project
     return str(Path(filepath).relative_to(AIRFLOW_SOURCES_ROOT)).replace("/", 
".")
 
 
-def _filepath_to_system_tests(filepath: str):
-    return str(OLD_SYSTEM_TESTS_PROVIDERS_ROOT / 
Path(filepath).relative_to(OLD_AIRFLOW_PROVIDERS_NS_PACKAGE))
-
-
 def pretty_format_path(path: str, start: str) -> str:
     """Formats path nicely."""
     relpath = os.path.relpath(path, start)
diff --git a/dev/breeze/src/airflow_breeze/utils/run_tests.py 
b/dev/breeze/src/airflow_breeze/utils/run_tests.py
index 0d39ba58c49..4073ba33cef 100644
--- a/dev/breeze/src/airflow_breeze/utils/run_tests.py
+++ b/dev/breeze/src/airflow_breeze/utils/run_tests.py
@@ -175,8 +175,7 @@ TEST_TYPE_CORE_MAP_TO_PYTEST_ARGS: dict[str, list[str]] = {
     "OpenAPI": ["clients/python"],
 }
 
-# TODO(potiuk) - rename when all providers are new-style
-ALL_NEW_PROVIDER_TEST_FOLDERS: list[str] = sorted(
+ALL_PROVIDER_TEST_FOLDERS: list[str] = sorted(
     [
         path.relative_to(AIRFLOW_SOURCES_ROOT).as_posix()
         for path in AIRFLOW_SOURCES_ROOT.glob("providers/*/tests/")
@@ -200,7 +199,7 @@ ALL_PROVIDER_INTEGRATION_TEST_FOLDERS: list[str] = sorted(
 
 TEST_GROUP_TO_TEST_FOLDERS: dict[GroupOfTests, list[str]] = {
     GroupOfTests.CORE: ["tests"],
-    GroupOfTests.PROVIDERS: ALL_NEW_PROVIDER_TEST_FOLDERS,
+    GroupOfTests.PROVIDERS: ALL_PROVIDER_TEST_FOLDERS,
     GroupOfTests.TASK_SDK: ["task_sdk/tests"],
     GroupOfTests.HELM: ["helm_tests"],
     GroupOfTests.INTEGRATION_CORE: ["tests/integration"],
diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py 
b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
index 2420a3fb43e..1cf969539f5 100644
--- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py
+++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py
@@ -66,10 +66,7 @@ from airflow_breeze.utils.packages import 
get_available_packages
 from airflow_breeze.utils.path_utils import (
     AIRFLOW_PROVIDERS_DIR,
     AIRFLOW_SOURCES_ROOT,
-    DOCS_DIR,
-    OLD_AIRFLOW_PROVIDERS_NS_PACKAGE,
-    OLD_SYSTEM_TESTS_PROVIDERS_ROOT,
-    OLD_TESTS_PROVIDERS_ROOT,
+    AIRFLOW_TEST_COMMON_DIR,
 )
 from airflow_breeze.utils.provider_dependencies import DEPENDENCIES, 
get_related_providers
 from airflow_breeze.utils.run_utils import run_command
@@ -190,6 +187,7 @@ CI_FILE_GROUP_MATCHES = HashableDict(
             r"^airflow/.*\.py$",
             r"^chart",
             r"^providers/.*/src/",
+            r"^providers/.*/docs/",
             r"^task_sdk/src/",
             r"^tests/system",
             r"^CHANGELOG\.txt",
@@ -323,48 +321,27 @@ TEST_TYPE_EXCLUDES = HashableDict({})
 
 def find_provider_affected(changed_file: str, include_docs: bool) -> str | 
None:
     file_path = AIRFLOW_SOURCES_ROOT / changed_file
-    # Check providers in SRC/SYSTEM_TESTS/TESTS/(optionally) DOCS
-    # TODO(potiuk) - this should be removed once we have all providers in the 
new structure (OLD + docs)
-    for provider_root in (
-        OLD_SYSTEM_TESTS_PROVIDERS_ROOT,
-        OLD_TESTS_PROVIDERS_ROOT,
-        OLD_AIRFLOW_PROVIDERS_NS_PACKAGE,
-        AIRFLOW_PROVIDERS_DIR,
-    ):
-        if file_path.is_relative_to(provider_root):
-            provider_base_path = provider_root
-            break
-    else:
-        if include_docs and file_path.is_relative_to(DOCS_DIR):
-            relative_path = file_path.relative_to(DOCS_DIR)
-            if relative_path.parts[0].startswith("apache-airflow-providers-"):
-                return 
relative_path.parts[0].replace("apache-airflow-providers-", "").replace("-", 
".")
-        # This is neither providers nor provider docs files - not a provider 
change
-        return None
-
     if not include_docs:
         for parent_dir_path in file_path.parents:
             if parent_dir_path.name == "docs" and (parent_dir_path.parent / 
"provider.yaml").exists():
                 # Skip Docs changes if include_docs is not set
                 return None
-
     # Find if the path under src/system tests/tests belongs to provider or is 
a common code across
     # multiple providers
     for parent_dir_path in file_path.parents:
-        if parent_dir_path == provider_base_path:
+        if parent_dir_path == AIRFLOW_PROVIDERS_DIR:
             # We have not found any provider specific path up to the root of 
the provider base folder
             break
-        relative_path = parent_dir_path.relative_to(provider_base_path)
-        # check if this path belongs to a specific provider
-        # TODO(potiuk) - this should be removed once we have all providers in 
the new structure
-        if (OLD_AIRFLOW_PROVIDERS_NS_PACKAGE / relative_path / 
"provider.yaml").exists():
-            return 
str(parent_dir_path.relative_to(provider_base_path)).replace(os.sep, ".")
-        if (parent_dir_path / "provider.yaml").exists():
-            # new providers structure
-            return str(relative_path).replace(os.sep, ".")
-
-    # If we got here it means that some "common" files were modified. so we 
need to test all Providers
-    return "Providers"
+        if parent_dir_path.is_relative_to(AIRFLOW_PROVIDERS_DIR):
+            relative_path = parent_dir_path.relative_to(AIRFLOW_PROVIDERS_DIR)
+            # check if this path belongs to a specific provider
+            if (parent_dir_path / "provider.yaml").exists():
+                # new providers structure
+                return str(relative_path).replace(os.sep, ".")
+    if file_path.is_relative_to(AIRFLOW_TEST_COMMON_DIR):
+        # if tests_common changes, we want to run tests for all providers, as 
they might start failing
+        return "Providers"
+    return None
 
 
 def _match_files_with_regexps(files: tuple[str, ...], matched_files, 
matching_regexps):
diff --git a/dev/breeze/tests/test_packages.py 
b/dev/breeze/tests/test_packages.py
index 74956e7e89d..ec6c894c734 100644
--- a/dev/breeze/tests/test_packages.py
+++ b/dev/breeze/tests/test_packages.py
@@ -30,12 +30,11 @@ from airflow_breeze.utils.packages import (
     get_available_packages,
     get_cross_provider_dependent_packages,
     get_dist_package_name_prefix,
-    get_install_requirements_for_old_providers,
     get_long_package_name,
     get_min_airflow_version,
-    get_old_documentation_package_path,
-    get_old_source_providers_package_path,
     get_pip_package_name,
+    get_previous_documentation_package_path,
+    get_previous_source_providers_package_path,
     get_provider_info_dict,
     get_provider_requirements,
     get_removed_provider_ids,
@@ -52,6 +51,19 @@ def test_get_available_packages():
     assert all(package not in REGULAR_DOC_PACKAGES for package in 
get_available_packages())
 
 
+def test_get_source_package_path():
+    assert get_previous_source_providers_package_path("apache.hdfs") == 
AIRFLOW_SOURCES_ROOT.joinpath(
+        "providers", "src", "airflow", "providers", "apache", "hdfs"
+    )
+
+
+def test_get_old_documentation_package_path():
+    assert (
+        get_previous_documentation_package_path("apache.hdfs")
+        == DOCS_ROOT / "apache-airflow-providers-apache-hdfs"
+    )
+
+
 def test_expand_all_provider_packages():
     assert len(expand_all_provider_packages(("all-providers",))) > 70
 
@@ -147,94 +159,6 @@ def test_find_matching_long_package_name_bad_filter():
         find_matching_long_package_names(short_packages=(), 
filters=("bad-filter-*",))
 
 
-def test_get_source_package_path():
-    assert get_old_source_providers_package_path("apache.hdfs") == 
AIRFLOW_SOURCES_ROOT.joinpath(
-        "providers", "src", "airflow", "providers", "apache", "hdfs"
-    )
-
-
-def test_get_old_documentation_package_path():
-    assert (
-        get_old_documentation_package_path("apache.hdfs")
-        == DOCS_ROOT / "apache-airflow-providers-apache-hdfs"
-    )
-
-
-# TODO(potiuk) - remove when all providers are new-style
[email protected](
-    "provider, version_suffix, expected",
-    [
-        pytest.param(
-            "fab",
-            "",
-            """
-    "apache-airflow-providers-common-compat>=1.2.1",
-    "apache-airflow>=3.0.0.dev0",
-    "flask-appbuilder==4.5.3",
-    "flask-login>=0.6.2",
-    "flask>=2.2,<2.3",
-    "google-re2>=1.0",
-    "jmespath>=0.7.0",
-    """,
-            id="No suffix fab",
-        ),
-        pytest.param(
-            "fab",
-            "dev0",
-            """
-    "apache-airflow-providers-common-compat>=1.2.1.dev0",
-    "apache-airflow>=3.0.0.dev0",
-    "flask-appbuilder==4.5.3",
-    "flask-login>=0.6.2",
-    "flask>=2.2,<2.3",
-    "google-re2>=1.0",
-    "jmespath>=0.7.0",
-    """,
-            id="dev0 suffix fab",
-        ),
-        pytest.param(
-            "fab",
-            "beta0",
-            """
-    "apache-airflow-providers-common-compat>=1.2.1b0",
-    "apache-airflow>=3.0.0b0",
-    "flask-appbuilder==4.5.3",
-    "flask-login>=0.6.2",
-    "flask>=2.2,<2.3",
-    "google-re2>=1.0",
-    "jmespath>=0.7.0",
-    """,
-            id="beta0 suffix fab",
-        ),
-        pytest.param(
-            "postgres",
-            "beta0",
-            """
-    "apache-airflow-providers-common-sql>=1.20.0b0",
-    "apache-airflow>=2.9.0b0",
-    "asyncpg>=0.30.0",
-    "psycopg2-binary>=2.9.9",
-    """,
-            id="beta0 suffix postgres",
-        ),
-        pytest.param(
-            "postgres",
-            "",
-            """
-    "apache-airflow-providers-common-sql>=1.20.0",
-    "apache-airflow>=2.9.0",
-    "asyncpg>=0.30.0",
-    "psycopg2-binary>=2.9.9",
-    """,
-            id="No suffix postgres",
-        ),
-    ],
-)
-def test_get_install_requirements(provider: str, version_suffix: str, 
expected: str):
-    actual = get_install_requirements_for_old_providers(provider, 
version_suffix)
-    assert actual.strip() == expected.strip()
-
-
 @pytest.mark.parametrize(
     "provider_id, pip_package_name",
     [
diff --git a/dev/breeze/tests/test_provider_documentation.py 
b/dev/breeze/tests/test_provider_documentation.py
index a2e84ecd108..20e8a9cd787 100644
--- a/dev/breeze/tests/test_provider_documentation.py
+++ b/dev/breeze/tests/test_provider_documentation.py
@@ -191,13 +191,13 @@ LONG_HASH_123144 SHORT_HASH 2023-01-01 Description `with` 
pr (#12346)
 
 Latest change: 2023-01-01
 
-============================================  ===========  
==================================
-Commit                                        Committed    Subject
-============================================  ===========  
==================================
-`SHORT_HASH <https://url/LONG_HASH_123144>`_  2023-01-01   ``Description 
'with' no pr``
-`SHORT_HASH <https://url/LONG_HASH_123144>`_  2023-01-01   ``Description 
'with' pr (#12345)``
-`SHORT_HASH <https://url/LONG_HASH_123144>`_  2023-01-01   ``Description 
'with' pr (#12346)``
-============================================  ===========  
==================================""",
+=============================================  ===========  
==================================
+Commit                                         Committed    Subject
+=============================================  ===========  
==================================
+`SHORT_HASH <https://url/LONG_HASH_123144>`__  2023-01-01   ``Description 
'with' no pr``
+`SHORT_HASH <https://url/LONG_HASH_123144>`__  2023-01-01   ``Description 
'with' pr (#12345)``
+`SHORT_HASH <https://url/LONG_HASH_123144>`__  2023-01-01   ``Description 
'with' pr (#12346)``
+=============================================  ===========  
==================================""",
             False,
             3,
         ),
diff --git a/dev/breeze/tests/test_pytest_args_for_test_types.py 
b/dev/breeze/tests/test_pytest_args_for_test_types.py
index 7f145722918..426977375c5 100644
--- a/dev/breeze/tests/test_pytest_args_for_test_types.py
+++ b/dev/breeze/tests/test_pytest_args_for_test_types.py
@@ -23,8 +23,7 @@ from airflow_breeze.utils.path_utils import 
AIRFLOW_SOURCES_ROOT
 from airflow_breeze.utils.run_tests import convert_parallel_types_to_folders, 
convert_test_type_to_pytest_args
 
 
-# TODO(potiuk): rename to all_providers when we move all providers to the new 
structure
-def _all_new_providers() -> list[str]:
+def _all_providers() -> list[str]:
     providers_root = AIRFLOW_SOURCES_ROOT / "providers"
     return sorted(
         file.parent.relative_to(providers_root).as_posix() for file in 
providers_root.rglob("provider.yaml")
@@ -98,7 +97,7 @@ def _find_all_integration_folders() -> list[str]:
             GroupOfTests.PROVIDERS,
             "Providers",
             [
-                *[f"providers/{provider}/tests" for provider in 
_all_new_providers()],
+                *[f"providers/{provider}/tests" for provider in 
_all_providers()],
             ],
         ),
         (
@@ -126,7 +125,7 @@ def _find_all_integration_folders() -> list[str]:
             [
                 *[
                     f"providers/{provider}/tests"
-                    for provider in _all_new_providers()
+                    for provider in _all_providers()
                     if provider not in ["amazon", "google", "microsoft/azure"]
                 ],
             ],
@@ -135,7 +134,7 @@ def _find_all_integration_folders() -> list[str]:
             GroupOfTests.PROVIDERS,
             "Providers[-edge]",
             [
-                *[f"providers/{provider}/tests" for provider in 
_all_new_providers() if provider != "edge"],
+                *[f"providers/{provider}/tests" for provider in 
_all_providers() if provider != "edge"],
             ],
         ),
         (
@@ -147,7 +146,7 @@ def _find_all_integration_folders() -> list[str]:
             GroupOfTests.PROVIDERS,
             "All-Quarantined",
             [
-                *[f"providers/{provider}/tests" for provider in 
_all_new_providers()],
+                *[f"providers/{provider}/tests" for provider in 
_all_providers()],
                 "-m",
                 "quarantined",
                 "--include-quarantined",
@@ -249,7 +248,7 @@ def test_pytest_args_for_missing_provider():
             GroupOfTests.PROVIDERS,
             "Providers",
             [
-                *[f"providers/{provider}/tests" for provider in 
_all_new_providers()],
+                *[f"providers/{provider}/tests" for provider in 
_all_providers()],
             ],
         ),
         (
@@ -273,7 +272,7 @@ def test_pytest_args_for_missing_provider():
             [
                 *[
                     f"providers/{provider}/tests"
-                    for provider in _all_new_providers()
+                    for provider in _all_providers()
                     if provider not in ["amazon", "google"]
                 ],
             ],
@@ -284,7 +283,7 @@ def test_pytest_args_for_missing_provider():
             [
                 *[
                     f"providers/{provider}/tests"
-                    for provider in _all_new_providers()
+                    for provider in _all_providers()
                     if provider not in ["amazon", "google"]
                 ],
                 *["providers/amazon/tests", "providers/google/tests"],
diff --git a/dev/breeze/tests/test_selective_checks.py 
b/dev/breeze/tests/test_selective_checks.py
index 36b50815bbe..87206ea6613 100644
--- a/dev/breeze/tests/test_selective_checks.py
+++ b/dev/breeze/tests/test_selective_checks.py
@@ -1938,7 +1938,7 @@ def test_upgrade_to_newer_dependencies(
     "files, expected_outputs,",
     [
         pytest.param(
-            ("docs/apache-airflow-providers-google/docs.rst",),
+            ("providers/google/docs/some_file.rst",),
             {
                 "docs-list-as-string": "amazon apache.beam apache.cassandra "
                 "cncf.kubernetes common.compat common.sql facebook google 
hashicorp "
@@ -1958,14 +1958,14 @@ def test_upgrade_to_newer_dependencies(
             id="Common SQL provider package python files changed",
         ),
         pytest.param(
-            ("docs/apache-airflow-providers-airbyte/docs.rst",),
+            ("providers/airbyte/docs/some_file.rst",),
             {
                 "docs-list-as-string": "airbyte",
             },
             id="Airbyte provider docs changed",
         ),
         pytest.param(
-            ("docs/apache-airflow-providers-airbyte/docs.rst", 
"docs/apache-airflow/docs.rst"),
+            ("providers/airbyte/docs/some_file.rst", 
"docs/apache-airflow/docs.rst"),
             {
                 "docs-list-as-string": "apache-airflow airbyte",
             },
@@ -1973,7 +1973,7 @@ def test_upgrade_to_newer_dependencies(
         ),
         pytest.param(
             (
-                "docs/apache-airflow-providers-airbyte/docs.rst",
+                "providers/airbyte/docs/some_file.rst",
                 "docs/apache-airflow/docs.rst",
                 "docs/apache-airflow-providers/docs.rst",
             ),
diff --git a/dev/breeze/uv.lock b/dev/breeze/uv.lock
index 9b98a8746a2..73a8d190cdb 100644
--- a/dev/breeze/uv.lock
+++ b/dev/breeze/uv.lock
@@ -47,6 +47,7 @@ dependencies = [
     { name = "pytest-xdist" },
     { name = "pyyaml" },
     { name = "requests" },
+    { name = "restructuredtext-lint" },
     { name = "rich" },
     { name = "rich-click" },
     { name = "semver" },
@@ -80,6 +81,7 @@ requires-dist = [
     { name = "pytest-xdist", specifier = ">=3.3.1" },
     { name = "pyyaml", specifier = ">=6.0.1" },
     { name = "requests", specifier = ">=2.31.0" },
+    { name = "restructuredtext-lint", specifier = ">=1.4.0" },
     { name = "rich", specifier = ">=13.6.0" },
     { name = "rich-click", specifier = ">=1.7.1" },
     { name = "semver", specifier = ">=3.0.2" },
@@ -1380,6 +1382,15 @@ wheels = [
     { url = 
"https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl";,
 hash = 
"sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size 
= 54481 },
 ]
 
+[[package]]
+name = "restructuredtext-lint"
+version = "1.4.0"
+source = { registry = "https://pypi.org/simple"; }
+dependencies = [
+    { name = "docutils" },
+]
+sdist = { url = 
"https://files.pythonhosted.org/packages/48/9c/6d8035cafa2d2d314f34e6cd9313a299de095b26e96f1c7312878f988eec/restructuredtext_lint-1.4.0.tar.gz";,
 hash = 
"sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45", size 
= 16723 }
+
 [[package]]
 name = "rfc3986"
 version = "2.0.0"
diff --git a/docs/.gitignore b/docs/.gitignore
index 009cbfc28a9..d00593d0ade 100644
--- a/docs/.gitignore
+++ b/docs/.gitignore
@@ -1,97 +1 @@
-# TODO(potiuk): change it to apache-airflow-providers-* after all providers 
are migrated to the new structure
-# Eventually when we swtich to individually build docs for each provider, we 
should remove this altogether
-apache-airflow-providers-airbyte
-apache-airflow-providers-alibaba
-apache-airflow-providers-amazon
-apache-airflow-providers-apache-beam
-apache-airflow-providers-apache-cassandra
-apache-airflow-providers-apache-drill
-apache-airflow-providers-apache-druid
-apache-airflow-providers-apache-flink
-apache-airflow-providers-apache-hdfs
-apache-airflow-providers-apache-hive
-apache-airflow-providers-apache-iceberg
-apache-airflow-providers-apache-impala
-apache-airflow-providers-apache-kafka
-apache-airflow-providers-apache-kylin
-apache-airflow-providers-apache-livy
-apache-airflow-providers-apache-pig
-apache-airflow-providers-apache-pinot
-apache-airflow-providers-apache-spark
-apache-airflow-providers-apprise
-apache-airflow-providers-arangodb
-apache-airflow-providers-asana
-apache-airflow-providers-atlassian-jira
-apache-airflow-providers-celery
-apache-airflow-providers-cloudant
-apache-airflow-providers-cncf-kubernetes
-apache-airflow-providers-cohere
-apache-airflow-providers-common-compat
-apache-airflow-providers-common-io
-apache-airflow-providers-common-sql
-apache-airflow-providers-databricks
-apache-airflow-providers-datadog
-apache-airflow-providers-dbt-cloud
-apache-airflow-providers-dingding
-apache-airflow-providers-discord
-apache-airflow-providers-docker
-apache-airflow-providers-edge
-apache-airflow-providers-elasticsearch
-apache-airflow-providers-exasol
-apache-airflow-providers-fab
-apache-airflow-providers-facebook
-apache-airflow-providers-ftp
-apache-airflow-providers-github
-apache-airflow-providers-google
-apache-airflow-providers-grpc
-apache-airflow-providers-http
-apache-airflow-providers-microsoft-azure
-apache-airflow-providers-jdbc
-apache-airflow-providers-influxdb
-apache-airflow-providers-microsoft-mssql
-apache-airflow-providers-microsoft-psrp
-apache-airflow-providers-microsoft-winrm
-apache-airflow-providers-mongo
-apache-airflow-providers-openlineage
-apache-airflow-providers-hashicorp
-apache-airflow-providers-imap
-apache-airflow-providers-neo4j
-apache-airflow-providers-openai
-apache-airflow-providers-openfaas
-apache-airflow-providers-opensearch
-apache-airflow-providers-opsgenie
-apache-airflow-providers-oracle
-apache-airflow-providers-papermill
-apache-airflow-providers-pgvector
-apache-airflow-providers-mysql
-apache-airflow-providers-odbc
-apache-airflow-providers-jenkins
-apache-airflow-providers-pagerduty
-apache-airflow-providers-openai
-apache-airflow-providers-pgvector
-apache-airflow-providers-pinecone
-apache-airflow-providers-postgres
-apache-airflow-providers-presto
-apache-airflow-providers-qdrant
-apache-airflow-providers-redis
-apache-airflow-providers-salesforce
-apache-airflow-providers-samba
-apache-airflow-providers-segment
-apache-airflow-providers-sendgrid
-apache-airflow-providers-sftp
-apache-airflow-providers-singularity
-apache-airflow-providers-slack
-apache-airflow-providers-smtp
-apache-airflow-providers-sqlite
-apache-airflow-providers-ssh
-apache-airflow-providers-snowflake
-apache-airflow-providers-standard
-apache-airflow-providers-teradata
-apache-airflow-providers-tableau
-apache-airflow-providers-telegram
-apache-airflow-providers-trino
-apache-airflow-providers-vertica
-apache-airflow-providers-weaviate
-apache-airflow-providers-yandex
-apache-airflow-providers-ydb
-apache-airflow-providers-zendesk
+apache-airflow-providers-*
diff --git a/docs/conf.py b/docs/conf.py
index cb3abd612ac..50aa390cbd6 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -84,18 +84,12 @@ elif PACKAGE_NAME.startswith("apache-airflow-providers-"):
     # with "src" in the output paths of modules which we don't want
 
     package_id = PACKAGE_NAME[len("apache-airflow-providers-") :].replace("-", 
".")
-    # TODO(potiuk) - remove the if when all providers are new-style
-    if CURRENT_PROVIDER["is_new_provider"]:
-        base_provider_dir = (ROOT_DIR / 
"providers").joinpath(*package_id.split("."))
-        PACKAGE_DIR = base_provider_dir / "src" / "airflow"
-        PACKAGE_VERSION = CURRENT_PROVIDER["versions"][0]
-        SYSTEM_TESTS_DIR = base_provider_dir / "tests" / "system"
-        target_dir = ROOT_DIR / "docs" / PACKAGE_NAME
-        conf_py_path = f"/providers/{package_id.replace('.', '/')}/docs/"
-    else:
-        PACKAGE_DIR = ROOT_DIR / "providers" / "src" / "airflow"
-        PACKAGE_VERSION = CURRENT_PROVIDER["versions"][0]
-        SYSTEM_TESTS_DIR = ROOT_DIR / "providers" / "tests" / "system"
+    base_provider_dir = (ROOT_DIR / 
"providers").joinpath(*package_id.split("."))
+    PACKAGE_DIR = base_provider_dir / "src" / "airflow"
+    PACKAGE_VERSION = CURRENT_PROVIDER["versions"][0]
+    SYSTEM_TESTS_DIR = base_provider_dir / "tests" / "system"
+    target_dir = ROOT_DIR / "docs" / PACKAGE_NAME
+    conf_py_path = f"/providers/{package_id.replace('.', '/')}/docs/"
 elif PACKAGE_NAME == "apache-airflow-providers":
     from provider_yaml_utils import load_package_data
 
diff --git a/docs/exts/docs_build/docs_builder.py 
b/docs/exts/docs_build/docs_builder.py
index 3ec517dc5f2..8d569f300be 100644
--- a/docs/exts/docs_build/docs_builder.py
+++ b/docs/exts/docs_build/docs_builder.py
@@ -45,12 +45,11 @@ class AirflowDocsBuilder:
 
     def __init__(self, package_name: str):
         self.package_name = package_name
-        self.new_structure_provider = False
+        self.is_provider = False
         if self.package_name.startswith("apache-airflow-providers-"):
             self.package_id = 
self.package_name.split("apache-airflow-providers-", 1)[1].replace("-", ".")
             self.provider_path = (Path(ROOT_PROJECT_DIR) / 
"providers").joinpath(*self.package_id.split("."))
-            if self.provider_path.exists():
-                self.new_structure_provider = True
+            self.is_provider = True
 
     @property
     def _doctree_dir(self) -> str:
@@ -123,9 +122,8 @@ class AirflowDocsBuilder:
         shutil.rmtree(self.log_spelling_output_dir, ignore_errors=True)
         os.makedirs(self.log_spelling_output_dir, exist_ok=True)
 
-        # TODO(potiuk) - remove if when all providers are new-style
-        if self.new_structure_provider:
-            self.cleanup_new_provider_dir()
+        if self.is_provider:
+            self.cleanup_provider_dir()
         build_cmd = [
             "sphinx-build",
             "-W",  # turn warnings into errors
@@ -196,28 +194,17 @@ class AirflowDocsBuilder:
                 console.print(
                     f"[bright_blue]{self.package_name:60}:[/] [green]Finished 
spell-checking successfully[/]"
                 )
-        if self.new_structure_provider:
+        if self.is_provider:
             if skip_deletion:
                 console.print(
-                    f"[bright_blue]{self.package_name:60}:[/] 
[magenta](NEW)[/] Leaving generated files in {self._src_dir}."
+                    f"[bright_blue]{self.package_name:60}:[/] Leaving 
generated files in {self._src_dir}."
                 )
             else:
-                console.print(
-                    f"[bright_blue]{self.package_name:60}:[/] 
[magenta](NEW)[/] Cleaning up generated files in {self._src_dir}."
-                )
                 shutil.rmtree(self._src_dir, ignore_errors=True)
         return spelling_errors, build_errors
 
-    # TODO(potiuk) - rename when all providers are new-style
-    def cleanup_new_provider_dir(self):
-        console.print(
-            f"[bright_blue]{self.package_name:60}:[/] [magenta](NEW)[/] 
Cleaning up old files in {self._src_dir}."
-        )
+    def cleanup_provider_dir(self):
         shutil.rmtree(self._src_dir, ignore_errors=True)
-        console.print(
-            f"[bright_blue]{self.package_name:60}:[/] [magenta](NEW)[/] 
Copying docs "
-            f"from {self.provider_path}/docs to {self._src_dir}."
-        )
         shutil.copytree(
             f"{self.provider_path}/docs",
             self._src_dir,
@@ -234,9 +221,8 @@ class AirflowDocsBuilder:
         build_errors = []
         os.makedirs(self._build_dir, exist_ok=True)
 
-        # TODO(potiuk) - remove if when all providers are new-style
-        if self.new_structure_provider:
-            self.cleanup_new_provider_dir()
+        if self.is_provider:
+            self.cleanup_provider_dir()
         build_cmd = [
             "sphinx-build",
             "-T",  # show full traceback on exception
@@ -295,15 +281,12 @@ class AirflowDocsBuilder:
             console.print(
                 f"[bright_blue]{self.package_name:60}:[/] [green]Finished docs 
building successfully[/]"
             )
-        if self.new_structure_provider:
+        if self.is_provider:
             if skip_deletion:
                 console.print(
-                    f"[bright_blue]{self.package_name:60}:[/] 
[magenta](NEW)[/] Leaving generated files in {self._src_dir}."
+                    f"[bright_blue]{self.package_name:60}:[/] Leaving 
generated files in {self._src_dir}."
                 )
             else:
-                console.print(
-                    f"[bright_blue]{self.package_name:60}:[/] 
[magenta](NEW)[/] Cleaning up generated files in {self._src_dir}."
-                )
                 shutil.rmtree(self._src_dir, ignore_errors=True)
         return build_errors
 
diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py
index 7ef60d4ddb0..4f8988088f1 100644
--- a/docs/exts/provider_yaml_utils.py
+++ b/docs/exts/provider_yaml_utils.py
@@ -28,24 +28,12 @@ import yaml
 ROOT_DIR = Path(__file__).parents[2].resolve()
 AIRFLOW_PROVIDERS_DIR = ROOT_DIR / "providers"
 AIRFLOW_PROVIDERS_SRC = AIRFLOW_PROVIDERS_DIR / "src"
-AIRFLOW_PROVIDERS_NS_PACKAGE = AIRFLOW_PROVIDERS_SRC / "airflow" / "providers"
-# TODO(potiuk) remove this when we move all providers from the old structure
-OLD_PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / 
"provider.yaml.schema.json"
-# TODO(potiuk) - rename when all providers are new-style
-NEW_PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / 
"new_provider.yaml.schema.json"
+PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR / "airflow" / "provider.yaml.schema.json"
 
 
-# TODO(potiuk) - remove when all providers are new-style
 @cache
-def old_provider_yaml_schema() -> dict[str, Any]:
-    with open(OLD_PROVIDER_DATA_SCHEMA_PATH) as schema_file:
-        return json.load(schema_file)
-
-
-# TODO(potiuk) - rename when all providers are new-style
-@cache
-def new_provider_yaml_schema() -> dict[str, Any]:
-    with open(NEW_PROVIDER_DATA_SCHEMA_PATH) as schema_file:
+def provider_yaml_schema() -> dict[str, Any]:
+    with open(PROVIDER_DATA_SCHEMA_PATH) as schema_file:
         return json.load(schema_file)
 
 
@@ -53,18 +41,7 @@ def 
_provider_yaml_directory_to_module(provider_yaml_directory_path: str) -> str
     return 
str(Path(provider_yaml_directory_path).relative_to(AIRFLOW_PROVIDERS_SRC)).replace("/",
 ".")
 
 
-def _filepath_to_system_tests_str(provider_yaml_directory_path: str) -> str:
-    return (
-        ROOT_DIR
-        / "providers"
-        / "tests"
-        / "system"
-        / 
Path(provider_yaml_directory_path).relative_to(AIRFLOW_PROVIDERS_NS_PACKAGE)
-    ).as_posix()
-
-
-# TODO(potiuk) - rename when all providers are new-style
-def _get_new_provider_root_path(provider_yaml_directory_path: Path) -> Path:
+def _get_provider_root_path(provider_yaml_directory_path: Path) -> Path:
     for parent in Path(provider_yaml_directory_path).parents:
         if (parent / "src").exists():
             return parent
@@ -74,37 +51,15 @@ def 
_get_new_provider_root_path(provider_yaml_directory_path: Path) -> Path:
     )
 
 
-# TODO(potiuk) - rename when all providers are new-style
-def _new_filepath_to_system_tests(provider_yaml_directory_path: Path) -> Path:
-    test_root_path = _get_new_provider_root_path(provider_yaml_directory_path) 
/ "tests"
+def _filepath_to_system_tests(provider_yaml_directory_path: Path) -> Path:
+    test_root_path = _get_provider_root_path(provider_yaml_directory_path) / 
"tests"
     return (test_root_path / "system").relative_to(AIRFLOW_PROVIDERS_DIR)
 
 
 @cache
 def get_all_provider_yaml_paths() -> list[Path]:
     """Returns list of all provider.yaml files including new and old 
structure."""
-    return sorted(
-        
list(AIRFLOW_PROVIDERS_DIR.glob("**/src/airflow/providers/**/provider.yaml"))
-        +
-        # TODO(potiuk) remove this when we move all providers from the old 
structure
-        list(AIRFLOW_PROVIDERS_NS_PACKAGE.glob("**/provider.yaml"))
-    )
-
-
-# TODO(potiuk) remove this when we move all providers from the old structure
-@cache
-def get_old_provider_yaml_paths() -> list[Path]:
-    """Returns list of provider.yaml files for old structure"""
-    return sorted(AIRFLOW_PROVIDERS_NS_PACKAGE.rglob("**/provider.yaml"))
-
-
-# TODO(potiuk) - rename when all providers are new-style
-@cache
-def get_new_provider_yaml_paths() -> list[Path]:
-    """Returns list of provider.yaml files for new structure"""
-    return sorted(
-        list(set(AIRFLOW_PROVIDERS_DIR.rglob("**/provider.yaml")) - 
set(get_old_provider_yaml_paths()))
-    )
+    return sorted(list(AIRFLOW_PROVIDERS_DIR.glob("**/provider.yaml")))
 
 
 @cache
@@ -114,47 +69,24 @@ def load_package_data(include_suspended: bool = False) -> 
list[dict[str, Any]]:
 
     :return: A list containing the contents of all provider.yaml files - old 
and new structure.
     """
-    # TODO(potiuk) - remove when all providers are new-style
-    schema = old_provider_yaml_schema()
-    new_schema = new_provider_yaml_schema()
+    schema = provider_yaml_schema()
     result = []
-    # TODO(potiuk) - rename when all providers are new-style
-    for new_provider_yaml_path in get_new_provider_yaml_paths():
-        with open(new_provider_yaml_path) as yaml_file:
+    for provider_yaml_path in get_all_provider_yaml_paths():
+        with open(provider_yaml_path) as yaml_file:
             provider = yaml.safe_load(yaml_file)
         try:
-            jsonschema.validate(provider, schema=new_schema)
+            jsonschema.validate(provider, schema=schema)
         except jsonschema.ValidationError as ex:
-            msg = f"Unable to parse: {new_provider_yaml_path}. Original error 
{type(ex).__name__}: {ex}"
+            msg = f"Unable to parse: {provider_yaml_path}. Original error 
{type(ex).__name__}: {ex}"
             raise RuntimeError(msg)
         if provider["state"] == "suspended" and not include_suspended:
             continue
-        provider_yaml_dir_str = os.path.dirname(new_provider_yaml_path)
+        provider_yaml_dir_str = os.path.dirname(provider_yaml_path)
         module = provider["package-name"][len("apache-") :].replace("-", ".")
         module_folder = module[len("airflow-providers-") :].replace(".", "/")
         provider["python-module"] = module
         provider["package-dir"] = 
f"{provider_yaml_dir_str}/src/{module.replace('.', '/')}"
-        provider["docs-dir"] = os.path.dirname(new_provider_yaml_path.parent / 
"docs")
+        provider["docs-dir"] = os.path.dirname(provider_yaml_path.parent / 
"docs")
         provider["system-tests-dir"] = 
f"{provider_yaml_dir_str}/tests/system/{module_folder}"
-        # TODO(potiuk) - remove when all providers are new-style
-        provider["is_new_provider"] = True
-        result.append(provider)
-    # TODO(potiuk): Remove me when all providers are moved ALL_PROVIDERS
-    for provider_yaml_path in get_old_provider_yaml_paths():
-        with open(provider_yaml_path) as yaml_file:
-            provider = yaml.safe_load(yaml_file)
-        try:
-            jsonschema.validate(provider, schema=schema)
-        except jsonschema.ValidationError as ex:
-            msg = f"Unable to parse: {provider_yaml_path}. Original error 
{type(ex).__name__}: {ex}"
-            raise RuntimeError(msg)
-        if provider["state"] == "suspended" and not include_suspended:
-            continue
-        old_provider_yaml_dir_str = os.path.dirname(provider_yaml_path)
-        provider["python-module"] = 
_provider_yaml_directory_to_module(old_provider_yaml_dir_str)
-        provider["package-dir"] = old_provider_yaml_dir_str
-        provider["system-tests-dir"] = 
_filepath_to_system_tests_str(old_provider_yaml_dir_str)
-        provider["is_new_provider"] = False
         result.append(provider)
-
     return result
diff --git a/providers/MANAGING_PROVIDERS_LIFECYCLE.rst 
b/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
index 749f8e67caf..9a4157d068b 100644
--- a/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
+++ b/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
@@ -42,9 +42,7 @@ Using the code above you will set up Docker containers. These 
containers your lo
 In this way, the changes made in your IDE are already applied to the code 
inside the container and tests can
 be carried out quickly.
 
-# TODO(potiuk) - rename when all providers are new-style
-
-In this how-to guide our example provider name will be ``<NEW_PROVIDER>``.
+In this how-to guide our example provider name will be ``<PROVIDER>``.
 When you see this placeholder you must change for your provider name.
 
 
@@ -64,55 +62,59 @@ triggers (and the list changes continuously).
 
     GIT apache/airflow/
     └── providers/
-        ├── src/
-        │   └── airflow/
-        │       └── providers/<NEW_PROVIDER>/
-        │           ├── __init__.py
-        │           ├── executors/
-        │           │   ├── __init__.py
-        │           │   └── *.py
-        │           ├── hooks/
-        │           │   ├── __init__.py
-        │           │   └── *.py
-        │           ├── notifications/
-        │           │   ├── __init__.py
-        │           │   └── *.py
-        │           ├── operators/
-        │           │   ├── __init__.py
-        │           │   └── *.py
-        │           ├── transfers/
-        │           │   ├── __init__.py
-        │           │   └── *.py
-        │           └── triggers/
-        │               ├── __init__.py
-        │               └── *.py
-        └── tests/
-            ├── <NEW_PROVIDER>/
-            |   ├── __init__.py
-            |   ├── executors/
-            |   │   ├── __init__.py
-            |   │   └── test_*.py
-            |   ├── hooks/
-            |   │   ├── __init__.py
-            |   │   └── test_*.py
-            |   ├── notifications/
-            |   │   ├── __init__.py
-            |   │   └── test_*.py
-            |   ├── operators/
-            |   │   ├── __init__.py
-            |   │   └── test_*.py
-            |   ├── transfers/
-            |   │   ├── __init__.py
-            |   │   └── test_*.py
-            |   └── triggers/
-            |       ├── __init__.py
-            |       └── test_*.py
-            └── system/<NEW_PROVIDER>/
-                ├── __init__.py
-                └── example_*.py
-
-.. note::
-      The above structure is work in progress and subject to change till Task 
SDK feature is complete.
+                 └── <PROVIDER>/
+                               ├── pyproject.toml
+                               ├── provider.yaml
+                               ├── src/
+                               │   └── airflow/
+                               │       └── providers/<PROVIDER>/
+                               │                               ├── __init__.py
+                               │                               ├── executors/
+                               │                               │   ├── 
__init__.py
+                               │                               │   └── *.py
+                               │                               ├── hooks/
+                               │                               │   ├── 
__init__.py
+                               │                               │   └── *.py
+                               │                               ├── 
notifications/
+                               │                               │   ├── 
__init__.py
+                               │                               │   └── *.py
+                               │                               ├── operators/
+                               │                               │   ├── 
__init__.py
+                               │                               │   └── *.py
+                               │                               ├── transfers/
+                               │                               │   ├── 
__init__.py
+                               │                               │   └── *.py
+                               │                               └── triggers/
+                               │                                   ├── 
__init__.py
+                               │                                   └── *.py
+                               └── tests/
+                                        ├── unit/
+                                        │       └── <PROVIDER>/
+                                        │                     ├── __init__.py
+                                        │                     ├── executors/
+                                        │                     │   ├── 
__init__.py
+                                        │                     │   └── test_*.py
+                                        │                     ├── hooks/
+                                        │                     │   ├── 
__init__.py
+                                        │                     │   └── test_*.py
+                                        │                     ├── 
notifications/
+                                        │                     │   ├── 
__init__.py
+                                        │                     │   └── test_*.py
+                                        │                     ├── operators/
+                                        │                     │   ├── 
__init__.py
+                                        │                     │   └── test_*.py
+                                        │                     ├── transfers/
+                                        │                     │   ├── 
__init__.py
+                                        │                     │   └── test_*.py
+                                        │                     └── triggers/
+                                        │                         ├── 
__init__.py
+                                        │                         └── test_*.py
+                                        ├── integration/<PROVIDER>/
+                                        │                         ├── 
__init__.py
+                                        │                         └── 
test_integration_*.py
+                                        └── system/<PROVIDER>/
+                                                             ├── __init__.py
+                                                             └── example_*.py
 
 Considering that you have already transferred your provider's code to the 
above structure, it will now be necessary
 to create unit tests for each component you created. The example below I have 
already set up an environment using
@@ -120,7 +122,7 @@ breeze and I'll run unit tests for my Hook.
 
   .. code-block:: bash
 
-      root@fafd8d630e46:/opt/airflow# python -m pytest 
providers/<NEW_PROVIDER>/tests/<NEW_PROVIDER>/hook/test_*.py
+      root@fafd8d630e46:/opt/airflow# python -m pytest 
providers/<PROVIDER>/tests/<PROVIDER>/hook/test_*.py
 
 Adding chicken-egg providers
 ----------------------------
@@ -219,20 +221,19 @@ by ``breeze release-management`` command by release 
manager when providers are r
   .. code-block:: bash
 
      ├── pyproject.toml
-     └── providers/<NEW_PROVIDER>/src/airflow/providers/
-                                ├── provider.yaml
-                                ├── pyproject.toml
-                                ├── CHANGELOG.rst
-                                │
-                                └── docs/
-                                    ├── integration-logos
-                                    │                   └── <NEW_PROVIDER>.png
-                                    ├── index.rst
-                                    ├── commits.rst
-                                    ├── connections.rst
-                                    └── operators/
-                                        └── <NEW_PROVIDER>.rst
-
+     └── providers/<PROVIDER>/src/airflow/providers/
+                                                   ├── provider.yaml
+                                                   ├── pyproject.toml
+                                                   ├── CHANGELOG.rst
+                                                   │
+                                                   └── docs/
+                                                       ├── integration-logos
+                                                       │                   └── 
<PROVIDER>.png
+                                                       ├── index.rst
+                                                       ├── commits.rst
+                                                       ├── connections.rst
+                                                       └── operators/
+                                                           └── <PROVIDER>.rst
 
 There is a chance that your provider's name is not a common English word.
 In this case is necessary to add it to the file ``docs/spelling_wordlist.txt``.
@@ -240,11 +241,11 @@ In this case is necessary to add it to the file 
``docs/spelling_wordlist.txt``.
 Add your provider dependencies into ``provider.yaml`` under the ``dependencies`` 
key.
 If your provider doesn't have any dependencies, add an empty list.
 
-In the ``docs/apache-airflow-providers-<NEW_PROVIDER>/connections.rst``:
+In the ``docs/apache-airflow-providers-<PROVIDER>/connections.rst``:
 
 - add information how to configure connection for your provider.
 
-In the provider's ``docs/operators/<NEW_PROVIDER>.rst`` add information
+In the provider's ``docs/operators/<PROVIDER>.rst`` add information
 how to use the Operator. It's important to add examples and additional 
information if your
 Operator has extra-parameters.
 
@@ -255,7 +256,7 @@ Operator has extra-parameters.
       NewProviderOperator
       ===================
 
-      Use the 
:class:`~airflow.providers.<NEW_PROVIDER>.operators.NewProviderOperator` to do 
something
+      Use the 
:class:`~airflow.providers.<PROVIDER>.operators.NewProviderOperator` to do 
something
       amazing with Airflow!
 
       Using the Operator
@@ -264,10 +265,10 @@ Operator has extra-parameters.
       The NewProviderOperator requires a ``connection_id`` and this other 
awesome parameter.
       You can see an example below:
 
-      .. exampleinclude:: 
/../../providers/<NEW_PROVIDER>/example_dags/example_<NEW_PROVIDER>.py
+      .. exampleinclude:: 
/../../providers/<PROVIDER>/example_dags/example_<PROVIDER>.py
           :language: python
-          :start-after: [START howto_operator_<NEW_PROVIDER>]
-          :end-before: [END howto_operator_<NEW_PROVIDER>]
+          :start-after: [START howto_operator_<PROVIDER>]
+          :end-before: [END howto_operator_<PROVIDER>]
 
 
 Copy from another, similar provider the docs: ``docs/*.rst``:
@@ -284,42 +285,42 @@ At least those docs should be present
 
 Make sure to update/add all information that are specific for the new provider.
 
-In the 
``providers/<NEW_PROVIDER>/src/airflow/providers/<NEW_PROVIDER>/provider.yaml`` 
add information of your provider:
+In the ``providers/<PROVIDER>/src/airflow/providers/<PROVIDER>/provider.yaml`` 
add information about your provider:
 
   .. code-block:: yaml
 
-      package-name: apache-airflow-providers-<NEW_PROVIDER>
-      name: <NEW_PROVIDER>
+      package-name: apache-airflow-providers-<PROVIDER>
+      name: <PROVIDER>
       description: |
-        `<NEW_PROVIDER> <https://example.io/>`__
+        `<PROVIDER> <https://example.io/>`__
       versions:
         - 1.0.0
 
       integrations:
-        - integration-name: <NEW_PROVIDER>
+        - integration-name: <PROVIDER>
           external-doc-url: https://www.example.io/
-          logo: /docs/integration-logos/<NEW_PROVIDER>.png
+          logo: /docs/integration-logos/<PROVIDER>.png
           how-to-guide:
-            - 
/docs/apache-airflow-providers-<NEW_PROVIDER>/operators/<NEW_PROVIDER>.rst
+            - 
/docs/apache-airflow-providers-<PROVIDER>/operators/<PROVIDER>.rst
           tags: [service]
 
       operators:
-        - integration-name: <NEW_PROVIDER>
+        - integration-name: <PROVIDER>
           python-modules:
-            - airflow.providers.<NEW_PROVIDER>.operators.<NEW_PROVIDER>
+            - airflow.providers.<PROVIDER>.operators.<PROVIDER>
 
       hooks:
-        - integration-name: <NEW_PROVIDER>
+        - integration-name: <PROVIDER>
           python-modules:
-            - airflow.providers.<NEW_PROVIDER>.hooks.<NEW_PROVIDER>
+            - airflow.providers.<PROVIDER>.hooks.<PROVIDER>
 
       sensors:
-        - integration-name: <NEW_PROVIDER>
+        - integration-name: <PROVIDER>
           python-modules:
-            - airflow.providers.<NEW_PROVIDER>.sensors.<NEW_PROVIDER>
+            - airflow.providers.<PROVIDER>.sensors.<PROVIDER>
 
       connection-types:
-        - hook-class-name: 
airflow.providers.<NEW_PROVIDER>.hooks.<NEW_PROVIDER>.NewProviderHook
+        - hook-class-name: 
airflow.providers.<PROVIDER>.hooks.<PROVIDER>.NewProviderHook
         - connection-type: provider-connection-type
 
 After changing and creating these files you can build the documentation 
locally. The two commands below will
diff --git a/providers/fab/docs/changelog.rst b/providers/fab/docs/changelog.rst
index 3fbadba25b0..801a4ba6a75 100644
--- a/providers/fab/docs/changelog.rst
+++ b/providers/fab/docs/changelog.rst
@@ -35,24 +35,48 @@ Breaking changes
   All deprecated classes, parameters and features have been removed from the 
Fab provider package.
   The following breaking changes were introduced:
 
-  * Removed ``is_authorized_dataset`` method from ``FabAuthManager``. Use 
``is_authorized_asset`` instead
-  * Removed ``oauth_whitelists`` property from the security manager override. 
Use ``oauth_allow_list`` instead
-  * Removed the authentication type ``AUTH_OID``
-  * Removed ``get_readable_dags`` method from the security manager override
-  * Removed ``get_editable_dags`` method from the security manager override
-  * Removed ``get_accessible_dags`` method from the security manager override
-  * Removed ``get_accessible_dag_ids`` method from the security manager 
override
-  * Removed ``prefixed_dag_id`` method from the security manager override
-  * Removed ``init_role`` method from the security manager override
+* Removed ``is_authorized_dataset`` method from ``FabAuthManager``. Use 
``is_authorized_asset`` instead
+* Removed ``oauth_whitelists`` property from the security manager override. 
Use ``oauth_allow_list`` instead
+* Removed the authentication type ``AUTH_OID``
+* Removed ``get_readable_dags`` method from the security manager override
+* Removed ``get_editable_dags`` method from the security manager override
+* Removed ``get_accessible_dags`` method from the security manager override
+* Removed ``get_accessible_dag_ids`` method from the security manager override
+* Removed ``prefixed_dag_id`` method from the security manager override
+* Removed ``init_role`` method from the security manager override
+
+* ``Prepare FAB provider to set next version as major version (#43939)``
+* ``Remove deprecations from fab provider (#44198)``
+
+Features
+~~~~~~~~
+
+* ``Set up JWT token authentication in Fast APIs (#42634)``
+* ``AIP-79 Support Airflow 2.x plugins in fast api. Embed a minimal version of 
the Flask application in fastapi application (#44464)``
+
+
+Misc
+~~~~
+
+* ``AIP-81 Move CLI Commands to directories according to Hybrid, Local and 
Remote (#44538)``
 
 .. Review and move the new changes to one of the sections above:
-   * ``AIP-79 Support Airflow 2.x plugins in fast api. Embed a minimal version 
of the Flask application in fastapi application (#44464)``
    * ``Prevent __init__.py in providers from being modified (#44713)``
-   * ``AIP-81 Move CLI Commands to directories according to Hybrid, Local and 
Remote (#44538)``
    * ``Use Python 3.9 as target version for Ruff & Black rules (#44298)``
-   * ``Remove deprecations from fab provider (#44198)``
-   * ``Set up JWT token authentication in Fast APIs (#42634)``
-   * ``Prepare FAB provider to set next version as major version (#43939)``
+
+1.5.3
+.....
+
+Bug Fixes
+~~~~~~~~~
+
+* ``[providers-fab/v1-5] Use different default algorithms for different 
werkzeug versions (#46384) (#46392)``
+
+Misc
+~~~~
+
+* ``[providers-fab/v1-5] Upgrade to FAB 4.5.3 (#45874) (#45918)``
+
 
 1.5.2
 .....
diff --git a/scripts/ci/pre_commit/check_provider_docs.py 
b/scripts/ci/pre_commit/check_provider_docs.py
index 04ef21e65b8..bc6e287c581 100755
--- a/scripts/ci/pre_commit/check_provider_docs.py
+++ b/scripts/ci/pre_commit/check_provider_docs.py
@@ -29,7 +29,7 @@ sys.path.insert(0, str(Path(__file__).parent.resolve()))  # 
make sure common uti
 from common_precommit_utils import (
     AIRFLOW_PROVIDERS_ROOT_PATH,
     AIRFLOW_SOURCES_ROOT_PATH,
-    get_all_new_provider_info_dicts,
+    get_all_provider_info_dicts,
 )
 
 sys.path.insert(0, str(AIRFLOW_SOURCES_ROOT_PATH))  # make sure setup is 
imported from Airflow
@@ -189,7 +189,7 @@ def has_executor_package_defined(provider_id: str) -> bool:
 
 def run_all_checks():
     jinja_loader = Environment(loader=BaseLoader(), autoescape=True)
-    all_providers = get_all_new_provider_info_dicts()
+    all_providers = get_all_provider_info_dicts()
     status: list[bool] = []
 
     for provider_id, provider_info in all_providers.items():
diff --git a/scripts/ci/pre_commit/check_pyproject_toml_consistency.py 
b/scripts/ci/pre_commit/check_pyproject_toml_consistency.py
index c1b6fb18837..18ecb61076b 100755
--- a/scripts/ci/pre_commit/check_pyproject_toml_consistency.py
+++ b/scripts/ci/pre_commit/check_pyproject_toml_consistency.py
@@ -26,7 +26,7 @@ import sys
 from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).parent.resolve()))  # make sure 
common_precommit_utils is imported
-from common_precommit_utils import console, get_all_new_provider_ids
+from common_precommit_utils import console, get_all_provider_ids
 
 AIRFLOW_ROOT_PATH = Path(__file__).parents[3].resolve()
 PYPROJECT_TOML_FILE = AIRFLOW_ROOT_PATH / "pyproject.toml"
@@ -40,9 +40,8 @@ if __name__ == "__main__":
 
     error = False
     toml_dict = tomllib.loads(PYPROJECT_TOML_FILE.read_text())
-    # TODO(potiuk): renamme when all providers are switched to new style
-    all_new_providers = get_all_new_provider_ids()
-    for provider_id in all_new_providers:
+    all_providers = get_all_provider_ids()
+    for provider_id in all_providers:
         expected_provider_package = 
f"apache-airflow-providers-{provider_id.replace('.', '-')}"
         expected_member = "providers/" + provider_id.replace(".", "/")
         dev_dependency_group = toml_dict["dependency-groups"]["dev"]
diff --git a/scripts/ci/pre_commit/common_precommit_utils.py 
b/scripts/ci/pre_commit/common_precommit_utils.py
index 946b7d81994..627807c4152 100644
--- a/scripts/ci/pre_commit/common_precommit_utils.py
+++ b/scripts/ci/pre_commit/common_precommit_utils.py
@@ -263,8 +263,7 @@ def get_provider_base_dir_from_path(file_path: Path) -> Path | None:
     return None
 
 
-# TODO(potiuk): rename this function when all providers are moved to new structure
-def get_all_new_provider_ids() -> list[str]:
+def get_all_provider_ids() -> list[str]:
     """
     Get all providers from the new provider structure
     """
@@ -278,8 +277,7 @@ def get_all_new_provider_ids() -> list[str]:
     return all_provider_ids
 
 
-# TODO(potiuk): rename this function when all providers are moved to new structure
-def get_all_new_provider_yaml_files() -> list[Path]:
+def get_all_provider_yaml_files() -> list[Path]:
     """
     Get all providers from the new provider structure
     """
@@ -291,13 +289,12 @@ def get_all_new_provider_yaml_files() -> list[Path]:
     return all_provider_yaml_files
 
 
-# TODO(potiuk): rename this function when all providers are moved to new structure
-def get_all_new_provider_info_dicts() -> dict[str, dict]:
+def get_all_provider_info_dicts() -> dict[str, dict]:
     """
     Get provider yaml info for all providers from the new provider structure
     """
     providers: dict[str, dict] = {}
-    for provider_file in get_all_new_provider_yaml_files():
+    for provider_file in get_all_provider_yaml_files():
         provider_id = str(provider_file.parent.relative_to(AIRFLOW_PROVIDERS_ROOT_PATH)).replace(os.sep, ".")
         import yaml
 
diff --git a/scripts/ci/pre_commit/generate_volumes_for_sources.py b/scripts/ci/pre_commit/generate_volumes_for_sources.py
index 451cac467a9..a8e9763efc7 100755
--- a/scripts/ci/pre_commit/generate_volumes_for_sources.py
+++ b/scripts/ci/pre_commit/generate_volumes_for_sources.py
@@ -21,7 +21,7 @@ import sys
 from pathlib import Path
 
 sys.path.insert(0, str(Path(__file__).parent.resolve()))  # make sure common_precommit_utils is imported
-from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH, get_all_new_provider_ids, insert_documentation
+from common_precommit_utils import AIRFLOW_SOURCES_ROOT_PATH, get_all_provider_ids, insert_documentation
 
 START_MARKER = "      # START automatically generated volumes by generate-volumes-for-sources pre-commit"
 END_MARKER = "      # END automatically generated volumes by generate-volumes-for-sources pre-commit"
@@ -29,7 +29,7 @@ END_MARKER = "      # END automatically generated volumes by generate-volumes-fo
 REMOVE_SOURCES_YAML = AIRFLOW_SOURCES_ROOT_PATH / "scripts" / "ci" / "docker-compose" / "remove-sources.yml"
 TESTS_SOURCES_YAML = AIRFLOW_SOURCES_ROOT_PATH / "scripts" / "ci" / "docker-compose" / "tests-sources.yml"
 
-providers_paths = sorted([provider.replace(".", "/") for provider in get_all_new_provider_ids()])
+providers_paths = sorted([provider.replace(".", "/") for provider in get_all_provider_ids()])
 
 
 if __name__ == "__main__":
diff --git a/scripts/ci/pre_commit/mypy_folder.py b/scripts/ci/pre_commit/mypy_folder.py
index 7bf7826de0f..09a643d45cc 100755
--- a/scripts/ci/pre_commit/mypy_folder.py
+++ b/scripts/ci/pre_commit/mypy_folder.py
@@ -25,7 +25,7 @@ sys.path.insert(0, str(Path(__file__).parent.resolve()))
 
 from common_precommit_utils import (
     console,
-    get_all_new_provider_ids,
+    get_all_provider_ids,
     initialize_breeze_precommit,
     run_command_via_breeze_shell,
 )
@@ -35,12 +35,11 @@ initialize_breeze_precommit(__name__, __file__)
 
 ALLOWED_FOLDERS = [
     "airflow",
-    *[f"providers/{provider_id.replace('.', '/')}/src" for provider_id in get_all_new_provider_ids()],
+    *[f"providers/{provider_id.replace('.', '/')}/src" for provider_id in get_all_provider_ids()],
     "dev",
     "docs",
     "task_sdk/src/airflow/sdk",
-    # TODO(potiuk): rename it to "all_providers" when we move all providers to new structure
-    "all_new_providers",
+    "all_providers",
 ]
 
 if len(sys.argv) < 2:
@@ -61,9 +60,9 @@ arguments = mypy_folders.copy()
 namespace_packages = False
 
 for mypy_folder in mypy_folders:
-    if mypy_folder == "all_new_providers":
-        arguments.remove("all_new_providers")
-        for provider_id in get_all_new_provider_ids():
+    if mypy_folder == "all_providers":
+        arguments.remove("all_providers")
+        for provider_id in get_all_provider_ids():
             arguments.append(f"providers/{provider_id.replace('.', '/')}/src")
             arguments.append(f"providers/{provider_id.replace('.', '/')}/tests")
         namespace_packages = True
diff --git a/scripts/ci/pre_commit/update_providers_build_files.py b/scripts/ci/pre_commit/update_providers_build_files.py
index 9d1abc910a5..500a457ea41 100755
--- a/scripts/ci/pre_commit/update_providers_build_files.py
+++ b/scripts/ci/pre_commit/update_providers_build_files.py
@@ -32,8 +32,7 @@ file_list = sys.argv[1:]
 console.print(f"[bright_blue]Determining providers to regenerate from: {file_list}\n")
 
 
-# TODO(potiuk) - rename when all providers are new-style
-def _find_new_providers_structure(examined_file: Path) -> None:
+def _find_all_providers(examined_file: Path) -> None:
     console.print(f"[bright_blue]Looking at {examined_file} for new structure provider.yaml")
     # find the folder where provider.yaml is
     for parent in Path(examined_file).parents:
@@ -60,7 +59,7 @@ def _find_new_providers_structure(examined_file: Path) -> None:
 
 # get all folders from arguments
 for examined_file in file_list:
-    _find_new_providers_structure(Path(examined_file))
+    _find_all_providers(Path(examined_file))
 
 console.print(f"[bright_blue]Regenerating build files for providers: {providers}[/]")
 
diff --git a/scripts/ci/pre_commit/update_providers_dependencies.py b/scripts/ci/pre_commit/update_providers_dependencies.py
index 6f4a6792623..4cd9bf5bc79 100755
--- a/scripts/ci/pre_commit/update_providers_dependencies.py
+++ b/scripts/ci/pre_commit/update_providers_dependencies.py
@@ -46,8 +46,7 @@ PYPROJECT_TOML_FILE_PATH = AIRFLOW_SOURCES_ROOT / "pyproject.toml"
 MY_FILE = Path(__file__).resolve()
 MY_MD5SUM_FILE = MY_FILE.parent / MY_FILE.name.replace(".py", ".py.md5sum")
 
-# TODO(potiuk) - remove this when we move all providers to the new structure
-NEW_STRUCTURE_PROVIDERS: set[str] = set()
+PROVIDERS: set[str] = set()
 
 PYPROJECT_TOML_CONTENT: dict[str, dict[str, Any]] = {}
 
@@ -121,7 +120,7 @@ def find_all_providers_and_provider_files():
                     provider_name = str(provider_yaml_file.parent.relative_to(AIRFLOW_PROVIDERS_DIR)).replace(
                         os.sep, "."
                     )
-                    NEW_STRUCTURE_PROVIDERS.add(provider_name)
+                    PROVIDERS.add(provider_name)
                     PYPROJECT_TOML_CONTENT[provider_name] = load_pyproject_toml(
                         provider_yaml_file.parent / "pyproject.toml"
                     )
@@ -234,7 +233,7 @@ if __name__ == "__main__":
     for provider in sorted(ALL_PROVIDERS.keys()):
         provider_yaml_content = ALL_PROVIDERS[provider]
         console.print(f"Reading dependencies for provider: {provider}")
-        if provider in NEW_STRUCTURE_PROVIDERS:
+        if provider in PROVIDERS:
             ALL_DEPENDENCIES[provider]["deps"].extend(
                 PYPROJECT_TOML_CONTENT[provider]["project"]["dependencies"]
             )
diff --git a/scripts/in_container/run_provider_yaml_files_check.py b/scripts/in_container/run_provider_yaml_files_check.py
index 5dddba1c851..d5e361c840d 100755
--- a/scripts/in_container/run_provider_yaml_files_check.py
+++ b/scripts/in_container/run_provider_yaml_files_check.py
@@ -31,6 +31,7 @@ import warnings
 from collections import Counter
 from collections.abc import Iterable
 from enum import Enum
+from functools import cache
 from typing import Any, Callable
 
 import jsonschema
@@ -62,11 +63,6 @@ KNOWN_DEPRECATED_CLASSES = [
     "airflow.providers.google.cloud.operators.automl.AutoMLDeployModelOperator",
 ]
 
-try:
-    from yaml import CSafeLoader as SafeLoader
-except ImportError:
-    from yaml import SafeLoader  # type: ignore
-
 if __name__ != "__main__":
     raise SystemExit(
         "This file is intended to be executed as an executable program. You cannot use it as a module."
@@ -77,7 +73,6 @@ PROVIDERS_DIR_PATH = ROOT_DIR / "providers"
 PROVIDERS_SRC_DIR_PATH = PROVIDERS_DIR_PATH / "src"
 DOCS_DIR_PATH = ROOT_DIR / "docs"
 PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR.joinpath("airflow", "provider.yaml.schema.json")
-NEW_PROVIDER_DATA_SCHEMA_PATH = ROOT_DIR.joinpath("airflow", "new_provider.yaml.schema.json")
 PROVIDER_ISSUE_TEMPLATE_PATH = ROOT_DIR.joinpath(
     ".github", "ISSUE_TEMPLATE", "airflow_providers_bug_report.yml"
 )
@@ -117,29 +112,22 @@ def _filepath_to_module(filepath: pathlib.Path | str) -> str:
     return p.as_posix().replace("/", ".")
 
 
-# TODO(potiuk): remove this when we move all providers to the new structure
+@cache
 def _load_schema() -> dict[str, Any]:
     with PROVIDER_DATA_SCHEMA_PATH.open() as schema_file:
         content = json.load(schema_file)
     return content
 
 
-def _load_new_schema() -> dict[str, Any]:
-    with NEW_PROVIDER_DATA_SCHEMA_PATH.open() as schema_file:
-        content = json.load(schema_file)
-    return content
-
-
 def _load_package_data(package_paths: Iterable[str]):
-    # TODO(potiuk): rename me
-    new_schema = _load_new_schema()
     result = {}
+    schema = _load_schema()
     for provider_yaml_path in package_paths:
         with open(provider_yaml_path) as yaml_file:
-            provider = yaml.load(yaml_file, SafeLoader)
+            provider = yaml.safe_load(yaml_file)
         rel_path = pathlib.Path(provider_yaml_path).relative_to(ROOT_DIR).as_posix()
         try:
-            jsonschema.validate(provider, schema=new_schema)
+            jsonschema.validate(provider, schema=schema)
         except jsonschema.ValidationError as ex:
             msg = f"Unable to parse: {provider_yaml_path}. Original error {type(ex).__name__}: {ex}"
             raise RuntimeError(msg)
diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py
index 1ae11c8d87d..5ec5decc173 100644
--- a/tests/always/test_project_structure.py
+++ b/tests/always/test_project_structure.py
@@ -25,8 +25,7 @@ import pathlib
 import pytest
 
 AIRFLOW_SOURCES_ROOT = pathlib.Path(__file__).parents[2]
-
-NEW_PROVIDER_SRC = AIRFLOW_SOURCES_ROOT.joinpath("providers")
+PROVIDERS_ROOT_PATH = AIRFLOW_SOURCES_ROOT / "providers"
 
 
 class TestProjectStructure:
@@ -216,7 +215,7 @@ class TestProjectStructure:
         modules_files = (f for f in modules_files if "get_provider_info.py" not in f.parts)
         # Make path relative
         modules_files = list(f.relative_to(AIRFLOW_SOURCES_ROOT) for f in modules_files)
-        current_test_files = list(NEW_PROVIDER_SRC.rglob("**/tests/**/*.py"))
+        current_test_files = list(PROVIDERS_ROOT_PATH.rglob("**/tests/**/*.py"))
         # Make path relative
         current_test_files = list(f.relative_to(AIRFLOW_SOURCES_ROOT) for f in current_test_files)
         # Exclude __init__.py
@@ -281,18 +280,9 @@ class ProjectStructureTest:
     CLASS_DIRS = {"operators", "sensors", "transfers"}
     CLASS_SUFFIXES = ["Operator", "Sensor"]
 
-    def class_paths(self):
-        for resource_type in self.CLASS_DIRS:
-            python_files = AIRFLOW_SOURCES_ROOT.glob(
-                f"airflow/providers/{self.PROVIDER}/**/{resource_type}/**/*.py",
-            )
-            # Make path relative
-            resource_files = filter(lambda f: f.name != "__init__.py", python_files)
-            yield from resource_files
-
     def new_class_paths(self):
         for resource_type in self.CLASS_DIRS:
-            python_files = NEW_PROVIDER_SRC.glob(
+            python_files = PROVIDERS_ROOT_PATH.glob(
                 f"{self.PROVIDER}/**/{resource_type}/**/*.py",
             )
             # Make path relative
@@ -302,7 +292,7 @@ class ProjectStructureTest:
     def list_of_classes(self):
         classes = {}
         for file in self.new_class_paths():
-            operators_paths = self.get_classes_from_file(file, NEW_PROVIDER_SRC)
+            operators_paths = self.get_classes_from_file(file, PROVIDERS_ROOT_PATH)
             classes.update(operators_paths)
         return classes
 


Reply via email to