This is an automated email from the ASF dual-hosted git repository.

klesh pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git


The following commit(s) were added to refs/heads/main by this push:
     new 0c9311e51 ScopeConfig for pydevlake (#5345)
0c9311e51 is described below

commit 0c9311e51de0a9af004bdc070f832308a926aa5d
Author: Camille Teruel <[email protected]>
AuthorDate: Wed Jun 7 11:11:45 2023 +0200

    ScopeConfig for pydevlake (#5345)
    
    * feat: Add migration operations
    
    * style: Remove unused dependency
    
    * refactor: Rename TransformationRule to ScopeConfig
    
    TransformationRule is renamed ScopeConfig and now includes domain types.
    
    ---------
    
    Co-authored-by: Camille Teruel <[email protected]>
---
 backend/core/dal/dal.go                            |   2 +
 backend/impls/dalgorm/dalgorm.go                   |  22 +++
 backend/python/README.md                           |  22 +--
 .../python/plugins/azuredevops/azuredevops/main.py |  21 ++-
 .../plugins/azuredevops/azuredevops/models.py      |  12 +-
 .../azuredevops/azuredevops/streams/builds.py      |   6 +-
 .../azuredevops/azuredevops/streams/jobs.py        |   4 +-
 backend/python/plugins/azuredevops/poetry.lock     | 167 ++++++++-------------
 backend/python/plugins/azuredevops/pyproject.toml  |   1 -
 .../plugins/azuredevops/tests/plugin_test.py       |   6 +-
 .../plugins/azuredevops/tests/streams_test.py      |   2 +-
 backend/python/pydevlake/pydevlake/__init__.py     |   6 +-
 backend/python/pydevlake/pydevlake/context.py      |  10 +-
 backend/python/pydevlake/pydevlake/ipc.py          |  23 ++-
 backend/python/pydevlake/pydevlake/message.py      |   5 +-
 backend/python/pydevlake/pydevlake/migration.py    |  44 +++++-
 backend/python/pydevlake/pydevlake/model.py        |  12 +-
 backend/python/pydevlake/pydevlake/plugin.py       |  48 +++---
 backend/python/pydevlake/pydevlake/stream.py       |  12 +-
 .../python/pydevlake/pydevlake/testing/testing.py  |  48 +++---
 backend/python/test/fakeplugin/fakeplugin/main.py  |  13 +-
 backend/server/services/remote/models/migration.go |  47 +++++-
 backend/server/services/remote/models/models.go    |  44 +++---
 .../server/services/remote/plugin/default_api.go   |  44 +++---
 .../server/services/remote/plugin/doc/open_api.go  |  15 +-
 backend/server/services/remote/plugin/init.go      |   2 +-
 .../services/remote/plugin/plugin_extensions.go    |  24 +--
 .../server/services/remote/plugin/plugin_impl.go   | 130 ++++++++--------
 backend/server/services/remote/plugin/scope_api.go |   2 +-
 ...nsformation_rule_api.go => scope_config_api.go} |  44 +++---
 .../services/remote/plugin/scope_db_helper.go      |  29 ++--
 backend/test/e2e/remote/helper.go                  |  45 +++---
 backend/test/e2e/remote/python_plugin_test.go      |  40 ++---
 backend/test/helper/api.go                         |  22 +--
 34 files changed, 505 insertions(+), 469 deletions(-)

diff --git a/backend/core/dal/dal.go b/backend/core/dal/dal.go
index 78a07a9fe..089c428b9 100644
--- a/backend/core/dal/dal.go
+++ b/backend/core/dal/dal.go
@@ -157,6 +157,8 @@ type Dal interface {
        DropTables(dst ...interface{}) errors.Error
        // HasTable checks if table exists
        HasTable(table interface{}) bool
+       // HasColumn checks if column exists
+       HasColumn(table interface{}, columnName string) bool
        // RenameTable renames table name
        RenameTable(oldName, newName string) errors.Error
        // GetColumns returns table columns in database
diff --git a/backend/impls/dalgorm/dalgorm.go b/backend/impls/dalgorm/dalgorm.go
index caa7f3a37..b356b4bf0 100644
--- a/backend/impls/dalgorm/dalgorm.go
+++ b/backend/impls/dalgorm/dalgorm.go
@@ -347,6 +347,28 @@ func (d *Dalgorm) HasTable(table interface{}) bool {
        return d.db.Migrator().HasTable(table)
 }
 
+// HasColumn checks if column exists
+func (d *Dalgorm) HasColumn(table interface{}, columnName string) bool {
+       migrator := d.db.Migrator()
+       // Workaround in case table is a string
+       // which cause migrator.HasColumn to panic
+       // see: https://github.com/go-gorm/gorm/issues/5809
+       _, isString := table.(string)
+       if isString {
+               columnTypes, err := migrator.ColumnTypes(table)
+               if err != nil {
+                       return false
+               }
+               for _, columnType := range columnTypes {
+                       if columnType.Name() == columnName {
+                               return true
+                       }
+               }
+               return false
+       }
+       return migrator.HasColumn(table, columnName)
+}
+
 // RenameTable renames table name
 func (d *Dalgorm) RenameTable(oldName, newName string) errors.Error {
        err := d.db.Migrator().RenameTable(oldName, newName)
diff --git a/backend/python/README.md b/backend/python/README.md
index 1e52053d4..a93056b50 100644
--- a/backend/python/README.md
+++ b/backend/python/README.md
@@ -34,7 +34,7 @@ class MyPluginConnection(dl.Connection):
     pass
 
 
-class MyPluginTransformationRule(dl.TransformationRule):
+class MyPluginScopeConfig(dl.ScopeConfig):
     pass
 
 
@@ -44,8 +44,8 @@ class MyPluginToolScope(dl.ToolScope):
 
 class MyPlugin(dl.Plugin):
     connection_type = MyPluginConnection
-    transformation_rule_type =  MyPluginTransformationRule
     tool_scope_type = MyPluginToolScope
+    scope_config_type =  MyPluginScopeConfig
     streams = []
 
     def domain_scopes(self, tool_scope: MyScope) -> Iterable[dl.DomainScope]:
@@ -68,10 +68,11 @@ if __name__ == '__main__':
 This file is the entry point to your plugin.
 It specifies three datatypes:
 - A connection that groups the parameters that your plugin needs to collect 
data, e.g. the url and credentials to connect to the datasource
-- A transformation rule that groups the parameters that your plugin uses to 
convert some data, e.g. regexes to match issue type from name.
 - A tool layer scope type that represents the top-level entity of this plugin, 
e.g. a board, a repository, a project, etc.
+- A scope config that contains the domain entities for a given scope and the 
parameters that your plugin uses to convert some data, e.g. regexes to 
match issue type from name.
 
-The plugin class declares what are its connection, transformation rule and 
tool scope types.
+
+The plugin class declares its connection, tool scope, and scope 
config types.
 It also declares its list of streams, and is responsible to define 4 methods 
that we'll cover hereafter.
 
 We also need to create two shell scripts in the plugin root directory to build 
and run the plugin.
@@ -96,7 +97,7 @@ poetry run python myplugin/main.py "$@"
 ### Connection parameters
 
 The parameters of your plugin split between those that are required to connect 
to the datasource that are grouped in your connection class
-and those that are used to customize conversion to domain models that are 
grouped in your transformation rule class.
+and those that are used to customize conversion to domain models that are 
grouped in your scope config class.
 For example, to add `url` and `token` parameter, edit `MyPluginConnection` as 
follow:
 
 ```python
@@ -112,17 +113,16 @@ To get the `str` value, you need to call 
`get_secret_value()`: `connection.token
 All plugin methods that have a connection parameter will be called with an 
instance of this class.
 Note that you should not define `__init__`.
 
-### Transformation rule parameters
-
+### Scope config
 
-Transformation rules are used to customize the conversion of data from the 
tool layer to the domain layer. For example, you can define a regex to match 
issue type from issue name.
+A scope config contains the list of domain entities to collect and optionally 
some parameters used to customize the conversion of data from the tool layer to 
the domain layer. For example, you can define a regex to match issue type from 
issue name.
 
 ```python
-class MyPluginTransformationRule(TransformationRule):
+class MyPluginScopeConfig(ScopeConfig):
     issue_type_regex: str
 ```
 
-Not all plugins need transformation rules, so you can omit this class.
+If your plugin does not require any such conversion parameter, leave this 
class empty.
 
 
 ### Tool scope type
@@ -238,7 +238,7 @@ To facilitate or even eliminate extraction, your tool 
models should be close to
 
 #### Migration of tool models
 
-Tool models, connection, scope and transformation rule types are stored in the 
DevLake database.
+Tool models, connection, scope and scope config types are stored in the 
DevLake database.
 When you change the definition of one of those types, the database needs to be 
migrated.
 Automatic migration takes care of most modifications, but some changes require 
manual migration. For example, automatic migration never drops columns. Another 
example is adding a column to the primary key of a table, you need to write a 
script that remove the primary key constraint and add a new compound primary 
key.
 
diff --git a/backend/python/plugins/azuredevops/azuredevops/main.py 
b/backend/python/plugins/azuredevops/azuredevops/main.py
index 28d512572..f65cde172 100644
--- a/backend/python/plugins/azuredevops/azuredevops/main.py
+++ b/backend/python/plugins/azuredevops/azuredevops/main.py
@@ -16,13 +16,13 @@
 from urllib.parse import urlparse
 
 from azuredevops.api import AzureDevOpsAPI
-from azuredevops.models import AzureDevOpsConnection, GitRepository, 
AzureDevOpsTransformationRule
+from azuredevops.models import AzureDevOpsConnection, GitRepository, 
GitRepositoryConfig
 from azuredevops.streams.builds import Builds
 from azuredevops.streams.jobs import Jobs
 from azuredevops.streams.pull_request_commits import GitPullRequestCommits
 from azuredevops.streams.pull_requests import GitPullRequests
 
-from pydevlake import Plugin, RemoteScopeGroup, DomainType, ScopeTxRulePair
+from pydevlake import Plugin, RemoteScopeGroup, DomainType, ScopeConfigPair
 from pydevlake.domain_layer.code import Repo
 from pydevlake.domain_layer.devops import CicdScope
 from pydevlake.pipeline_tasks import gitextractor, refdiff
@@ -40,8 +40,8 @@ class AzureDevOpsPlugin(Plugin):
         return GitRepository
 
     @property
-    def transformation_rule_type(self):
-        return AzureDevOpsTransformationRule
+    def scope_config_type(self):
+        return GitRepositoryConfig
 
     def domain_scopes(self, git_repo: GitRepository):
         yield Repo(
@@ -121,18 +121,17 @@ class AzureDevOpsPlugin(Plugin):
             except APIException as e:
                 raise Exception(f"Invalid token: {e}")
 
-    def extra_tasks(self, scope: GitRepository, tx_rule: 
AzureDevOpsTransformationRule, entity_types: list[DomainType], connection: 
AzureDevOpsConnection):
-        if DomainType.CODE in entity_types and not scope.is_external():
+    def extra_tasks(self, scope: GitRepository, scope_config: 
GitRepositoryConfig, connection: AzureDevOpsConnection):
+        if DomainType.CODE in scope_config.entity_types and not 
scope.is_external():
             url = urlparse(scope.remote_url)
             url = 
url._replace(netloc=f'{url.username}:{connection.token.get_secret_value()}@{url.hostname}')
             yield gitextractor(url.geturl(), scope.domain_id(), 
connection.proxy)
 
-    def extra_stages(self, scope_tx_rule_pairs: list[ScopeTxRulePair], 
entity_types: list[DomainType], _):
-        if DomainType.CODE in entity_types:
-            for scope, tx_rule in scope_tx_rule_pairs:
+    def extra_stages(self, scope_config_pairs: list[ScopeConfigPair], _):
+        if DomainType.CODE in config.entity_types:
+            for scope, config in scope_config_pairs:
                 if not scope.is_external():
-                    options = tx_rule.refdiff if tx_rule else None
-                    yield [refdiff(scope.id, options)]
+                    yield [refdiff(scope.id, config.refdiff)]
 
     @property
     def streams(self):
diff --git a/backend/python/plugins/azuredevops/azuredevops/models.py 
b/backend/python/plugins/azuredevops/azuredevops/models.py
index 07c7626b7..85f45232f 100644
--- a/backend/python/plugins/azuredevops/azuredevops/models.py
+++ b/backend/python/plugins/azuredevops/azuredevops/models.py
@@ -20,7 +20,7 @@ import re
 
 from pydantic import SecretStr
 
-from pydevlake import Field, Connection, TransformationRule
+from pydevlake import Field, Connection, ScopeConfig
 from pydevlake.model import ToolModel, ToolScope
 from pydevlake.pipeline_tasks import RefDiffOptions
 from pydevlake.migration import migration, MigrationScriptBuilder, Dialect
@@ -31,7 +31,7 @@ class AzureDevOpsConnection(Connection):
     organization: Optional[str]
 
 
-class AzureDevOpsTransformationRule(TransformationRule):
+class GitRepositoryConfig(ScopeConfig):
     refdiff: Optional[RefDiffOptions]
     deployment_pattern: Optional[re.Pattern]
     production_pattern: Optional[re.Pattern]
@@ -67,7 +67,7 @@ class GitPullRequest(ToolModel, table=True):
     target_commit_sha: str = Field(source='/lastMergeTargetCommit/commitId')
     merge_commit_sha: Optional[str] = Field(source='/lastMergeCommit/commitId')
     url: Optional[str]
-    type: Optional[str] = Field(source='/labels/0/name') # TODO: get this off 
transformation rules regex
+    type: Optional[str] = Field(source='/labels/0/name') # TODO: Add regex to 
scope config
     title: Optional[str]
     target_ref_name: Optional[str]
     source_ref_name: Optional[str]
@@ -140,3 +140,9 @@ def add_build_id_as_job_primary_key(b: 
MigrationScriptBuilder):
     b.execute(f'ALTER TABLE {table} DROP PRIMARY KEY', Dialect.MYSQL)
     b.execute(f'ALTER TABLE {table} DROP CONSTRAINT {table}_pkey', 
Dialect.POSTGRESQL)
     b.execute(f'ALTER TABLE {table} ADD PRIMARY KEY (id, build_id)')
+
+
+@migration(20230606165630)
+def rename_tx_rule_table_to_scope_config(b: MigrationScriptBuilder):
+    b.rename_table('_tool_azuredevops_azuredevopstransformationrules', 
GitRepositoryConfig.__tablename__)
+    b.add_column(GitRepositoryConfig.__tablename__, 'entities', 'json')
diff --git a/backend/python/plugins/azuredevops/azuredevops/streams/builds.py 
b/backend/python/plugins/azuredevops/azuredevops/streams/builds.py
index f9e6a37cb..46d0a9ea9 100644
--- a/backend/python/plugins/azuredevops/azuredevops/streams/builds.py
+++ b/backend/python/plugins/azuredevops/azuredevops/streams/builds.py
@@ -15,8 +15,6 @@
 
 from typing import Iterable
 
-import iso8601 as iso8601
-
 from azuredevops.api import AzureDevOpsAPI
 from azuredevops.models import GitRepository
 from azuredevops.models import Build
@@ -62,10 +60,10 @@ class Builds(Stream):
             status = devops.CICDStatus.IN_PROGRESS
 
         type = devops.CICDType.BUILD
-        if ctx.transformation_rule and 
ctx.transformation_rule.deployment_pattern.search(b.name):
+        if ctx.scope_config.deployment_pattern and 
ctx.scope_config.deployment_pattern.search(b.name):
             type = devops.CICDType.DEPLOYMENT
         environment = devops.CICDEnvironment.TESTING
-        if ctx.transformation_rule and 
ctx.transformation_rule.production_pattern.search(b.name):
+        if ctx.scope_config.production_pattern and 
ctx.scope_config.production_pattern.search(b.name):
             environment = devops.CICDEnvironment.PRODUCTION
 
         if b.finish_time:
diff --git a/backend/python/plugins/azuredevops/azuredevops/streams/jobs.py 
b/backend/python/plugins/azuredevops/azuredevops/streams/jobs.py
index 76a80f981..cc1c1bbe4 100644
--- a/backend/python/plugins/azuredevops/azuredevops/streams/jobs.py
+++ b/backend/python/plugins/azuredevops/azuredevops/streams/jobs.py
@@ -79,10 +79,10 @@ class Jobs(Substream):
             status = devops.CICDStatus.IN_PROGRESS
 
         type = devops.CICDType.BUILD
-        if ctx.transformation_rule and 
ctx.transformation_rule.deployment_pattern.search(j.name):
+        if ctx.scope_config.deployment_pattern and 
ctx.scope_config.deployment_pattern.search(j.name):
             type = devops.CICDType.DEPLOYMENT
         environment = devops.CICDEnvironment.TESTING
-        if ctx.transformation_rule and 
ctx.transformation_rule.production_pattern.search(j.name):
+        if ctx.scope_config.production_pattern and 
ctx.scope_config.production_pattern.search(j.name):
             environment = devops.CICDEnvironment.PRODUCTION
 
         if j.finish_time:
diff --git a/backend/python/plugins/azuredevops/poetry.lock 
b/backend/python/plugins/azuredevops/poetry.lock
index 85afd5cb0..1a2890754 100644
--- a/backend/python/plugins/azuredevops/poetry.lock
+++ b/backend/python/plugins/azuredevops/poetry.lock
@@ -1,22 +1,20 @@
-# This file is automatically @generated by Poetry 1.4.1 and should not be 
changed by hand.
+# This file is automatically @generated by Poetry 1.5.0 and should not be 
changed by hand.
 
 [[package]]
 name = "certifi"
-version = "2022.12.7"
+version = "2023.5.7"
 description = "Python package for providing Mozilla's CA Bundle."
-category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2022.12.7-py3-none-any.whl", hash = 
"sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
-    {file = "certifi-2022.12.7.tar.gz", hash = 
"sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
+    {file = "certifi-2023.5.7-py3-none-any.whl", hash = 
"sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"},
+    {file = "certifi-2023.5.7.tar.gz", hash = 
"sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"},
 ]
 
 [[package]]
 name = "charset-normalizer"
 version = "3.1.0"
 description = "The Real First Universal Charset Detector. Open, modern and 
actively maintained alternative to Chardet."
-category = "main"
 optional = false
 python-versions = ">=3.7.0"
 files = [
@@ -101,7 +99,6 @@ files = [
 name = "colorama"
 version = "0.4.6"
 description = "Cross-platform colored terminal text."
-category = "main"
 optional = false
 python-versions = 
"!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
 files = [
@@ -113,7 +110,6 @@ files = [
 name = "exceptiongroup"
 version = "1.1.1"
 description = "Backport of PEP 654 (exception groups)"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -128,7 +124,6 @@ test = ["pytest (>=6)"]
 name = "fire"
 version = "0.4.0"
 description = "A library for automatically generating command line interfaces."
-category = "main"
 optional = false
 python-versions = "*"
 files = [
@@ -143,7 +138,6 @@ termcolor = "*"
 name = "greenlet"
 version = "2.0.2"
 description = "Lightweight in-process concurrent programming"
-category = "main"
 optional = false
 python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
 files = [
@@ -217,7 +211,6 @@ test = ["objgraph", "psutil"]
 name = "idna"
 version = "3.4"
 description = "Internationalized Domain Names in Applications (IDNA)"
-category = "main"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -229,7 +222,6 @@ files = [
 name = "inflect"
 version = "6.0.4"
 description = "Correctly generate plurals, singular nouns, ordinals, 
indefinite articles; convert numbers to words"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -248,7 +240,6 @@ testing = ["flake8 (<5)", "pygments", "pytest (>=6)", 
"pytest-black (>=0.3.7)",
 name = "iniconfig"
 version = "2.0.0"
 description = "brain-dead simple config-ini parsing"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -256,23 +247,10 @@ files = [
     {file = "iniconfig-2.0.0.tar.gz", hash = 
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
 ]
 
-[[package]]
-name = "iso8601"
-version = "1.1.0"
-description = "Simple module to parse ISO 8601 dates"
-category = "main"
-optional = false
-python-versions = ">=3.6.2,<4.0"
-files = [
-    {file = "iso8601-1.1.0-py3-none-any.whl", hash = 
"sha256:8400e90141bf792bce2634df533dc57e3bee19ea120a87bebcd3da89a58ad73f"},
-    {file = "iso8601-1.1.0.tar.gz", hash = 
"sha256:32811e7b81deee2063ea6d2e94f8819a86d1f3811e49d23623a41fa832bef03f"},
-]
-
 [[package]]
 name = "jsonpointer"
 version = "2.3"
 description = "Identify specific nodes in a JSON document (RFC 6901)"
-category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
@@ -284,7 +262,6 @@ files = [
 name = "jsonref"
 version = "1.1.0"
 description = "jsonref is a library for automatic dereferencing of JSON 
Reference objects for Python."
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -296,7 +273,6 @@ files = [
 name = "mysqlclient"
 version = "2.1.1"
 description = "Python interface to MySQL"
-category = "main"
 optional = false
 python-versions = ">=3.5"
 files = [
@@ -313,7 +289,6 @@ files = [
 name = "packaging"
 version = "23.1"
 description = "Core utilities for Python packages"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -325,7 +300,6 @@ files = [
 name = "pluggy"
 version = "1.0.0"
 description = "plugin and hook calling mechanisms for python"
-category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -341,7 +315,6 @@ testing = ["pytest", "pytest-benchmark"]
 name = "psycopg2"
 version = "2.9.6"
 description = "psycopg2 - Python-PostgreSQL Database Adapter"
-category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -362,48 +335,47 @@ files = [
 
 [[package]]
 name = "pydantic"
-version = "1.10.7"
+version = "1.10.8"
 description = "Data validation and settings management using python type hints"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"},
-    {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"},
-    {file = 
"pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"},
-    {file = 
"pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"},
-    {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"},
-    {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"},
-    {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = 
"sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"},
-    {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"},
-    {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"},
-    {file = 
"pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"},
-    {file = 
"pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"},
-    {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"},
-    {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"},
-    {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = 
"sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"},
-    {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"},
-    {file = 
"pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"},
-    {file = 
"pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"},
-    {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"},
-    {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"},
-    {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = 
"sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"},
-    {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"},
-    {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"},
-    {file = 
"pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"},
-    {file = 
"pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"},
-    {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"},
-    {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"},
-    {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = 
"sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"},
-    {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"},
-    {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"},
-    {file = 
"pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"},
-    {file = 
"pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"},
-    {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"},
-    {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"},
-    {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = 
"sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"},
-    {file = "pydantic-1.10.7-py3-none-any.whl", hash = 
"sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"},
-    {file = "pydantic-1.10.7.tar.gz", hash = 
"sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"},
+    {file = "pydantic-1.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:1243d28e9b05003a89d72e7915fdb26ffd1d39bdd39b00b7dbe4afae4b557f9d"},
+    {file = "pydantic-1.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c0ab53b609c11dfc0c060d94335993cc2b95b2150e25583bec37a49b2d6c6c3f"},
+    {file = 
"pydantic-1.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:f9613fadad06b4f3bc5db2653ce2f22e0de84a7c6c293909b48f6ed37b83c61f"},
+    {file = 
"pydantic-1.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:df7800cb1984d8f6e249351139667a8c50a379009271ee6236138a22a0c0f319"},
+    {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:0c6fafa0965b539d7aab0a673a046466d23b86e4b0e8019d25fd53f4df62c277"},
+    {file = "pydantic-1.10.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:e82d4566fcd527eae8b244fa952d99f2ca3172b7e97add0b43e2d97ee77f81ab"},
+    {file = "pydantic-1.10.8-cp310-cp310-win_amd64.whl", hash = 
"sha256:ab523c31e22943713d80d8d342d23b6f6ac4b792a1e54064a8d0cf78fd64e800"},
+    {file = "pydantic-1.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:666bdf6066bf6dbc107b30d034615d2627e2121506c555f73f90b54a463d1f33"},
+    {file = "pydantic-1.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:35db5301b82e8661fa9c505c800d0990bc14e9f36f98932bb1d248c0ac5cada5"},
+    {file = 
"pydantic-1.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:f90c1e29f447557e9e26afb1c4dbf8768a10cc676e3781b6a577841ade126b85"},
+    {file = 
"pydantic-1.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:93e766b4a8226e0708ef243e843105bf124e21331694367f95f4e3b4a92bbb3f"},
+    {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:88f195f582851e8db960b4a94c3e3ad25692c1c1539e2552f3df7a9e972ef60e"},
+    {file = "pydantic-1.10.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:34d327c81e68a1ecb52fe9c8d50c8a9b3e90d3c8ad991bfc8f953fb477d42fb4"},
+    {file = "pydantic-1.10.8-cp311-cp311-win_amd64.whl", hash = 
"sha256:d532bf00f381bd6bc62cabc7d1372096b75a33bc197a312b03f5838b4fb84edd"},
+    {file = "pydantic-1.10.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:7d5b8641c24886d764a74ec541d2fc2c7fb19f6da2a4001e6d580ba4a38f7878"},
+    {file = 
"pydantic-1.10.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:7b1f6cb446470b7ddf86c2e57cd119a24959af2b01e552f60705910663af09a4"},
+    {file = 
"pydantic-1.10.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:c33b60054b2136aef8cf190cd4c52a3daa20b2263917c49adad20eaf381e823b"},
+    {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:1952526ba40b220b912cdc43c1c32bcf4a58e3f192fa313ee665916b26befb68"},
+    {file = "pydantic-1.10.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:bb14388ec45a7a0dc429e87def6396f9e73c8c77818c927b6a60706603d5f2ea"},
+    {file = "pydantic-1.10.8-cp37-cp37m-win_amd64.whl", hash = 
"sha256:16f8c3e33af1e9bb16c7a91fc7d5fa9fe27298e9f299cff6cb744d89d573d62c"},
+    {file = "pydantic-1.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1ced8375969673929809d7f36ad322934c35de4af3b5e5b09ec967c21f9f7887"},
+    {file = "pydantic-1.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:93e6bcfccbd831894a6a434b0aeb1947f9e70b7468f274154d03d71fabb1d7c6"},
+    {file = 
"pydantic-1.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:191ba419b605f897ede9892f6c56fb182f40a15d309ef0142212200a10af4c18"},
+    {file = 
"pydantic-1.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:052d8654cb65174d6f9490cc9b9a200083a82cf5c3c5d3985db765757eb3b375"},
+    {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:ceb6a23bf1ba4b837d0cfe378329ad3f351b5897c8d4914ce95b85fba96da5a1"},
+    {file = "pydantic-1.10.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:6f2e754d5566f050954727c77f094e01793bcb5725b663bf628fa6743a5a9108"},
+    {file = "pydantic-1.10.8-cp38-cp38-win_amd64.whl", hash = 
"sha256:6a82d6cda82258efca32b40040228ecf43a548671cb174a1e81477195ed3ed56"},
+    {file = "pydantic-1.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3e59417ba8a17265e632af99cc5f35ec309de5980c440c255ab1ca3ae96a3e0e"},
+    {file = "pydantic-1.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:84d80219c3f8d4cad44575e18404099c76851bc924ce5ab1c4c8bb5e2a2227d0"},
+    {file = 
"pydantic-1.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:2e4148e635994d57d834be1182a44bdb07dd867fa3c2d1b37002000646cc5459"},
+    {file = 
"pydantic-1.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
 hash = 
"sha256:12f7b0bf8553e310e530e9f3a2f5734c68699f42218bf3568ef49cd9b0e44df4"},
+    {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:42aa0c4b5c3025483240a25b09f3c09a189481ddda2ea3a831a9d25f444e03c1"},
+    {file = "pydantic-1.10.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:17aef11cc1b997f9d574b91909fed40761e13fac438d72b81f902226a69dac01"},
+    {file = "pydantic-1.10.8-cp39-cp39-win_amd64.whl", hash = 
"sha256:66a703d1983c675a6e0fed8953b0971c44dba48a929a2000a493c3772eb61a5a"},
+    {file = "pydantic-1.10.8-py3-none-any.whl", hash = 
"sha256:7456eb22ed9aaa24ff3e7b4757da20d9e5ce2a81018c1b3ebd81a0b88a18f3b2"},
+    {file = "pydantic-1.10.8.tar.gz", hash = 
"sha256:1410275520dfa70effadf4c21811d755e7ef9bb1f1d077a21958153a92c8d9ca"},
 ]
 
 [package.dependencies]
@@ -415,20 +387,18 @@ email = ["email-validator (>=1.0.3)"]
 
 [[package]]
 name = "pydevd-pycharm"
-version = "231.8770.15"
+version = "231.9011.38"
 description = "PyCharm Debugger (used in PyCharm and PyDev)"
-category = "main"
 optional = false
 python-versions = "*"
 files = [
-    {file = "pydevd-pycharm-231.8770.15.tar.gz", hash = 
"sha256:607eb16a0d0e28dd05f68b7b332fd1dcc2cce1faae28db2e0b2df6765edebd7f"},
+    {file = "pydevd-pycharm-231.9011.38.tar.gz", hash = 
"sha256:0509e989a6a037ee71cf47ad1301e6304aedb6f1d2ca7e8cc80d53e8c8e2f97f"},
 ]
 
 [[package]]
 name = "pydevlake"
 version = "0.1.0"
 description = "Devlake plugin framework"
-category = "main"
 optional = false
 python-versions = "~3.9"
 files = []
@@ -455,7 +425,6 @@ url = "../../pydevlake"
 name = "pytest"
 version = "7.3.1"
 description = "pytest: simple powerful testing with Python"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -476,21 +445,20 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis 
(>=3.56)", "mock", "no
 
 [[package]]
 name = "requests"
-version = "2.28.2"
+version = "2.31.0"
 description = "Python HTTP for Humans."
-category = "main"
 optional = false
-python-versions = ">=3.7, <4"
+python-versions = ">=3.7"
 files = [
-    {file = "requests-2.28.2-py3-none-any.whl", hash = 
"sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"},
-    {file = "requests-2.28.2.tar.gz", hash = 
"sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"},
+    {file = "requests-2.31.0-py3-none-any.whl", hash = 
"sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+    {file = "requests-2.31.0.tar.gz", hash = 
"sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
 ]
 
 [package.dependencies]
 certifi = ">=2017.4.17"
 charset-normalizer = ">=2,<4"
 idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<1.27"
+urllib3 = ">=1.21.1,<3"
 
 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
@@ -500,7 +468,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 name = "six"
 version = "1.16.0"
 description = "Python 2 and 3 compatibility utilities"
-category = "main"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
 files = [
@@ -512,7 +479,6 @@ files = [
 name = "sqlalchemy"
 version = "1.4.41"
 description = "Database Abstraction Library"
-category = "main"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
 files = [
@@ -560,7 +526,7 @@ files = [
 ]
 
 [package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and 
platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine 
== \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or 
python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= 
\"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and 
platform_machine == \"win32\" or python_version >= \"3\" and platform_machine 
== \"WIN32\""}
+greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and 
(platform_machine == \"win32\" or platform_machine == \"WIN32\" or 
platform_machine == \"AMD64\" or platform_machine == \"amd64\" or 
platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or 
platform_machine == \"aarch64\")"}
 
 [package.extras]
 aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
@@ -587,7 +553,6 @@ sqlcipher = ["sqlcipher3-binary"]
 name = "sqlalchemy2-stubs"
 version = "0.0.2a34"
 description = "Typing Stubs for SQLAlchemy 1.4"
-category = "main"
 optional = false
 python-versions = ">=3.6"
 files = [
@@ -602,7 +567,6 @@ typing-extensions = ">=3.7.4"
 name = "sqlmodel"
 version = "0.0.8"
 description = "SQLModel, SQL databases in Python, designed for simplicity, 
compatibility, and robustness."
-category = "main"
 optional = false
 python-versions = ">=3.6.1,<4.0.0"
 files = [
@@ -617,14 +581,13 @@ sqlalchemy2-stubs = "*"
 
 [[package]]
 name = "termcolor"
-version = "2.2.0"
+version = "2.3.0"
 description = "ANSI color formatting for output in terminal"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "termcolor-2.2.0-py3-none-any.whl", hash = 
"sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"},
-    {file = "termcolor-2.2.0.tar.gz", hash = 
"sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"},
+    {file = "termcolor-2.3.0-py3-none-any.whl", hash = 
"sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"},
+    {file = "termcolor-2.3.0.tar.gz", hash = 
"sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"},
 ]
 
 [package.extras]
@@ -634,7 +597,6 @@ tests = ["pytest", "pytest-cov"]
 name = "tomli"
 version = "2.0.1"
 description = "A lil' TOML parser"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
@@ -644,34 +606,33 @@ files = [
 
 [[package]]
 name = "typing-extensions"
-version = "4.5.0"
+version = "4.6.3"
 description = "Backported and Experimental Type Hints for Python 3.7+"
-category = "main"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = 
"sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
-    {file = "typing_extensions-4.5.0.tar.gz", hash = 
"sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
+    {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = 
"sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"},
+    {file = "typing_extensions-4.6.3.tar.gz", hash = 
"sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"},
 ]
 
 [[package]]
 name = "urllib3"
-version = "1.26.15"
+version = "2.0.2"
 description = "HTTP library with thread-safe connection pooling, file post, 
and more."
-category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
+python-versions = ">=3.7"
 files = [
-    {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = 
"sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
-    {file = "urllib3-1.26.15.tar.gz", hash = 
"sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
+    {file = "urllib3-2.0.2-py3-none-any.whl", hash = 
"sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"},
+    {file = "urllib3-2.0.2.tar.gz", hash = 
"sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"},
 ]
 
 [package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
-secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", 
"pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
-socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl 
(>=17.1.0)", "urllib3-secure-extra"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
 
 [metadata]
 lock-version = "2.0"
 python-versions = "~3.9"
-content-hash = 
"f028a55435b72ab4a5d4a4055db1ca6815a750c8c8e4f00404cff16d0d724454"
+content-hash = 
"444d589b32be702bc068fced0e477034f6cee0084bdce2cd39da890cc51255b9"
diff --git a/backend/python/plugins/azuredevops/pyproject.toml 
b/backend/python/plugins/azuredevops/pyproject.toml
index b168c86f2..df7e8d255 100644
--- a/backend/python/plugins/azuredevops/pyproject.toml
+++ b/backend/python/plugins/azuredevops/pyproject.toml
@@ -23,7 +23,6 @@ readme = "README.md"
 [tool.poetry.dependencies]
 python = "~3.9"
 pydevlake = { path = "../../pydevlake", develop = true }
-iso8601 = "^1.1.0"
 
 
 [tool.poetry.group.dev.dependencies]
diff --git a/backend/python/plugins/azuredevops/tests/plugin_test.py 
b/backend/python/plugins/azuredevops/tests/plugin_test.py
index 2351f277f..e2826b64c 100644
--- a/backend/python/plugins/azuredevops/tests/plugin_test.py
+++ b/backend/python/plugins/azuredevops/tests/plugin_test.py
@@ -18,7 +18,7 @@ import pytest
 
 from pydevlake.testing import assert_valid_plugin, assert_plugin_run
 
-from azuredevops.models import AzureDevOpsConnection, 
AzureDevOpsTransformationRule
+from azuredevops.models import AzureDevOpsConnection, GitRepositoryConfig
 from azuredevops.main import AzureDevOpsPlugin
 
 
@@ -34,6 +34,6 @@ def test_valid_plugin_and_connection():
 
     plugin = AzureDevOpsPlugin()
     connection = AzureDevOpsConnection(id=1, name='test_connection', 
token=token)
-    tx_rule = AzureDevOpsTransformationRule(id=1, name='test_rule')
+    scope_config = GitRepositoryConfig(id=1, name='test_config')
 
-    assert_plugin_run(plugin, connection, tx_rule)
+    assert_plugin_run(plugin, connection, scope_config)
diff --git a/backend/python/plugins/azuredevops/tests/streams_test.py 
b/backend/python/plugins/azuredevops/tests/streams_test.py
index f8f5dfe65..da5889ee9 100644
--- a/backend/python/plugins/azuredevops/tests/streams_test.py
+++ b/backend/python/plugins/azuredevops/tests/streams_test.py
@@ -27,7 +27,7 @@ def context():
     return (
         ContextBuilder(AzureDevOpsPlugin())
         .with_connection(token='token')
-        .with_transformation_rule(deployment_pattern='deploy',
+        .with_scope_config(deployment_pattern='deploy',
                                   production_pattern='prod')
         .with_scope('johndoe/test-repo', 
url='https://github.com/johndoe/test-repo')
         .build()
diff --git a/backend/python/pydevlake/pydevlake/__init__.py 
b/backend/python/pydevlake/pydevlake/__init__.py
index 0c10c3e9f..aff946fce 100644
--- a/backend/python/pydevlake/pydevlake/__init__.py
+++ b/backend/python/pydevlake/pydevlake/__init__.py
@@ -33,11 +33,11 @@ def Field(*args, primary_key: bool=False, source: 
Optional[str]=None, **kwargs):
     return _Field(*args, **kwargs, primary_key=primary_key, 
schema_extra=schema_extra)
 
 
-from .model import ToolModel, ToolScope, DomainScope, Connection, 
TransformationRule, domain_id
+from .model import ToolModel, ToolScope, DomainScope, Connection, ScopeConfig, 
DomainType, domain_id
 from .logger import logger
 from .message import RemoteScopeGroup
-from .plugin import Plugin, ScopeTxRulePair
-from .stream import DomainType, Stream, Substream
+from .plugin import Plugin, ScopeConfigPair
+from .stream import Stream, Substream
 from .context import Context
 
 # the debugger hangs on startup during plugin registration (reason unknown), 
hence this workaround
diff --git a/backend/python/pydevlake/pydevlake/context.py 
b/backend/python/pydevlake/pydevlake/context.py
index a9d5f4d11..1c543d349 100644
--- a/backend/python/pydevlake/pydevlake/context.py
+++ b/backend/python/pydevlake/pydevlake/context.py
@@ -16,20 +16,20 @@
 
 from sqlalchemy.engine import Engine
 
-from pydevlake.model import Connection, TransformationRule, ToolScope
+from pydevlake.model import Connection, ScopeConfig, ToolScope
 
 
 class Context:
     def __init__(self,
                  engine: Engine,
-                 scope: ToolScope,
                  connection: Connection,
-                 transformation_rule: TransformationRule = None,
+                 scope: ToolScope,
+                 scope_config: ScopeConfig = None,
                  options: dict = None):
         self.engine = engine
-        self.scope = scope
         self.connection = connection
-        self.transformation_rule = transformation_rule
+        self.scope = scope
+        self.scope_config = scope_config
         self.options = options or {}
         self._engine = None
 
diff --git a/backend/python/pydevlake/pydevlake/ipc.py 
b/backend/python/pydevlake/pydevlake/ipc.py
index 9b0e851dd..7a9fd237f 100644
--- a/backend/python/pydevlake/pydevlake/ipc.py
+++ b/backend/python/pydevlake/pydevlake/ipc.py
@@ -26,8 +26,7 @@ from sqlalchemy.engine import Engine
 
 from pydevlake.context import Context
 from pydevlake.message import Message
-from pydevlake.stream import DomainType
-from pydevlake.model import SubtaskRun
+from pydevlake.model import DomainType, SubtaskRun
 
 
 def plugin_method(func):
@@ -89,17 +88,16 @@ class PluginCommands:
         self._plugin.test_connection(connection)
 
     @plugin_method
-    def make_pipeline(self, scope_tx_rule_pairs: list[tuple[dict, dict]], 
entities: list[str], connection: dict):
+    def make_pipeline(self, scope_config_pairs: list[tuple[dict, dict]], 
connection: dict):
         connection = self._plugin.connection_type(**connection)
-        scope_tx_rule_pairs = [
+        scope_config_pairs = [
             (
                 self._plugin.tool_scope_type(**raw_scope),
-                self._plugin.transformation_rule_type(**raw_tx_rule) if 
raw_tx_rule else None
+                self._plugin.scope_config_type(**raw_config)
             )
-            for raw_scope, raw_tx_rule in scope_tx_rule_pairs
+            for raw_scope, raw_config in scope_config_pairs
         ]
-        entities = [DomainType(e) for e in entities]
-        return self._plugin.make_pipeline(scope_tx_rule_pairs, entities, 
connection)
+        return self._plugin.make_pipeline(scope_config_pairs, connection)
 
     @plugin_method
     def plugin_info(self):
@@ -116,13 +114,10 @@ class PluginCommands:
         scope = self._plugin.tool_scope_type(**scope_dict)
         connection_dict = data['connection']
         connection = self._plugin.connection_type(**connection_dict)
-        raw_tx_rule = data.get('transformation_rule')
-        if self._plugin.transformation_rule_type and raw_tx_rule:
-            transformation_rule = 
self._plugin.transformation_rule_type(**raw_tx_rule)
-        else:
-            transformation_rule = None
+        scope_config_dict = data['scope_config']
+        scope_config = self._plugin.scope_config_type(**scope_config_dict)
         options = data.get('options', {})
-        return Context(create_db_engine(db_url), scope, connection, 
transformation_rule, options)
+        return Context(create_db_engine(db_url), connection, scope, 
scope_config, options)
 
 def create_db_engine(db_url) -> Engine:
     # SQLAlchemy doesn't understand postgres:// scheme
diff --git a/backend/python/pydevlake/pydevlake/message.py 
b/backend/python/pydevlake/pydevlake/message.py
index 44228f56c..54e078235 100644
--- a/backend/python/pydevlake/pydevlake/message.py
+++ b/backend/python/pydevlake/pydevlake/message.py
@@ -53,8 +53,8 @@ class DynamicModelInfo(Message):
         for prop in schema['properties'].values():
             if 'type' not in prop and 'enum' in prop:
                 prop['type'] = 'string'
         return DynamicModelInfo(
             json_schema=schema,
             table_name=model_class.__tablename__
         )
 
@@ -63,8 +62,8 @@ class PluginInfo(Message):
     name: str
     description: str
     connection_model_info: DynamicModelInfo
-    transformation_rule_model_info: Optional[DynamicModelInfo]
     scope_model_info: DynamicModelInfo
+    scope_config_model_info: Optional[DynamicModelInfo]
     tool_model_infos: list[DynamicModelInfo]
     migration_scripts: list[MigrationScript]
     plugin_path: str
diff --git a/backend/python/pydevlake/pydevlake/migration.py 
b/backend/python/pydevlake/pydevlake/migration.py
index e64eb9115..375fe91b5 100644
--- a/backend/python/pydevlake/pydevlake/migration.py
+++ b/backend/python/pydevlake/pydevlake/migration.py
@@ -34,6 +34,13 @@ class Execute(BaseModel):
     dialect: Optional[Dialect] = None
 
 
+class AddColumn(BaseModel):
+    type: Literal["add_column"] = "add_column"
+    table: str
+    column: str
+    column_type: str
+
+
 class DropColumn(BaseModel):
     type: Literal["drop_column"] = "drop_column"
     table: str
@@ -45,8 +52,21 @@ class DropTable(BaseModel):
     table: str
 
 
+class RenameColumn(BaseModel):
+    type: Literal["rename_column"] = "rename_column"
+    table: str
+    old_name: str
+    new_name: str
+
+
+class RenameTable(BaseModel):
+    type: Literal["rename_table"] = "rename_table"
+    old_name: str
+    new_name: str
+
+
 Operation = Annotated[
-    Union[Execute, DropColumn, DropTable],
+    Union[Execute, AddColumn, DropColumn, RenameColumn, DropTable, 
RenameTable],
     Field(discriminator="type")
 ]
 
@@ -68,18 +88,36 @@ class MigrationScriptBuilder:
         """
         self.operations.append(Execute(sql=sql, dialect=dialect))
 
+    def add_column(self, table: str, column: str, type: str):
+        """
+        Adds a column to a table if it does not exist.
+        """
+        self.operations.append(AddColumn(table=table, column=column, 
column_type=type))
+
     def drop_column(self, table: str, column: str):
         """
-        Drops a column from a table.
+        Drops a column from a table if it exists.
         """
         self.operations.append(DropColumn(table=table, column=column))
 
+    def rename_column(self, table: str, old_name: str, new_name: str):
+        """
+        Renames a column in a table.
+        """
+        self.operations.append(RenameColumn(table=table, old_name=old_name, 
new_name=new_name))
+
     def drop_table(self, table: str):
         """
-        Drops a table.
+        Drops a table if it exists.
         """
         self.operations.append(DropTable(table=table))
 
+    def rename_table(self, old_name: str, new_name: str):
+        """
+        Renames a table if it exists and the new name is not already taken.
+        """
+        self.operations.append(RenameTable(old_name=old_name, 
new_name=new_name))
+
 
 def migration(version: int, name: Optional[str] = None):
     """
diff --git a/backend/python/pydevlake/pydevlake/model.py 
b/backend/python/pydevlake/pydevlake/model.py
index 81b10deaf..8d196d1c2 100644
--- a/backend/python/pydevlake/pydevlake/model.py
+++ b/backend/python/pydevlake/pydevlake/model.py
@@ -18,6 +18,7 @@ import os
 from typing import Iterable, Optional
 from inspect import getmodule
 from datetime import datetime
+from enum import Enum
 
 import inflect
 from pydantic import AnyUrl, SecretStr, validator
@@ -70,9 +71,18 @@ class Connection(ToolTable, Model):
             return None
         return proxy
 
+class DomainType(Enum):
+    CODE = "CODE"
+    TICKET = "TICKET"
+    CODE_REVIEW = "CODEREVIEW"
+    CROSS = "CROSS"
+    CICD = "CICD"
+    CODE_QUALITY = "CODEQUALITY"
 
-class TransformationRule(ToolTable, Model):
+
+class ScopeConfig(ToolTable, Model):
     name: str
+    domain_types: list[DomainType] = Field(default_factory=list, 
alias="entities")
 
 
 class RawModel(SQLModel):
diff --git a/backend/python/pydevlake/pydevlake/plugin.py 
b/backend/python/pydevlake/pydevlake/plugin.py
index fecdc0f11..03a2c5981 100644
--- a/backend/python/pydevlake/pydevlake/plugin.py
+++ b/backend/python/pydevlake/pydevlake/plugin.py
@@ -27,11 +27,11 @@ from pydevlake.logger import logger
 from pydevlake.ipc import PluginCommands
 from pydevlake.context import Context
 from pydevlake.stream import Stream, DomainType
-from pydevlake.model import ToolScope, DomainScope, Connection, 
TransformationRule
+from pydevlake.model import ToolScope, DomainScope, Connection, ScopeConfig
 from pydevlake.migration import MIGRATION_SCRIPTS
 
 
-ScopeTxRulePair = tuple[ToolScope, Optional[TransformationRule]]
+ScopeConfigPair = tuple[ToolScope, ScopeConfig]
 
 
 class Plugin(ABC):
@@ -64,7 +64,7 @@ class Plugin(ABC):
         pass
 
     @property
-    def transformation_rule_type(self) -> Type[TransformationRule]:
+    def scope_config_type(self) -> Type[ScopeConfig]:
         return None
 
     @abstractmethod
@@ -128,15 +128,14 @@ class Plugin(ABC):
             remote_scopes = self.remote_scope_groups(connection)
         return msg.RemoteScopes(__root__=remote_scopes)
 
-    def make_pipeline(self, scope_tx_rule_pairs: list[ScopeTxRulePair],
-                      entity_types: list[DomainType], connection: Connection):
+    def make_pipeline(self, scope_config_pairs: list[ScopeConfigPair],
+                      connection: Connection) -> msg.PipelineData:
         """
         Make a simple pipeline using the scopes declared by the plugin.
         """
-        entity_types = entity_types or list(DomainType)
-        plan = self.make_pipeline_plan(scope_tx_rule_pairs, entity_types, 
connection)
+        plan = self.make_pipeline_plan(scope_config_pairs, connection)
         domain_scopes = []
-        for tool_scope, _ in scope_tx_rule_pairs:
+        for tool_scope, _ in scope_config_pairs:
             for scope in self.domain_scopes(tool_scope):
                 scope.id = tool_scope.domain_id()
                 domain_scopes.append(
@@ -150,24 +150,24 @@ class Plugin(ABC):
             scopes=domain_scopes
         )
 
-    def make_pipeline_plan(self, scope_tx_rule_pairs: list[ScopeTxRulePair],
-                           entity_types: list[DomainType], connection: 
Connection) -> list[list[msg.PipelineTask]]:
+    def make_pipeline_plan(self, scope_config_pairs: list[ScopeConfigPair],
+                           connection: Connection) -> 
list[list[msg.PipelineTask]]:
         """
         Generate a pipeline plan with one stage per scope, plus optional 
additional stages.
         Redefine `extra_stages` to add stages at the end of this pipeline.
         """
         return [
-            *(self.make_pipeline_stage(scope, tx_rule, entity_types, 
connection) for scope, tx_rule in scope_tx_rule_pairs),
-            *self.extra_stages(scope_tx_rule_pairs, entity_types, connection)
+            *(self.make_pipeline_stage(scope, config, connection) for scope, 
config in scope_config_pairs),
+            *self.extra_stages(scope_config_pairs, connection)
         ]
 
-    def extra_stages(self, scope_tx_rule_pairs: list[ScopeTxRulePair],
-                     entity_types: list[DomainType], connection: Connection) 
-> list[list[msg.PipelineTask]]:
+    def extra_stages(self, scope_config_pairs: list[ScopeConfigPair],
+                     connection: Connection) -> list[list[msg.PipelineTask]]:
         """Override this method to add extra stages to the pipeline plan"""
         return []
 
-    def make_pipeline_stage(self, scope: ToolScope, tx_rule: 
Optional[TransformationRule],
-                            entity_types: list[DomainType], connection: 
Connection) -> list[msg.PipelineTask]:
+    def make_pipeline_stage(self, scope: ToolScope, config: ScopeConfig,
+                            connection: Connection) -> list[msg.PipelineTask]:
         """
         Generate a pipeline stage for the given scope, plus optional 
additional tasks.
         Subtasks are selected from `entity_types` via `select_subtasks`.
@@ -177,28 +177,28 @@ class Plugin(ABC):
             msg.PipelineTask(
                 plugin=self.name,
                 skip_on_fail=False,
-                subtasks=self.select_subtasks(scope, entity_types),
+                subtasks=self.select_subtasks(scope, config),
                 options={
                     "scopeId": scope.id,
                     "scopeName": scope.name,
                     "connectionId": connection.id
                 }
             ),
-            *self.extra_tasks(scope, tx_rule, entity_types, connection)
+            *self.extra_tasks(scope, config, connection)
         ]
 
-    def extra_tasks(self, scope: ToolScope, tx_rule: 
Optional[TransformationRule],
-                    entity_types: list[DomainType], connection: Connection) -> 
list[msg.PipelineTask]:
+    def extra_tasks(self, scope: ToolScope, config: ScopeConfig,
+                    connection: Connection) -> list[msg.PipelineTask]:
         """Override this method to add tasks to the given scope stage"""
         return []
 
-    def select_subtasks(self, scope: ToolScope, entity_types: 
list[DomainType]) -> list[str]:
+    def select_subtasks(self, scope: ToolScope, config: ScopeConfig) -> 
list[str]:
         """
         Returns the list of subtasks names that should be run for given scope 
and entity types.
         """
         subtasks = []
         for stream in self._streams.values():
-            if set(stream.domain_types).intersection(entity_types) and 
stream.should_run_on(scope):
+            if set(stream.domain_types).intersection(config.domain_types) and 
stream.should_run_on(scope):
                 for subtask in stream.subtasks:
                     subtasks.append(subtask.name)
         return subtasks
@@ -222,18 +222,14 @@ class Plugin(ABC):
             )
             for subtask in self.subtasks
         ]
-        if self.transformation_rule_type:
-            tx_rule_model_info = 
msg.DynamicModelInfo.from_model(self.transformation_rule_type)
-        else:
-            tx_rule_model_info = None
         return msg.PluginInfo(
             name=self.name,
             description=self.description,
             plugin_path=self._plugin_path(),
             extension="datasource",
             
connection_model_info=msg.DynamicModelInfo.from_model(self.connection_type),
-            transformation_rule_model_info=tx_rule_model_info,
             
scope_model_info=msg.DynamicModelInfo.from_model(self.tool_scope_type),
+            
scope_config_model_info=msg.DynamicModelInfo.from_model(self.scope_config_type),
             
tool_model_infos=[msg.DynamicModelInfo.from_model(stream.tool_model) for stream 
in self._streams.values()],
             subtask_metas=subtask_metas,
             migration_scripts=MIGRATION_SCRIPTS
diff --git a/backend/python/pydevlake/pydevlake/stream.py 
b/backend/python/pydevlake/pydevlake/stream.py
index 91591bd6c..035d2c9fe 100644
--- a/backend/python/pydevlake/pydevlake/stream.py
+++ b/backend/python/pydevlake/pydevlake/stream.py
@@ -16,23 +16,13 @@
 
 from typing import Iterable, Type
 from abc import abstractmethod
-from enum import Enum
 
 from pydevlake.context import Context
 from pydevlake.subtasks import Collector, Extractor, Convertor, 
SubstreamCollector
-from pydevlake.model import RawModel, ToolModel, ToolScope, DomainModel
+from pydevlake.model import RawModel, ToolModel, ToolScope, DomainModel, 
DomainType
 from pydevlake.extractor import autoextract
 
 
-class DomainType(Enum):
-    CODE = "CODE"
-    TICKET = "TICKET"
-    CODE_REVIEW = "CODEREVIEW"
-    CROSS = "CROSS"
-    CICD = "CICD"
-    CODE_QUALITY = "CODEQUALITY"
-
-
 class Stream:
     def __init__(self, plugin_name: str):
         self.plugin_name = plugin_name
diff --git a/backend/python/pydevlake/pydevlake/testing/testing.py 
b/backend/python/pydevlake/pydevlake/testing/testing.py
index 41ac87ac2..fb2a990a1 100644
--- a/backend/python/pydevlake/pydevlake/testing/testing.py
+++ b/backend/python/pydevlake/pydevlake/testing/testing.py
@@ -22,8 +22,8 @@ from sqlmodel import create_engine
 from pydevlake.context import Context
 from pydevlake.plugin import Plugin
 from pydevlake.message import RemoteScopeGroup, PipelineTask
-from pydevlake.model import DomainModel, Connection, DomainScope, ToolModel, 
ToolScope, TransformationRule
-from pydevlake.stream import DomainType, Stream
+from pydevlake.model import DomainModel, Connection, DomainScope, ToolModel, 
ToolScope, ScopeConfig, DomainType
+from pydevlake.stream import Stream
 
 
 class ContextBuilder:
@@ -33,7 +33,7 @@ class ContextBuilder:
         self.plugin = plugin
         self.connection = None
         self.scope = None
-        self.transformation_rule = None
+        self.scope_config = None
 
     def with_connection(self, id=1, name='test_connection', **kwargs):
         self.connection = self.plugin.connection_type(id=id, name=name, 
**kwargs)
@@ -45,17 +45,21 @@ class ContextBuilder:
             self.scope.connection_id = self.connection.id
         return self
 
-    def with_transformation_rule(self, id=1, name='test_rule', **kwargs):
-        self.transformation_rule = self.plugin.transformation_rule_type(id=id, 
name=name, **kwargs)
+    def with_scope_config(self, id=1, name='test_config', **kwargs):
+        self.scope_config = self.plugin.scope_config_type(id=id, name=name, 
**kwargs)
         return self
 
     def build(self):
-        return Context(
-            engine=create_engine('sqlite:///:memory:'),
-            scope=self.scope,
-            connection=self.connection,
-            transformation_rule=self.transformation_rule
-        )
+        return make_context(self.connection, self.scope, self.scope_config)
+
+
+def make_context(connection, scope, scope_config):
+    return Context(
+        engine=create_engine('sqlite:///:memory:'),
+        scope=scope,
+        connection=connection,
+        scope_config=scope_config
+    )
 
 
 def assert_stream_convert(plugin: Union[Plugin, Type[Plugin]], stream_name: 
str,
@@ -76,11 +80,11 @@ def assert_stream_convert(plugin: Union[Plugin, 
Type[Plugin]], stream_name: str,
         assert res == exp
 
 
-def assert_stream_run(stream: Stream, connection: Connection, scope: 
ToolScope, transformation_rule: Optional[TransformationRule] = None):
+def assert_stream_run(stream: Stream, connection: Connection, scope: 
ToolScope, scope_config: ScopeConfig):
     """
     Test that a stream can run all 3 steps without error.
     """
-    ctx = 
ContextBuilder().with_connection(connection).with_scope(scope).with_transformation_rule(transformation_rule).build()
+    ctx = make_context(connection, scope, scope_config)
     stream.collector.run(ctx)
     stream.extractor.run(ctx)
     stream.convertor.run(ctx)
@@ -107,9 +111,9 @@ def assert_valid_tool_scope_type(plugin: Plugin):
     assert issubclass(tool_scope_type, ToolScope), 'tool_scope_type must be a 
subclass of ToolScope'
 
 
-def assert_valid_transformation_rule_type(plugin: Plugin):
-    transformation_rule_type = plugin.transformation_rule_type
-    assert issubclass(transformation_rule_type, TransformationRule), 
'transformation_rule_type must be a subclass of TransformationRule'
+def assert_valid_scope_config_type(plugin: Plugin):
+    scope_config_type = plugin.scope_config_type
+    assert issubclass(scope_config_type, ScopeConfig), 'scope_config_type must 
be a subclass of ScopeConfig'
 
 
 def assert_valid_streams(plugin: Plugin):
@@ -165,9 +169,8 @@ def assert_valid_remote_scopes(plugin: Plugin, connection: 
Connection, group_id:
     return tool_scopes
 
 
-def assert_valid_pipeline_plan(plugin: Plugin, connection: Connection, 
tool_scope: ToolScope, transformation_rule: Optional[TransformationRule] = 
None) -> list[list[PipelineTask]]:
+def assert_valid_pipeline_plan(plugin: Plugin, connection: Connection, 
tool_scope: ToolScope, scope_config: ScopeConfig) -> list[list[PipelineTask]]:
     plan = plugin.make_pipeline_plan(
-        [(tool_scope, transformation_rule)],
-        [domain_type.value for domain_type in DomainType],
+        [(tool_scope, scope_config)],
         connection
     )
@@ -182,18 +186,18 @@ def assert_valid_plugin(plugin: Plugin):
     assert_valid_description(plugin)
     assert_valid_connection_type(plugin)
     assert_valid_tool_scope_type(plugin)
-    assert_valid_transformation_rule_type(plugin)
+    assert_valid_scope_config_type(plugin)
     assert_valid_streams(plugin)
 
 
-def assert_plugin_run(plugin: Plugin, connection: Connection, 
transformation_rule: Optional[TransformationRule] = None):
+def assert_plugin_run(plugin: Plugin, connection: Connection, scope_config: 
ScopeConfig):
     assert_valid_plugin(plugin)
     assert_valid_connection(plugin, connection)
     groups = assert_valid_remote_scope_groups(plugin, connection)
     scope = assert_valid_remote_scopes(plugin, connection, groups[0].id)[0]
     assert_valid_domain_scopes(plugin, scope)
-    assert_valid_pipeline_plan(plugin, connection, scope, transformation_rule)
+    assert_valid_pipeline_plan(plugin, connection, scope, scope_config)
     for stream in plugin.streams:
         if isinstance(stream, type):
             stream = stream(plugin.name)
-        assert_stream_run(stream, connection, scope, transformation_rule)
+        assert_stream_run(stream, connection, scope, scope_config)
diff --git a/backend/python/test/fakeplugin/fakeplugin/main.py 
b/backend/python/test/fakeplugin/fakeplugin/main.py
index 213641b66..0c2461f71 100644
--- a/backend/python/test/fakeplugin/fakeplugin/main.py
+++ b/backend/python/test/fakeplugin/fakeplugin/main.py
@@ -20,7 +20,7 @@ import json
 
 from pydantic import SecretStr
 
-from pydevlake import Plugin, Connection, TransformationRule, Stream, 
ToolModel, ToolScope, RemoteScopeGroup, DomainType, Field
+from pydevlake import Plugin, Connection, Stream, ToolModel, ToolScope, 
ScopeConfig, RemoteScopeGroup, DomainType, Field
 from pydevlake.domain_layer.devops import CicdScope, CICDPipeline, CICDStatus, 
CICDResult, CICDType
 
 VALID_TOKEN = "this_is_a_valid_token"
@@ -48,10 +48,7 @@ class FakePipelineStream(Stream):
             yield json.loads(p.json()), {}
 
     def convert(self, pipeline: FakePipeline, ctx):
-        if ctx.transformation_rule:
-            env = ctx.transformation_rule.env
-        else:
-            env = "unknown"
+        env = ctx.scope_config.env
         yield CICDPipeline(
             name=pipeline.id,
             status=self.convert_status(pipeline.state),
@@ -103,7 +100,7 @@ class FakeProject(ToolScope, table=True):
     url: str
 
 
-class FakeTransformationRule(TransformationRule):
+class FakeScopeConfig(ScopeConfig):
     env: str
 
 
@@ -117,8 +114,8 @@ class FakePlugin(Plugin):
         return FakeProject
 
     @property
-    def transformation_rule_type(self):
-        return FakeTransformationRule
+    def scope_config_type(self):
+        return FakeScopeConfig
 
     def domain_scopes(self, project: FakeProject):
         project_name = "_".join(project.name.lower().split(" "))
diff --git a/backend/server/services/remote/models/migration.go 
b/backend/server/services/remote/models/migration.go
index 7d980354d..3f5aebc78 100644
--- a/backend/server/services/remote/models/migration.go
+++ b/backend/server/services/remote/models/migration.go
@@ -48,13 +48,29 @@ func (o ExecuteOperation) Execute(dal dal.Dal) errors.Error 
{
 
 var _ Operation = (*ExecuteOperation)(nil)
 
+// AddColumnOperation adds a column to an existing table as part of a
+// remote-plugin migration script.
+type AddColumnOperation struct {
+       Table      string         `json:"table"`
+       Column     string         `json:"column"`
+       ColumnType dal.ColumnType `json:"column_type"`
+}
+
+// Execute adds the column. It is idempotent: if the column already
+// exists it is left untouched and nil is returned. (Dropping an
+// existing column here would destroy data and, since the function
+// returned right after the drop, leave the column missing entirely.)
+func (o AddColumnOperation) Execute(dal dal.Dal) errors.Error {
+       if dal.HasColumn(o.Table, o.Column) {
+               return nil
+       }
+       return dal.AddColumn(o.Table, o.Column, o.ColumnType)
+}
+
+var _ Operation = (*AddColumnOperation)(nil)
+
 type DropColumnOperation struct {
        Table  string `json:"table"`
        Column string `json:"column"`
 }
 
 func (o DropColumnOperation) Execute(dal dal.Dal) errors.Error {
-       return dal.DropColumns(o.Table, o.Column)
+       if dal.HasColumn(o.Table, o.Column) {
+               return dal.DropColumns(o.Table, o.Column)
+       }
+       return nil
 }
 
 var _ Operation = (*DropColumnOperation)(nil)
@@ -65,11 +81,34 @@ type DropTableOperation struct {
 }
 
 func (o DropTableOperation) Execute(dal dal.Dal) errors.Error {
-       return dal.DropTables(o.Table)
+       if dal.HasTable(o.Table) {
+               return dal.DropTables(o.Table)
+       }
+       return nil
 }
 
 var _ Operation = (*DropTableOperation)(nil)
 
+// RenameTableOperation renames a table from OldName to NewName as part
+// of a remote-plugin migration script.
+type RenameTableOperation struct {
+       OldName string `json:"old_name"`
+       NewName string `json:"new_name"`
+}
+
+// Execute performs the rename. It is idempotent: if OldName no longer
+// exists the operation is a no-op, and if NewName already exists it is
+// dropped first so the rename cannot fail on a leftover table.
+// NOTE(review): dropping an existing NewName discards that table's data
+// — presumably acceptable for migration re-runs; confirm with callers.
+func (o RenameTableOperation) Execute(dal dal.Dal) errors.Error {
+       if !dal.HasTable(o.OldName) {
+               return nil
+       }
+       if dal.HasTable(o.NewName) {
+               err := dal.DropTables(o.NewName)
+               if err != nil {
+                       return err
+               }
+       }
+       return dal.RenameTable(o.OldName, o.NewName)
+}
+
+var _ Operation = (*RenameTableOperation)(nil)
+
 type RemoteMigrationScript struct {
        operations []Operation
        version    uint64
@@ -102,10 +141,14 @@ func (s *RemoteMigrationScript) UnmarshalJSON(data 
[]byte) error {
                switch operationType {
                case "execute":
                        operation = &ExecuteOperation{}
+               case "add_column":
+                       operation = &AddColumnOperation{}
                case "drop_column":
                        operation = &DropColumnOperation{}
                case "drop_table":
                        operation = &DropTableOperation{}
+               case "rename_table":
+                       operation = &RenameTableOperation{}
                default:
                        return errors.BadInput.New("unsupported operation type")
                }
diff --git a/backend/server/services/remote/models/models.go 
b/backend/server/services/remote/models/models.go
index c3bc47b51..2868703aa 100644
--- a/backend/server/services/remote/models/models.go
+++ b/backend/server/services/remote/models/models.go
@@ -33,23 +33,23 @@ const (
 )
 
 type PluginInfo struct {
-       Name                        string                  `json:"name" 
validate:"required"`
-       Description                 string                  `json:"description"`
-       ConnectionModelInfo         *DynamicModelInfo       
`json:"connection_model_info" validate:"required"`
-       TransformationRuleModelInfo *DynamicModelInfo       
`json:"transformation_rule_model_info"`
-       ScopeModelInfo              *DynamicModelInfo       
`json:"scope_model_info" validate:"required"`
-       ToolModelInfos              []*DynamicModelInfo     
`json:"tool_model_infos"`
-       MigrationScripts            []RemoteMigrationScript 
`json:"migration_scripts"`
-       PluginPath                  string                  `json:"plugin_path" 
validate:"required"`
-       SubtaskMetas                []SubtaskMeta           
`json:"subtask_metas"`
-       Extension                   PluginExtension         `json:"extension"`
+       Name                 string                  `json:"name" 
validate:"required"`
+       Description          string                  `json:"description"`
+       ConnectionModelInfo  *DynamicModelInfo       
`json:"connection_model_info" validate:"required"`
+       ScopeConfigModelInfo *DynamicModelInfo       
`json:"scope_config_model_info"`
+       ScopeModelInfo       *DynamicModelInfo       `json:"scope_model_info" 
validate:"required"`
+       ToolModelInfos       []*DynamicModelInfo     `json:"tool_model_infos"`
+       MigrationScripts     []RemoteMigrationScript `json:"migration_scripts"`
+       PluginPath           string                  `json:"plugin_path" 
validate:"required"`
+       SubtaskMetas         []SubtaskMeta           `json:"subtask_metas"`
+       Extension            PluginExtension         `json:"extension"`
 }
 
 // Type aliases used by the API helper for better readability
 type (
-       RemoteScope          any
-       RemoteTransformation any
-       RemoteConnection     any
+       RemoteScope       any
+       RemoteScopeConfig any
+       RemoteConnection  any
 )
 
 type DynamicModelInfo struct {
@@ -62,17 +62,17 @@ func (d DynamicModelInfo) LoadDynamicTabler(parentModel 
any) (*models.DynamicTab
 }
 
 type ScopeModel struct {
-       common.NoPKModel     `swaggerignore:"true"`
-       Id                   string `gorm:"primarykey;type:varchar(255)" 
json:"id"`
-       ConnectionId         uint64 `gorm:"primaryKey" json:"connectionId"`
-       Name                 string `json:"name" validate:"required"`
-       TransformationRuleId uint64 `json:"transformationRuleId"`
+       common.NoPKModel `swaggerignore:"true"`
+       Id               string `gorm:"primarykey;type:varchar(255)" json:"id"`
+       ConnectionId     uint64 `gorm:"primaryKey" json:"connectionId"`
+       Name             string `json:"name" validate:"required"`
+       ScopeConfigId    uint64 `json:"scopeConfigId"`
 }
 
-type TransformationModel struct {
-       common.Model
-       ConnectionId uint64 `json:"connectionId"`
-       Name         string `json:"name"`
+type ScopeConfigModel struct {
+       common.ScopeConfig `mapstructure:",squash"`
+       ConnectionId       uint64 `json:"connectionId"`
+       Name               string `json:"name"`
 }
 
 type SubtaskMeta struct {
diff --git a/backend/server/services/remote/plugin/default_api.go 
b/backend/server/services/remote/plugin/default_api.go
index 31fc4a569..817a05d4f 100644
--- a/backend/server/services/remote/plugin/default_api.go
+++ b/backend/server/services/remote/plugin/default_api.go
@@ -26,26 +26,26 @@ import (
 )
 
 type pluginAPI struct {
-       invoker    bridge.Invoker
-       connType   *models.DynamicTabler
-       txRuleType *models.DynamicTabler
-       scopeType  *models.DynamicTabler
-       helper     *api.ConnectionApiHelper
+       invoker         bridge.Invoker
+       connType        *models.DynamicTabler
+       scopeType       *models.DynamicTabler
+       scopeConfigType *models.DynamicTabler
+       helper          *api.ConnectionApiHelper
 }
 
 func GetDefaultAPI(
        invoker bridge.Invoker,
        connType *models.DynamicTabler,
-       txRuleType *models.DynamicTabler,
+       scopeConfigType *models.DynamicTabler,
        scopeType *models.DynamicTabler,
        helper *api.ConnectionApiHelper,
 ) map[string]map[string]plugin.ApiResourceHandler {
        papi := &pluginAPI{
-               invoker:    invoker,
-               connType:   connType,
-               txRuleType: txRuleType,
-               scopeType:  scopeType,
-               helper:     helper,
+               invoker:         invoker,
+               connType:        connType,
+               scopeConfigType: scopeConfigType,
+               scopeType:       scopeType,
+               helper:          helper,
        }
 
        resources := map[string]map[string]plugin.ApiResourceHandler{
@@ -70,6 +70,14 @@ func GetDefaultAPI(
                        "PATCH":  papi.UpdateScope,
                        "DELETE": papi.DeleteScope,
                },
+               "connections/:connectionId/scope-configs": {
+                       "POST": papi.PostScopeConfigs,
+                       "GET":  papi.ListScopeConfigs,
+               },
+               "connections/:connectionId/scope-configs/:id": {
+                       "GET":   papi.GetScopeConfig,
+                       "PATCH": papi.PatchScopeConfig,
+               },
                "connections/:connectionId/remote-scopes": {
                        "GET": papi.GetRemoteScopes,
                },
@@ -78,27 +86,17 @@ func GetDefaultAPI(
                },
        }
 
-       if txRuleType != nil {
-               resources["connections/:connectionId/transformation_rules"] = 
map[string]plugin.ApiResourceHandler{
-                       "POST": papi.PostTransformationRules,
-                       "GET":  papi.ListTransformationRules,
-               }
-               resources["connections/:connectionId/transformation_rules/:id"] 
= map[string]plugin.ApiResourceHandler{
-                       "GET":   papi.GetTransformationRule,
-                       "PATCH": papi.PatchTransformationRule,
-               }
-       }
        scopeHelper = createScopeHelper(papi)
        return resources
 }
 
-func createScopeHelper(pa *pluginAPI) 
*api.GenericScopeApiHelper[remoteModel.RemoteConnection, 
remoteModel.RemoteScope, remoteModel.RemoteTransformation] {
+func createScopeHelper(pa *pluginAPI) 
*api.GenericScopeApiHelper[remoteModel.RemoteConnection, 
remoteModel.RemoteScope, remoteModel.RemoteScopeConfig] {
        params := &api.ReflectionParameters{
                ScopeIdFieldName:  "Id",
                ScopeIdColumnName: "id",
                RawScopeParamName: "scope_id",
        }
-       return api.NewGenericScopeHelper[remoteModel.RemoteConnection, 
remoteModel.RemoteScope, remoteModel.RemoteTransformation](
+       return api.NewGenericScopeHelper[remoteModel.RemoteConnection, 
remoteModel.RemoteScope, remoteModel.RemoteScopeConfig](
                basicRes,
                nil,
                connectionHelper,
diff --git a/backend/server/services/remote/plugin/doc/open_api.go 
b/backend/server/services/remote/plugin/doc/open_api.go
index 1d0e2db7c..8a754f917 100644
--- a/backend/server/services/remote/plugin/doc/open_api.go
+++ b/backend/server/services/remote/plugin/doc/open_api.go
@@ -19,13 +19,14 @@ package doc
 
 import (
        "encoding/json"
-       "github.com/apache/incubator-devlake/core/config"
        "io"
        "os"
        "path/filepath"
        "strings"
        "text/template"
 
+       "github.com/apache/incubator-devlake/core/config"
+
        "github.com/apache/incubator-devlake/core/errors"
        "github.com/apache/incubator-devlake/server/services/remote/models"
 )
@@ -39,9 +40,9 @@ func GenerateOpenApiSpec(pluginInfo *models.PluginInfo) 
(*string, errors.Error)
        if err != nil {
                return nil, errors.Default.Wrap(err, "scope schema is not valid 
JSON")
        }
-       txRuleSchema, err := 
json.Marshal(pluginInfo.TransformationRuleModelInfo.JsonSchema)
+       scopeConfigSchema, err := 
json.Marshal(pluginInfo.ScopeConfigModelInfo.JsonSchema)
        if err != nil {
-               return nil, errors.Default.Wrap(err, "transformation rule 
schema is not valid JSON")
+               return nil, errors.Default.Wrap(err, "scope config schema is 
not valid JSON")
        }
        specTemplate, tmplErr := specTemplate()
        if tmplErr != nil {
@@ -49,10 +50,10 @@ func GenerateOpenApiSpec(pluginInfo *models.PluginInfo) 
(*string, errors.Error)
        }
        writer := &strings.Builder{}
        err = specTemplate.Execute(writer, map[string]interface{}{
-               "PluginName":               pluginInfo.Name,
-               "ConnectionSchema":         string(connectionSchema),
-               "ScopeSchema":              string(scopeSchema),
-               "TransformationRuleSchema": string(txRuleSchema),
+               "PluginName":        pluginInfo.Name,
+               "ConnectionSchema":  string(connectionSchema),
+               "ScopeSchema":       string(scopeSchema),
+               "ScopeConfigSchema": string(scopeConfigSchema),
        })
        if err != nil {
                return nil, errors.Default.Wrap(err, "could not execute swagger 
doc template")
diff --git a/backend/server/services/remote/plugin/init.go 
b/backend/server/services/remote/plugin/init.go
index 3401acba6..699547c4d 100644
--- a/backend/server/services/remote/plugin/init.go
+++ b/backend/server/services/remote/plugin/init.go
@@ -28,7 +28,7 @@ import (
 
 var (
        connectionHelper *api.ConnectionApiHelper
-       scopeHelper      *api.GenericScopeApiHelper[models.RemoteConnection, 
models.RemoteScope, models.RemoteTransformation]
+       scopeHelper      *api.GenericScopeApiHelper[models.RemoteConnection, 
models.RemoteScope, models.RemoteScopeConfig]
        basicRes         context.BasicRes
        vld              *validator.Validate
 )
diff --git a/backend/server/services/remote/plugin/plugin_extensions.go 
b/backend/server/services/remote/plugin/plugin_extensions.go
index 73cc68220..c51f79a3a 100644
--- a/backend/server/services/remote/plugin/plugin_extensions.go
+++ b/backend/server/services/remote/plugin/plugin_extensions.go
@@ -19,12 +19,9 @@ package plugin
 
 import (
        "encoding/json"
-       "fmt"
 
-       "github.com/apache/incubator-devlake/core/dal"
        "github.com/apache/incubator-devlake/core/errors"
        "github.com/apache/incubator-devlake/core/plugin"
-       "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
        "github.com/apache/incubator-devlake/server/services/remote/bridge"
        "github.com/apache/incubator-devlake/server/services/remote/models"
 )
@@ -50,30 +47,17 @@ func (p remoteDatasourcePlugin) 
MakeDataSourcePipelinePlanV200(connectionId uint
        }
 
        db := basicRes.GetDal()
-       var toolScopeTxRulePairs = make([]interface{}, len(bpScopes))
+       var toolScopeConfigPairs = make([]interface{}, len(bpScopes))
        for i, bpScope := range bpScopes {
-               wrappedToolScope := p.scopeTabler.New()
-               err = api.CallDB(db.First, wrappedToolScope, dal.Where("id = 
?", bpScope.Id))
-               if err != nil {
-                       return nil, nil, errors.Default.Wrap(err, 
fmt.Sprintf("error getting scope %s", bpScope.Name))
-               }
-               toolScope := models.ScopeModel{}
-               err := wrappedToolScope.To(&toolScope)
-               if err != nil {
-                       return nil, nil, err
-               }
-               txRule, err := p.getTxRule(db, toolScope)
+               toolScope, scopeConfig, err := p.getScopeAndConfig(db, 
connectionId, bpScope.Id)
                if err != nil {
                        return nil, nil, err
                }
-               toolScopeTxRulePairs[i] = 
[]interface{}{wrappedToolScope.Unwrap(), txRule}
+               toolScopeConfigPairs[i] = []interface{}{toolScope, scopeConfig}
        }
 
-       // TODO: @camille: no need to pass the entities separately as they are 
already in the scope config (tx rule)
-       entities := []string{}
-
        plan_data := models.PipelineData{}
-       err = p.invoker.Call("make-pipeline", bridge.DefaultContext, 
toolScopeTxRulePairs, entities, connection.Unwrap()).Get(&plan_data)
+       err = p.invoker.Call("make-pipeline", bridge.DefaultContext, 
toolScopeConfigPairs, connection.Unwrap()).Get(&plan_data)
        if err != nil {
                return nil, nil, err
        }
diff --git a/backend/server/services/remote/plugin/plugin_impl.go 
b/backend/server/services/remote/plugin/plugin_impl.go
index 02a613748..b86659914 100644
--- a/backend/server/services/remote/plugin/plugin_impl.go
+++ b/backend/server/services/remote/plugin/plugin_impl.go
@@ -34,25 +34,25 @@ import (
 
 type (
        remotePluginImpl struct {
-               name                     string
-               subtaskMetas             []plugin.SubTaskMeta
-               pluginPath               string
-               description              string
-               invoker                  bridge.Invoker
-               connectionTabler         *coreModels.DynamicTabler
-               scopeTabler              *coreModels.DynamicTabler
-               transformationRuleTabler *coreModels.DynamicTabler
-               toolModelTablers         []*coreModels.DynamicTabler
-               migrationScripts         []plugin.MigrationScript
-               resources                
map[string]map[string]plugin.ApiResourceHandler
-               openApiSpec              string
+               name              string
+               subtaskMetas      []plugin.SubTaskMeta
+               pluginPath        string
+               description       string
+               invoker           bridge.Invoker
+               connectionTabler  *coreModels.DynamicTabler
+               scopeTabler       *coreModels.DynamicTabler
+               scopeConfigTabler *coreModels.DynamicTabler
+               toolModelTablers  []*coreModels.DynamicTabler
+               migrationScripts  []plugin.MigrationScript
+               resources         
map[string]map[string]plugin.ApiResourceHandler
+               openApiSpec       string
        }
        RemotePluginTaskData struct {
-               DbUrl              string                 `json:"db_url"`
-               Scope              interface{}            `json:"scope"`
-               Connection         interface{}            `json:"connection"`
-               TransformationRule interface{}            
`json:"transformation_rule"`
-               Options            map[string]interface{} `json:"options"`
+               DbUrl       string                 `json:"db_url"`
+               Scope       interface{}            `json:"scope"`
+               Connection  interface{}            `json:"connection"`
+               ScopeConfig interface{}            `json:"scope_config"`
+               Options     map[string]interface{} `json:"options"`
        }
 )
 
@@ -61,18 +61,14 @@ func newPlugin(info *models.PluginInfo, invoker 
bridge.Invoker) (*remotePluginIm
        if err != nil {
                return nil, errors.Default.Wrap(err, fmt.Sprintf("Couldn't load 
Connection type for plugin %s", info.Name))
        }
-
-       var txRuleTabler *coreModels.DynamicTabler
-       if info.TransformationRuleModelInfo != nil {
-               txRuleTabler, err = 
info.TransformationRuleModelInfo.LoadDynamicTabler(models.TransformationModel{})
-               if err != nil {
-                       return nil, errors.Default.Wrap(err, 
fmt.Sprintf("Couldn't load TransformationRule type for plugin %s", info.Name))
-               }
-       }
        scopeTabler, err := 
info.ScopeModelInfo.LoadDynamicTabler(models.ScopeModel{})
        if err != nil {
                return nil, errors.Default.Wrap(err, fmt.Sprintf("Couldn't load 
Scope type for plugin %s", info.Name))
        }
+       scopeConfigTabler, err := 
info.ScopeConfigModelInfo.LoadDynamicTabler(models.ScopeConfigModel{})
+       if err != nil {
+               return nil, errors.Default.Wrap(err, fmt.Sprintf("Couldn't load 
ScopeConfig type for plugin %s", info.Name))
+       }
        toolModelTablers := make([]*coreModels.DynamicTabler, 
len(info.ToolModelInfos))
        for i, toolModelInfo := range info.ToolModelInfos {
                toolModelTabler, err := 
toolModelInfo.LoadDynamicTabler(common.NoPKModel{})
@@ -91,17 +87,17 @@ func newPlugin(info *models.PluginInfo, invoker 
bridge.Invoker) (*remotePluginIm
                scripts = append(scripts, &script)
        }
        p := remotePluginImpl{
-               name:                     info.Name,
-               invoker:                  invoker,
-               pluginPath:               info.PluginPath,
-               description:              info.Description,
-               connectionTabler:         connectionTabler,
-               scopeTabler:              scopeTabler,
-               transformationRuleTabler: txRuleTabler,
-               toolModelTablers:         toolModelTablers,
-               migrationScripts:         scripts,
-               resources:                GetDefaultAPI(invoker, 
connectionTabler, txRuleTabler, scopeTabler, connectionHelper),
-               openApiSpec:              *openApiSpec,
+               name:              info.Name,
+               invoker:           invoker,
+               pluginPath:        info.PluginPath,
+               description:       info.Description,
+               connectionTabler:  connectionTabler,
+               scopeTabler:       scopeTabler,
+               scopeConfigTabler: scopeConfigTabler,
+               toolModelTablers:  toolModelTablers,
+               migrationScripts:  scripts,
+               resources:         GetDefaultAPI(invoker, connectionTabler, 
scopeConfigTabler, scopeTabler, connectionHelper),
+               openApiSpec:       *openApiSpec,
        }
        remoteBridge := bridge.NewBridge(invoker)
        for _, subtask := range info.SubtaskMetas {
@@ -153,45 +149,41 @@ func (p *remotePluginImpl) PrepareTaskData(taskCtx 
plugin.TaskContext, options m
        }
 
        db := taskCtx.GetDal()
+       scope, scopeConfig, err := p.getScopeAndConfig(db, connectionId, 
scopeId)
+       if err != nil {
+               return nil, err
+       }
+
+       return RemotePluginTaskData{
+               DbUrl:       dbUrl,
+               Scope:       scope,
+               Connection:  connection,
+               ScopeConfig: scopeConfig,
+               Options:     options,
+       }, nil
+}
+
+func (p *remotePluginImpl) getScopeAndConfig(db dal.Dal, connectionId uint64, 
scopeId string) (interface{}, interface{}, errors.Error) {
        wrappedScope := p.scopeTabler.New()
-       err = api.CallDB(db.First, wrappedScope, dal.Where("connection_id = ? 
AND id = ?", connectionId, scopeId))
+       err := api.CallDB(db.First, wrappedScope, dal.Where("connection_id = ? 
AND id = ?", connectionId, scopeId))
        if err != nil {
-               return nil, errors.BadInput.New("Invalid scope id")
+               return nil, nil, errors.BadInput.New("Invalid scope id")
        }
-       var scope models.ScopeModel
+       scope := models.ScopeModel{}
        err = wrappedScope.To(&scope)
        if err != nil {
-               return nil, err
+               return nil, nil, errors.BadInput.Wrap(err, "Invalid scope")
        }
-
-       txRule, err := p.getTxRule(db, scope)
+       wrappedScopeConfig := p.scopeConfigTabler.New()
+       err = api.CallDB(db.First, wrappedScopeConfig, 
dal.From(p.scopeConfigTabler.TableName()), dal.Where("id = ?", 
scope.ScopeConfigId))
        if err != nil {
-               return nil, err
+               return nil, nil, err
        }
 
-       return RemotePluginTaskData{
-               DbUrl:              dbUrl,
-               Scope:              wrappedScope.Unwrap(),
-               Connection:         connection,
-               TransformationRule: txRule,
-               Options:            options,
-       }, nil
-}
-
-func (p *remotePluginImpl) getTxRule(db dal.Dal, scope models.ScopeModel) 
(interface{}, errors.Error) {
-       if scope.TransformationRuleId > 0 {
-               if p.transformationRuleTabler == nil {
-                       return nil, errors.Default.New(fmt.Sprintf("Cannot load 
transformation rule %v: plugin %s has no transformation rule model", 
scope.TransformationRuleId, p.name))
-               }
-               wrappedTxRule := p.transformationRuleTabler.New()
-               err := api.CallDB(db.First, wrappedTxRule, 
dal.From(p.transformationRuleTabler.TableName()), dal.Where("id = ?", 
scope.TransformationRuleId))
-               if err != nil {
-                       return nil, err
-               }
-               return wrappedTxRule.Unwrap(), nil
-       } else {
-               return nil, nil
-       }
+       return wrappedScope.Unwrap(), wrappedScopeConfig.Unwrap(), nil
 }
 
 func (p *remotePluginImpl) Description() string {
@@ -221,11 +213,9 @@ func (p *remotePluginImpl) RunAutoMigrations() 
errors.Error {
        if err != nil {
                return err
        }
-       if p.transformationRuleTabler != nil {
-               err = api.CallDB(db.AutoMigrate, 
p.transformationRuleTabler.New())
-               if err != nil {
-                       return err
-               }
+       err = api.CallDB(db.AutoMigrate, p.scopeConfigTabler.New())
+       if err != nil {
+               return err
        }
        for _, toolModelTabler := range p.toolModelTablers {
                err = api.CallDB(db.AutoMigrate, toolModelTabler.New())
diff --git a/backend/server/services/remote/plugin/scope_api.go 
b/backend/server/services/remote/plugin/scope_api.go
index 43dbf2b0e..9ff150a5a 100644
--- a/backend/server/services/remote/plugin/scope_api.go
+++ b/backend/server/services/remote/plugin/scope_api.go
@@ -104,7 +104,7 @@ func (pa *pluginAPI) DeleteScope(input 
*plugin.ApiResourceInput) (*plugin.ApiRes
 
 // convertScopeResponse adapt the "remote" scopes to a serializable 
api.ScopeRes. This code is needed because squashed mapstructure don't work
 // with dynamic/runtime structs used by remote plugins
-func convertScopeResponse(scopes ...*api.ScopeRes[models.RemoteScope, 
models.RemoteTransformation]) ([]map[string]any, errors.Error) {
+func convertScopeResponse(scopes ...*api.ScopeRes[models.RemoteScope, 
models.RemoteScopeConfig]) ([]map[string]any, errors.Error) {
        responses := make([]map[string]any, len(scopes))
        for i, scope := range scopes {
                resMap := map[string]any{}
diff --git a/backend/server/services/remote/plugin/transformation_rule_api.go 
b/backend/server/services/remote/plugin/scope_config_api.go
similarity index 60%
rename from backend/server/services/remote/plugin/transformation_rule_api.go
rename to backend/server/services/remote/plugin/scope_config_api.go
index a7a1a0d9b..42f1f7cf6 100644
--- a/backend/server/services/remote/plugin/transformation_rule_api.go
+++ b/backend/server/services/remote/plugin/scope_config_api.go
@@ -27,80 +27,80 @@ import (
        "github.com/apache/incubator-devlake/helpers/pluginhelper/api"
 )
 
-func (pa *pluginAPI) PostTransformationRules(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
+func (pa *pluginAPI) PostScopeConfigs(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
        connectionId, _ := strconv.ParseUint(input.Params["connectionId"], 10, 
64)
        if connectionId == 0 {
                return nil, errors.BadInput.New("invalid connectionId")
        }
-       txRule := pa.txRuleType.New()
+       scopeConfig := pa.scopeConfigType.New()
        input.Body[`connectionId`] = connectionId
-       err := api.DecodeMapStruct(input.Body, txRule, false)
+       err := api.DecodeMapStruct(input.Body, scopeConfig, false)
        if err != nil {
-               return nil, errors.BadInput.Wrap(err, "error in decoding 
transformation rule")
+               return nil, errors.BadInput.Wrap(err, "error in decoding scope 
config")
        }
        db := basicRes.GetDal()
-       err = api.CallDB(db.Create, txRule)
+       err = api.CallDB(db.Create, scopeConfig)
        if err != nil {
                return nil, err
        }
-       return &plugin.ApiResourceOutput{Body: txRule.Unwrap(), Status: 
http.StatusOK}, nil
+       return &plugin.ApiResourceOutput{Body: scopeConfig.Unwrap(), Status: 
http.StatusOK}, nil
 }
 
-func (pa *pluginAPI) PatchTransformationRule(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
+func (pa *pluginAPI) PatchScopeConfig(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
        connectionId, trId, err := extractTrParam(input.Params)
        if err != nil {
                return nil, err
        }
 
-       txRule := pa.txRuleType.New()
+       scopeConfig := pa.scopeConfigType.New()
        db := basicRes.GetDal()
-       err = api.CallDB(db.First, txRule, dal.Where("connection_id = ? AND id 
= ?", connectionId, trId))
+       err = api.CallDB(db.First, scopeConfig, dal.Where("connection_id = ? 
AND id = ?", connectionId, trId))
        if err != nil {
-               return nil, errors.Default.Wrap(err, "no transformation rule 
with given id")
+               return nil, errors.Default.Wrap(err, "no scope config with 
given id")
        }
 
        input.Body[`connectionId`] = connectionId
        input.Body[`id`] = trId
-       err = api.DecodeMapStruct(input.Body, txRule, false)
+       err = api.DecodeMapStruct(input.Body, scopeConfig, false)
        if err != nil {
                return nil, errors.Default.Wrap(err, "decoding error")
        }
 
-       err = api.CallDB(db.Update, txRule)
+       err = api.CallDB(db.Update, scopeConfig)
        if err != nil {
                return nil, err
        }
-       return &plugin.ApiResourceOutput{Body: txRule.Unwrap(), Status: 
http.StatusOK}, nil
+       return &plugin.ApiResourceOutput{Body: scopeConfig.Unwrap(), Status: 
http.StatusOK}, nil
 }
 
-func (pa *pluginAPI) GetTransformationRule(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
-       txRule := pa.txRuleType.New()
+func (pa *pluginAPI) GetScopeConfig(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
+       scopeConfig := pa.scopeConfigType.New()
        db := basicRes.GetDal()
        connectionId, trId, err := extractTrParam(input.Params)
        if err != nil {
                return nil, err
        }
-       err = api.CallDB(db.First, txRule, dal.Where("connection_id = ? AND id 
= ?", connectionId, trId))
+       err = api.CallDB(db.First, scopeConfig, dal.Where("connection_id = ? 
AND id = ?", connectionId, trId))
        if err != nil {
-               return nil, errors.Default.Wrap(err, "no transformation rule 
with given id")
+               return nil, errors.Default.Wrap(err, "no scope config with 
given id")
        }
 
-       return &plugin.ApiResourceOutput{Body: txRule.Unwrap()}, nil
+       return &plugin.ApiResourceOutput{Body: scopeConfig.Unwrap()}, nil
 }
 
-func (pa *pluginAPI) ListTransformationRules(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
-       txRules := pa.txRuleType.NewSlice()
+func (pa *pluginAPI) ListScopeConfigs(input *plugin.ApiResourceInput) 
(*plugin.ApiResourceOutput, errors.Error) {
+       scopeConfigs := pa.scopeConfigType.NewSlice()
        limit, offset := api.GetLimitOffset(input.Query, "pageSize", "page")
        if limit > 100 {
                return nil, errors.BadInput.New("pageSize cannot exceed 100")
        }
 
        db := basicRes.GetDal()
-       err := api.CallDB(db.All, txRules, dal.Limit(limit), dal.Offset(offset))
+       err := api.CallDB(db.All, scopeConfigs, dal.Limit(limit), 
dal.Offset(offset))
        if err != nil {
                return nil, err
        }
-       return &plugin.ApiResourceOutput{Body: txRules.Unwrap()}, nil
+       return &plugin.ApiResourceOutput{Body: scopeConfigs.Unwrap()}, nil
 }
 
 func extractTrParam(params map[string]string) (connectionId uint64, 
transformationId uint64, err errors.Error) {
diff --git a/backend/server/services/remote/plugin/scope_db_helper.go 
b/backend/server/services/remote/plugin/scope_db_helper.go
index e44d425f3..4d063593f 100644
--- a/backend/server/services/remote/plugin/scope_db_helper.go
+++ b/backend/server/services/remote/plugin/scope_db_helper.go
@@ -31,7 +31,7 @@ import (
 )
 
 type ScopeDatabaseHelperImpl struct {
-       api.ScopeDatabaseHelper[models.RemoteConnection, models.RemoteScope, 
models.RemoteTransformation]
+       api.ScopeDatabaseHelper[models.RemoteConnection, models.RemoteScope, 
models.RemoteScopeConfig]
        pa         *pluginAPI
        db         dal.Dal
        params     *api.ReflectionParameters
@@ -106,25 +106,26 @@ func (s *ScopeDatabaseHelperImpl) 
DeleteScope(connectionId uint64, scopeId strin
        return api.CallDB(s.db.Delete, rawScope, dal.Where("connection_id = ? 
AND id = ?", connectionId, scopeId))
 }
 
-func (s *ScopeDatabaseHelperImpl) GetTransformationRule(ruleId uint64) 
(models.RemoteTransformation, errors.Error) {
-       rule := s.pa.txRuleType.New()
-       err := api.CallDB(s.db.First, rule, dal.Where("id = ?", ruleId))
+func (s *ScopeDatabaseHelperImpl) GetScopeConfig(configId uint64) 
(*models.RemoteScopeConfig, errors.Error) {
+       config := s.pa.scopeConfigType.New()
+       err := api.CallDB(s.db.First, config, dal.Where("id = ?", configId))
        if err != nil {
-               return rule, err
+               return nil, err
        }
-       return rule.Unwrap(), nil
+       unwrapped := config.Unwrap().(models.RemoteScopeConfig)
+       return &unwrapped, nil
 }
 
-func (s *ScopeDatabaseHelperImpl) ListTransformationRules(ruleIds []uint64) 
([]*models.RemoteTransformation, errors.Error) {
-       rules := s.pa.txRuleType.NewSlice()
-       err := api.CallDB(s.db.All, rules, dal.Where("id IN (?)", ruleIds))
+func (s *ScopeDatabaseHelperImpl) ListScopeConfigs(configIds []uint64) 
([]*models.RemoteScopeConfig, errors.Error) {
+       configs := s.pa.scopeConfigType.NewSlice()
+       err := api.CallDB(s.db.All, configs, dal.Where("id IN (?)", configIds))
        if err != nil {
                return nil, err
        }
-       var result []*models.RemoteTransformation
-       for _, rule := range rules.UnwrapSlice() {
-               rule := rule.(models.RemoteTransformation)
-               result = append(result, &rule)
+       var result []*models.RemoteScopeConfig
+       for _, config := range configs.UnwrapSlice() {
+               config := config.(models.RemoteScopeConfig)
+               result = append(result, &config)
        }
        return result, nil
 }
@@ -146,4 +147,4 @@ func (s *ScopeDatabaseHelperImpl) save(scopes 
[]*models.RemoteScope, createdAt *
        return nil
 }
 
-var _ api.ScopeDatabaseHelper[models.RemoteConnection, models.RemoteScope, 
models.RemoteTransformation] = &ScopeDatabaseHelperImpl{}
+var _ api.ScopeDatabaseHelper[models.RemoteConnection, models.RemoteScope, 
models.RemoteScopeConfig] = &ScopeDatabaseHelperImpl{}
diff --git a/backend/test/e2e/remote/helper.go 
b/backend/test/e2e/remote/helper.go
index d2fc4ebae..8af1da556 100644
--- a/backend/test/e2e/remote/helper.go
+++ b/backend/test/e2e/remote/helper.go
@@ -44,22 +44,23 @@ type (
                Token string `json:"token"`
        }
        FakeProject struct {
-               Id                   string `json:"id"`
-               Name                 string `json:"name"`
-               ConnectionId         uint64 `json:"connectionId"`
-               TransformationRuleId uint64 `json:"transformationRuleId"`
-               Url                  string `json:"url"`
+               Id            string `json:"id"`
+               Name          string `json:"name"`
+               ConnectionId  uint64 `json:"connectionId"`
+               ScopeConfigId uint64 `json:"scopeConfigId"`
+               Url           string `json:"url"`
        }
-       FakeTxRule struct {
-               Id   uint64 `json:"id"`
-               Name string `json:"name"`
-               Env  string `json:"env"`
+       FakeScopeConfig struct {
+               Id       uint64   `json:"id"`
+               Name     string   `json:"name"`
+               Env      string   `json:"env"`
+               Entities []string `json:"entities"`
        }
        BlueprintTestParams struct {
                connection *helper.Connection
                projects   []models.ApiOutputProject
                blueprints []models.Blueprint
-               rule       *FakeTxRule
+               config     *FakeScopeConfig
                scope      *FakeProject
        }
 )
@@ -89,30 +90,30 @@ func CreateTestConnection(client *helper.DevlakeClient) 
*helper.Connection {
        return connection
 }
 
-func CreateTestScope(client *helper.DevlakeClient, rule *FakeTxRule, 
connectionId uint64) *FakeProject {
+func CreateTestScope(client *helper.DevlakeClient, config *FakeScopeConfig, 
connectionId uint64) *FakeProject {
        scopes := helper.Cast[[]FakeProject](client.CreateScope(PLUGIN_NAME,
                connectionId,
                FakeProject{
-                       Id:                   "p1",
-                       Name:                 "Project 1",
-                       ConnectionId:         connectionId,
-                       Url:                  "http://fake.org/api/project/p1",
-                       TransformationRuleId: rule.Id,
+                       Id:            "p1",
+                       Name:          "Project 1",
+                       ConnectionId:  connectionId,
+                       Url:           "http://fake.org/api/project/p1",
+                       ScopeConfigId: config.Id,
                },
        ))
        return &scopes[0]
 }
 
-func CreateTestTransformationRule(client *helper.DevlakeClient, connectionId 
uint64) *FakeTxRule {
-       rule := 
helper.Cast[FakeTxRule](client.CreateTransformationRule(PLUGIN_NAME, 
connectionId, FakeTxRule{Name: "Tx rule", Env: "test env"}))
-       return &rule
+func CreateTestScopeConfig(client *helper.DevlakeClient, connectionId uint64) 
*FakeScopeConfig {
+       config := 
helper.Cast[FakeScopeConfig](client.CreateScopeConfig(PLUGIN_NAME, 
connectionId, FakeScopeConfig{Name: "Scope config", Env: "test env", Entities: 
[]string{"CICD"}}))
+       return &config
 }
 
 func CreateTestBlueprints(t *testing.T, client *helper.DevlakeClient, count 
int) *BlueprintTestParams {
        t.Helper()
        connection := CreateTestConnection(client)
-       rule := CreateTestTransformationRule(client, connection.ID)
-       scope := CreateTestScope(client, rule, connection.ID)
+       config := CreateTestScopeConfig(client, connection.ID)
+       scope := CreateTestScope(client, config, connection.ID)
        var bps []models.Blueprint
        var projects []models.ApiOutputProject
        for i := 1; i <= count; i++ {
@@ -149,7 +150,7 @@ func CreateTestBlueprints(t *testing.T, client 
*helper.DevlakeClient, count int)
                connection: connection,
                projects:   projects,
                blueprints: bps,
-               rule:       rule,
+               config:     config,
                scope:      scope,
        }
 }
diff --git a/backend/test/e2e/remote/python_plugin_test.go 
b/backend/test/e2e/remote/python_plugin_test.go
index 04237e439..2e6e8e83d 100644
--- a/backend/test/e2e/remote/python_plugin_test.go
+++ b/backend/test/e2e/remote/python_plugin_test.go
@@ -81,8 +81,8 @@ func TestRemoteScopes(t *testing.T) {
 func TestCreateScope(t *testing.T) {
        client := CreateClient(t)
        conn := CreateTestConnection(client)
-       rule := CreateTestTransformationRule(client, conn.ID)
-       scope := CreateTestScope(client, rule, conn.ID)
+       scopeConfig := CreateTestScopeConfig(client, conn.ID)
+       scope := CreateTestScope(client, scopeConfig, conn.ID)
 
        scopes := client.ListScopes(PLUGIN_NAME, conn.ID, false)
        require.Equal(t, 1, len(scopes))
@@ -100,8 +100,8 @@ func TestCreateScope(t *testing.T) {
 func TestRunPipeline(t *testing.T) {
        client := CreateClient(t)
        conn := CreateTestConnection(client)
-       rule := CreateTestTransformationRule(client, conn.ID)
-       scope := CreateTestScope(client, rule, conn.ID)
+       scopeConfig := CreateTestScopeConfig(client, conn.ID)
+       scope := CreateTestScope(client, scopeConfig, conn.ID)
        pipeline := client.RunPipeline(models.NewPipeline{
                Name: "remote_test",
                Plan: []plugin.PipelineStage{
@@ -151,29 +151,31 @@ func TestBlueprintV200_withBlueprintDeletion(t 
*testing.T) {
        require.Equal(t, params.blueprints[1].ID, bpsList.Blueprints[0].ID)
 }
 
-func TestCreateTxRule(t *testing.T) {
+func TestCreateScopeConfig(t *testing.T) {
        client := CreateClient(t)
        connection := CreateTestConnection(client)
+       scopeConfig := FakeScopeConfig{Name: "Scope config", Env: "test env", 
Entities: []string{"CICD"}}
 
-       res := client.CreateTransformationRule(PLUGIN_NAME, connection.ID, 
FakeTxRule{Name: "Tx rule", Env: "test env"})
-       txRule := helper.Cast[FakeTxRule](res)
+       res := client.CreateScopeConfig(PLUGIN_NAME, connection.ID, scopeConfig)
+       scopeConfig = helper.Cast[FakeScopeConfig](res)
 
-       res = client.GetTransformationRule(PLUGIN_NAME, connection.ID, 
txRule.Id)
-       txRule = helper.Cast[FakeTxRule](res)
-       require.Equal(t, "Tx rule", txRule.Name)
-       require.Equal(t, "test env", txRule.Env)
+       res = client.GetScopeConfig(PLUGIN_NAME, connection.ID, scopeConfig.Id)
+       scopeConfig = helper.Cast[FakeScopeConfig](res)
+       require.Equal(t, "Scope config", scopeConfig.Name)
+       require.Equal(t, "test env", scopeConfig.Env)
+       require.Equal(t, []string{"CICD"}, scopeConfig.Entities)
 }
 
-func TestUpdateTxRule(t *testing.T) {
+func TestUpdateScopeConfig(t *testing.T) {
        client := CreateClient(t)
        connection := CreateTestConnection(client)
-       res := client.CreateTransformationRule(PLUGIN_NAME, connection.ID, 
FakeTxRule{Name: "old name", Env: "old env"})
-       oldTxRule := helper.Cast[FakeTxRule](res)
+       res := client.CreateScopeConfig(PLUGIN_NAME, connection.ID, 
FakeScopeConfig{Name: "old name", Env: "old env", Entities: []string{}})
+       oldscopeConfig := helper.Cast[FakeScopeConfig](res)
 
-       client.PatchTransformationRule(PLUGIN_NAME, connection.ID, 
oldTxRule.Id, FakeTxRule{Name: "new name", Env: "new env"})
+       client.PatchScopeConfig(PLUGIN_NAME, connection.ID, oldscopeConfig.Id, 
FakeScopeConfig{Name: "new name", Env: "new env", Entities: []string{"CICD"}})
 
-       res = client.GetTransformationRule(PLUGIN_NAME, connection.ID, 
oldTxRule.Id)
-       txRule := helper.Cast[FakeTxRule](res)
-       require.Equal(t, "new name", txRule.Name)
-       require.Equal(t, "new env", txRule.Env)
+       res = client.GetScopeConfig(PLUGIN_NAME, connection.ID, 
oldscopeConfig.Id)
+       scopeConfig := helper.Cast[FakeScopeConfig](res)
+       require.Equal(t, "new name", scopeConfig.Name)
+       require.Equal(t, "new env", scopeConfig.Env)
 }
diff --git a/backend/test/helper/api.go b/backend/test/helper/api.go
index f1a859edf..2bca9019d 100644
--- a/backend/test/helper/api.go
+++ b/backend/test/helper/api.go
@@ -201,36 +201,36 @@ func (d *DevlakeClient) DeleteScope(pluginName string, 
connectionId uint64, scop
        }, http.MethodDelete, 
fmt.Sprintf("%s/plugins/%s/connections/%d/scopes/%s?delete_data_only=%v", 
d.Endpoint, pluginName, connectionId, scopeId, deleteDataOnly), nil, nil)
 }
 
-func (d *DevlakeClient) CreateTransformationRule(pluginName string, 
connectionId uint64, rules any) any {
+func (d *DevlakeClient) CreateScopeConfig(pluginName string, connectionId 
uint64, scopeConfig any) any {
        return sendHttpRequest[any](d.testCtx, d.timeout, debugInfo{
                print:      true,
                inlineJson: false,
-       }, http.MethodPost, 
fmt.Sprintf("%s/plugins/%s/connections/%d/transformation_rules",
-               d.Endpoint, pluginName, connectionId), nil, rules)
+       }, http.MethodPost, 
fmt.Sprintf("%s/plugins/%s/connections/%d/scope-configs",
+               d.Endpoint, pluginName, connectionId), nil, scopeConfig)
 }
 
-func (d *DevlakeClient) PatchTransformationRule(pluginName string, 
connectionId uint64, txRuleId uint64, rule any) any {
+func (d *DevlakeClient) PatchScopeConfig(pluginName string, connectionId 
uint64, scopeConfigId uint64, scopeConfig any) any {
        return sendHttpRequest[any](d.testCtx, d.timeout, debugInfo{
                print:      true,
                inlineJson: false,
-       }, http.MethodPatch, 
fmt.Sprintf("%s/plugins/%s/connections/%d/transformation_rules/%d",
-               d.Endpoint, pluginName, connectionId, txRuleId), nil, rule)
+       }, http.MethodPatch, 
fmt.Sprintf("%s/plugins/%s/connections/%d/scope-configs/%d",
+               d.Endpoint, pluginName, connectionId, scopeConfigId), nil, 
scopeConfig)
 }
 
-func (d *DevlakeClient) ListTransformationRules(pluginName string, 
connectionId uint64) []any {
+func (d *DevlakeClient) ListScopeConfigs(pluginName string, connectionId 
uint64) []any {
        return sendHttpRequest[[]any](d.testCtx, d.timeout, debugInfo{
                print:      true,
                inlineJson: false,
-       }, http.MethodGet, 
fmt.Sprintf("%s/plugins/%s/connections/%d/transformation_rules?pageSize=20&page=1",
+       }, http.MethodGet, 
fmt.Sprintf("%s/plugins/%s/connections/%d/scope-configs?pageSize=20&page=1",
                d.Endpoint, pluginName, connectionId), nil, nil)
 }
 
-func (d *DevlakeClient) GetTransformationRule(pluginName string, connectionId 
uint64, txRuleId uint64) any {
+func (d *DevlakeClient) GetScopeConfig(pluginName string, connectionId uint64, 
scopeConfigId uint64) any {
        return sendHttpRequest[any](d.testCtx, d.timeout, debugInfo{
                print:      true,
                inlineJson: false,
-       }, http.MethodGet, 
fmt.Sprintf("%s/plugins/%s/connections/%d/transformation_rules/%d",
-               d.Endpoint, pluginName, connectionId, txRuleId), nil, nil)
+       }, http.MethodGet, 
fmt.Sprintf("%s/plugins/%s/connections/%d/scope-configs/%d",
+               d.Endpoint, pluginName, connectionId, scopeConfigId), nil, nil)
 }
 
 func (d *DevlakeClient) RemoteScopes(query RemoteScopesQuery) 
RemoteScopesOutput {

Reply via email to