This is an automated email from the ASF dual-hosted git repository.
hez pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-devlake.git
The following commit(s) were added to refs/heads/main by this push:
new 835ca30d7 test: Support for remote debugging of Python src code (#4778)
835ca30d7 is described below
commit 835ca30d7a7c36893b172a072f48a45e14f9a9f1
Author: Keon Amini <[email protected]>
AuthorDate: Mon Mar 27 15:56:11 2023 -0500
test: Support for remote debugging of Python src code (#4778)
* test: Support for remote debugging of Python src code
* refactor: change ENABLE_PYTHON_DEBUGGER to USE_PYTHON_DEBUGGER for more
future flexibility
---
backend/python/{pydevlake => }/README.md | 19 +-
backend/python/plugins/azuredevops/README.md | 1 +
backend/python/pydevlake/README.md | 338 +--------------------
.../python/pydevlake/pydevlake/helpers/debugger.py | 43 +++
backend/python/pydevlake/pydevlake/ipc.py | 4 +
backend/python/pydevlake/pydevlake/plugin.py | 1 -
backend/test/integration/helper/client.go | 17 ++
backend/test/integration/remote/helper.go | 118 +++++++
.../test/integration/remote/python_plugin_test.go | 129 +-------
9 files changed, 219 insertions(+), 451 deletions(-)
diff --git a/backend/python/pydevlake/README.md b/backend/python/README.md
similarity index 89%
copy from backend/python/pydevlake/README.md
copy to backend/python/README.md
index 7c7d2b36c..85093847e 100644
--- a/backend/python/pydevlake/README.md
+++ b/backend/python/README.md
@@ -1,6 +1,7 @@
# Pydevlake
-A framework to write data collection plugins for
[DevLake](https://devlake.apache.org/).
+A framework to write data collection plugins for
[DevLake](https://devlake.apache.org/). The framework source code
+can be found [here](./pydevlake) and the plugin source code
[here](./plugins).
# How to create a new plugin
@@ -335,3 +336,19 @@ go run server/services/remote/run/run.go -c 1 -p
python/plugins/myplugin/myplug
This script takes a connection id (`-c` flag) and the path to your plugin
`main.py` file (`-p` flag).
You can also send options as a JSON object (`-o` flag).
+
+# Automated tests
+Make sure you have unit-tests written for your plugin code. The test files
should end with `_test.py`, and are discovered and
+executed by the `run_tests.sh` script by the CICD automation. The test files
should be placed inside the plugin project directory.
+
+
+# Debugging Python plugins
+You need to have a Python remote-debugger installed to debug the Python code.
This capability is controlled by the environment
+variable `USE_PYTHON_DEBUGGER` which is empty by default. The allowed
debuggers as of now are:
+
+- pycharm
+
+You will further have to set the environment variables `PYTHON_DEBUG_HOST`
(The hostname/IP on which your debugger is running relative to the environment
+in which the plugin is running) and `PYTHON_DEBUG_PORT` (The corresponding
debugger port). The variables should be set in the
+Go integration tests written in `backend/test/integration/remote` or Docker
container/server env configuration. Once done,
+set breakpoints in the Python plugin code in your IDE, turn on the debugger in
it, and those breakpoints should get hit.
\ No newline at end of file
diff --git a/backend/python/plugins/azuredevops/README.md
b/backend/python/plugins/azuredevops/README.md
index e69de29bb..6cfdd508c 100644
--- a/backend/python/plugins/azuredevops/README.md
+++ b/backend/python/plugins/azuredevops/README.md
@@ -0,0 +1 @@
+# Azure Devops Python Plugin
\ No newline at end of file
diff --git a/backend/python/pydevlake/README.md
b/backend/python/pydevlake/README.md
index 7c7d2b36c..3f19bd2d7 100644
--- a/backend/python/pydevlake/README.md
+++ b/backend/python/pydevlake/README.md
@@ -1,337 +1,3 @@
-# Pydevlake
+# The framework for Python plugins
-A framework to write data collection plugins for
[DevLake](https://devlake.apache.org/).
-
-
-# How to create a new plugin
-
-## Create plugin project
-
-
-Make sure you have [Poetry](https://python-poetry.org/docs/#installation)
installed.
-Move to `python/plugins` and execute `poetry new myplugin`.
-This will generate a new directory for your plugin.
-
-In the `pyproject.toml` file and add the following line at the end of the
`[tool.poetry.dependencies]` section:
-```
-pydevlake = { path = "../../pydevlake", develop = false }
-```
-
-Now run `poetry install`.
-
-## Create `main` file
-
-Create a `main.py` file with the following content:
-
-```python
-from pydevlake import Plugin, Connection
-
-
-class MyPluginConnection(Connection):
- pass
-
-
-class MyPlugin(Plugin):
- @property
- def connection_type(self):
- return MyPluginConnection
-
- def test_connection(self, connection: MyPluginConnection):
- pass
-
- @property
- def streams(self):
- return []
-
-
-if __name__ == '__main__':
- MyPlugin.start()
-```
-
-This file is the entry point to your plugin.
-It specifies three things:
-- the parameters that your plugin needs to collect data, e.g. the url and
credentials to connect to the datasource or custom options
-- how to validate that some given parameters allows to connect to the
datasource, e.g. test whether the credentials are correct
-- the list of data streams that this plugin can collect
-
-
-### Connection parameters
-
-The parameters of your plugin are defined as class attributes of the
connection class.
-For example to add a `url` parameter of type `str` edit `MyPLuginConnection`
as follow:
-
-```python
-class MyPluginConnection(Connection):
- url: str
-```
-
-
-## Add a new data stream
-
-A data stream groups the logic for:
-- collecting the raw data from the datasource
-- extracting this raw data into a tool-specific model
-- converting the tool model into an equivalent [DevLake domain
model](https://devlake.apache.org/docs/next/DataModels/DevLakeDomainLayerSchema)
-
-
-### Create a tool model
-
-Create a `models.py` file.
-Then create a class that modelizes the data your stream is going to collect.
-
-```python
-from pydevlake.model import ToolModel
-
-class User(ToolModel, table=True):
- id: str = Field(primary_key=True)
- name: str
- email: str
-```
-
-Your tool model must declare at least one attribute as a primary key, like
`id` in the example above.
-It must inherit from `ToolModel`, which in turn inherit from `SQLModel`, the
base class of an [ORM of the same name](https://sqlmodel.tiangolo.com/).
-You can use `SQLModel` features like [declaring relationships with other
models](https://sqlmodel.tiangolo.com/tutorial/relationship-attributes/).
-
-
-### Create the stream class
-
-Create a new file for your first stream in a `streams` directory.
-
-```python
-from pydevlake import Stream, DomainType
-from pydevlake.domain_layer.crossdomain import User as DomainUser
-
-from myplugin.models import User as ToolUser
-
-
-class Users(Stream):
- tool_model = ToolUser
- domain_types = [DomainType.CROSS]
-
- def collect(self, state, context) -> Iterable[Tuple[object, dict]]:
- pass
-
- def convert(self, user: ToolUser, context) -> Iterable[DomainUser]:
- pass
-```
-
-This stream will collect raw user data, e.g. as parsed JSON objects, extract
this raw data as your
-tool-specific user model, then convert them into domain-layer user models.
-
-The `tool_model` class attribute declares the tool model class that is
extracted by this strem.
-The `domain_types` class attribute is a list of domain types this stream is
about.
-
-The `collect` method takes a `state` dictionary and a context object and
yields tuples of raw data and new state.
-The last state that the plugin yielded for a given connection will be reused
during the next collection.
-The plugin can use this `state` to store information necessary to perform
incremental collection of data.
-
-
-The `convert` method takes a tool-specific user model and convert it into
domain level user models.
-Here the two models align quite well, the conversion is trivial:
-
-```python
-def convert(self, user: ToolUser, context: Context) -> Iterable[DomainUser]:
- yield DomainUser(
- id=user.id,
- name=user.name
- email=user.email
- )
-```
-
-### Create an API wrapper
-
-Lets assume that your datasource is a REST API.
-We can create the following class to define it.
-
-```python
-from pydevlake.api import API
-
-
-class MyAPI(API):
- def __init__(self, url: str):
- self.url = url
-
- def users(self):
- return self.get(f'{self.url}/users')
-```
-
-By inheriting `API` you get access to facilities to wrap REST APIs.
-Here the `users` method will return a `Response` object that contains the
results of calling `GET` on `<url>/users`.
-
-Now we can go back to our stream file and implement `collect`:
-
-```python
-from myplugin.api import MyAPI
-
-...
-
- def collect(self, state, context) -> Iterable[Tuple[object, dict]]:
- api = MyAPI(context.connection.url)
- for user in api.users().json():
- yield user, state
-
-...
-```
-
-If the API responds with a list of JSON object with properties matching your
`User` model attributes, you're done!.
-Indeed extraction has a default implementation that takes of this common case.
-This is why it is important to make tool models that align with the data you
collect.
-
-If this is not the case, e.g. the attribute case mismatch, you can redefine
the `extract` method:
-
-```python
-...
-
-class Users(Stream):
- ...
-
- def extract(self, raw_data: dict) -> ToolModel:
- return ToolUser(
- id=raw_data["ID"],
- name=raw_data["Name"],
- email=raw_data["Email"]
- )
-
- ...
-```
-
-
-#### Request and response hook
-
-For each request sent and response received by your API wrapper,
-you can register hooks. Hooks allows you to implement
-authentication, pagination, and generic API error handling.
-
-For example, lets assume that we are dealing with an API that
-require user to authenticate via a token set in a request header.
-
-A request hook is declared in your API with a `@request_hook` decorator.
-
-```python
-...
-class MyAPI(API):
- def __init__(self, url, token):
- self.url = url
- self.token = token
-
- ...
- @request_hook
- def authenticate(self, request):
- if self.token:
- request.headers['Token'] = self.token
- ...
-```
-
-Here the method `authenticate` is a hook that is run on each request.
-Similarly you can declare response hooks with `@response_hook`.
-Multiple hooks are executed in the order of their declaration.
-The `API` base class declares some hooks that are executed first.
-
-
-#### Pagination
-
-One usage of a response hook is for handling paginated results.
-A response hook can be used to wrap the `Response` object in a
-`PagedResponse` object that support iteration and fetching other pages.
-This response hook is actually defined in `API` base class and expect
-your API wrapper to declare a `paginator` property.
-
-You can subclass `Paginator` to provide API specific logic or reuse an
-existing implementation such as `TokenPaginator`.
-A token paginator assumes the API paginated responses are JSON object with one
-property that is an array of items and another that contains the token to the
next
-page.
-
-For example, the following paginator fetch items from the `'results'`
attribute,
-the next page from the `'nextPage'` attribute and will issue requests with a
`page`
-query parameter.
-
-```
-...
-class MyAPI(API):
- ...
- paginator = TokenPaginator(
- items_attr='results',
- next_page_token_attr='nextPage',
- next_page_token_param='page'
- )
- ...
-```
-
-## Substreams
-
-With REST APIs, you often need to fetch a stream of items, and then to collect
additional
-data for each of those items.
-
-For example you might want to collect all `UserComments` written by each user
collected via the `Users` stream.
-
-To handle such cases, `UserComments` would inherit from `Substream` instead of
`Stream`.
-A substream needs to specify which is his parent stream. The `collect` method
-of a substream will be called with each item collected from the parent stream.
-
-```python
-...
-from pydevlake import Substream
-from myplugin.streams.users import Users
-
-class UserComments(Substream):
- parent_stream = Users # Must specify the parent stream
- ...
- def collect(self, state: dict, context, user: User):
- """
- This method will be called for each user collected from parent stream
Users.
- """
- for json in
MyPluginAPI(context.connection.token).user_comments(user.id):
- yield json, state
- ...
-```
-
-
-# Test the plugin standalone
-
-To test your plugin manually, you can run your `main.py` file with different
commands.
-You can find all those commands with `--help` cli flag:
-
-```console
-poetry run myplugin/main.py --help
-```
-
-For testing, the interesting commands are `collect`/`extract`/`convert`.
-Each takes a context and a stream name.
-The context is a JSON object that must at least contain:
-- a `db_url`, e.g. you can use `"sqlite+pysqlite:///:memory:"` for an
in-memory DB
-- a `connection` object containing the same attributes than your plugin
connection type
-
-Also, python plugins communicate with go side over an extra file descriptor 3,
so you should
-redirect to stdout when testing your plugin.
-
-```
-console
-CTX='{"db_url":"sqlite+pysqlite:///:memory:", "connection": {...your
connection attrs here...}}'
-poetry run myplugin/main.py $CTX users 3>&1
-```
-
-
-# Test the plugin with DevLake
-
-To test your plugin together with DevLake, you first need to create a
connection for your plugin and get its id.
-One easy way to do that is to run DevLake with `make dev` and then to create
the connection with a POST
-request to your plugin connection API:
-
-```console
-curl -X 'POST' \
- 'http://localhost:8080/plugins/myplugin/connections' \
- -d '{...connection JSON object...}'
-```
-
-You should get the created connection with his id (which is 1 for the first
created connection) in the response.
-
-Now that a connection for your plugin exists in DevLake database, we can try
to run your plugin using `backend/server/services/remote/run/run.go` script:
-
-```console
-cd backend
-go run server/services/remote/run/run.go -c 1 -p
python/plugins/myplugin/myplugin/main.py
-```
-
-This script takes a connection id (`-c` flag) and the path to your plugin
`main.py` file (`-p` flag).
-You can also send options as a JSON object (`-o` flag).
+This code handles calls from the main Golang server code into Python plugins
via IPC.
\ No newline at end of file
diff --git a/backend/python/pydevlake/pydevlake/helpers/debugger.py
b/backend/python/pydevlake/pydevlake/helpers/debugger.py
new file mode 100644
index 000000000..78d8666b9
--- /dev/null
+++ b/backend/python/pydevlake/pydevlake/helpers/debugger.py
@@ -0,0 +1,43 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from pydevlake import logger
+
+
+def __start__():
+ debugger = os.getenv("USE_PYTHON_DEBUGGER", default="").lower()
+ if debugger == "":
+ return
+ # The hostname of the machine from which you're debugging (e.g. your IDE's
host).
+ host = os.getenv("PYTHON_DEBUG_HOST", default="localhost")
+ # The port of the machine from which you're debugging (e.g. your IDE's
host)
+ port = int(os.getenv("PYTHON_DEBUG_PORT", default=32000))
+ print("========== Enabling remote debugging on ", host, ":", port, "
==========")
+ if debugger == "pycharm":
+ try:
+ import pydevd_pycharm as pydevd
+ try:
+ pydevd.settrace(host=host, port=port, suspend=False,
stdoutToServer=True, stderrToServer=True)
+ except TimeoutError as e:
+ logger.error(f"Failed to connect to pycharm debugger on
{host}:{port}. Make sure it is running")
+ except ImportError as e:
+ logger.error("Pycharm debugger library is not installed")
+ else:
+ logger.error(f"Unsupported Python debugger specified: {debugger}")
+
+
+__start__()
diff --git a/backend/python/pydevlake/pydevlake/ipc.py
b/backend/python/pydevlake/pydevlake/ipc.py
index faa4d035f..7aea247fd 100644
--- a/backend/python/pydevlake/pydevlake/ipc.py
+++ b/backend/python/pydevlake/pydevlake/ipc.py
@@ -24,6 +24,10 @@ from pydevlake.message import Message
def plugin_method(func):
+ # keep this to enable debugging, and don't move this elsewhere or it can
cause crashes if it gets called during the bootstrap process.
+ # noinspection PyUnresolvedReferences
+ from pydevlake.helpers import debugger
+
def open_send_channel() -> TextIO:
fd = 3
return os.fdopen(fd, 'w')
diff --git a/backend/python/pydevlake/pydevlake/plugin.py
b/backend/python/pydevlake/pydevlake/plugin.py
index 001b57a96..8ea9d57ff 100644
--- a/backend/python/pydevlake/pydevlake/plugin.py
+++ b/backend/python/pydevlake/pydevlake/plugin.py
@@ -4,7 +4,6 @@
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
-
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
diff --git a/backend/test/integration/helper/client.go
b/backend/test/integration/helper/client.go
index c906e3b88..750326e6b 100644
--- a/backend/test/integration/helper/client.go
+++ b/backend/test/integration/helper/client.go
@@ -155,6 +155,23 @@ func (d *DevlakeClient) SetTimeout(timeout time.Duration) {
d.timeout = timeout
}
+// AwaitPluginAvailability waits for this plugin to become available on the
server given a timeout. Returns false if this condition is not met.
+func (d *DevlakeClient) AwaitPluginAvailability(pluginName string, timeout
time.Duration) bool {
+ timeoutCh := time.After(timeout)
+ for {
+ select {
+ case <-timeoutCh:
+ return false
+ default:
+ _, err := plugin.GetPlugin(pluginName)
+ if err == nil {
+ return true
+ }
+ time.Sleep(250 * time.Millisecond)
+ }
+ }
+}
+
// RunPlugin manually execute a plugin directly (local server only)
func (d *DevlakeClient) RunPlugin(ctx context.Context, pluginName string,
pluginTask plugin.PluginTask, options map[string]interface{}, subtaskNames
...string) errors.Error {
if len(subtaskNames) == 0 {
diff --git a/backend/test/integration/remote/helper.go
b/backend/test/integration/remote/helper.go
new file mode 100644
index 000000000..c185778ad
--- /dev/null
+++ b/backend/test/integration/remote/helper.go
@@ -0,0 +1,118 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package test
+
+import (
+ "fmt"
+ "github.com/apache/incubator-devlake/core/config"
+ "github.com/apache/incubator-devlake/core/plugin"
+ "github.com/apache/incubator-devlake/test/integration/helper"
+ "os"
+ "path/filepath"
+ "testing"
+ "time"
+)
+
+const (
+ PLUGIN_NAME = "fake"
+ TOKEN = "this_is_a_valid_token"
+ FAKE_PLUGIN_DIR = "python/test/fakeplugin"
+)
+
+type (
+ FakePluginConnection struct {
+ Id uint64 `json:"id"`
+ Name string `json:"name"`
+ Token string `json:"token"`
+ }
+ FakeProject struct {
+ Id string `json:"id"`
+ Name string `json:"name"`
+ ConnectionId uint64 `json:"connection_id"`
+ TransformationRuleId uint64 `json:"transformation_rule_id"`
+ }
+ FakeTxRule struct {
+ Id uint64 `json:"id"`
+ Name string `json:"name"`
+ Env string `json:"env"`
+ }
+)
+
+func SetupEnv() {
+ fmt.Println("Setup test env")
+ helper.Init()
+ path := filepath.Join(helper.ProjectRoot, FAKE_PLUGIN_DIR, "start.sh")
// make sure the path is correct
+ _, err := os.Stat(path)
+ if err != nil {
+ panic(err)
+ }
+ _ = os.Setenv("REMOTE_PLUGINS_STARTUP_PATH", path)
+ _ = os.Setenv("ENABLE_REMOTE_PLUGINS", "true")
+}
+
+func ConnectLocalServer(t *testing.T) *helper.DevlakeClient {
+ fmt.Println("Connect to server")
+ client := helper.ConnectLocalServer(t, &helper.LocalClientConfig{
+ ServerPort: 8089,
+ DbURL: config.GetConfig().GetString("E2E_DB_URL"),
+ CreateServer: true,
+ TruncateDb: true,
+ Plugins: map[string]plugin.PluginMeta{},
+ })
+ client.SetTimeout(30 * time.Second)
+ return client
+}
+
+func CreateClient(t *testing.T) *helper.DevlakeClient {
+ SetupEnv()
+ client := ConnectLocalServer(t)
+ client.AwaitPluginAvailability(PLUGIN_NAME, 60*time.Second)
+ return client
+}
+
+func CreateTestConnection(client *helper.DevlakeClient) *helper.Connection {
+ connection := client.CreateConnection(PLUGIN_NAME,
+ FakePluginConnection{
+ Name: "Test connection",
+ Token: TOKEN,
+ },
+ )
+ return connection
+}
+
+func CreateTestScope(client *helper.DevlakeClient, connectionId uint64) any {
+ res := client.CreateTransformationRule(PLUGIN_NAME, FakeTxRule{Name:
"Tx rule", Env: "test env"})
+ rule, ok := res.(map[string]interface{})
+ if !ok {
+ panic("Cannot cast transform rule")
+ }
+ ruleId := uint64(rule["id"].(float64))
+
+ scope := client.CreateScope(PLUGIN_NAME,
+ connectionId,
+ FakeProject{
+ Id: "12345",
+ Name: "Test project",
+ ConnectionId: connectionId,
+ TransformationRuleId: ruleId,
+ },
+ )
+
+ client.SetTimeout(1)
+ return scope
+}
diff --git a/backend/test/integration/remote/python_plugin_test.go
b/backend/test/integration/remote/python_plugin_test.go
index 670461e74..8deaf43b2 100644
--- a/backend/test/integration/remote/python_plugin_test.go
+++ b/backend/test/integration/remote/python_plugin_test.go
@@ -18,114 +18,17 @@ limitations under the License.
package test
import (
- "fmt"
- "github.com/apache/incubator-devlake/core/config"
- "github.com/apache/incubator-devlake/test/integration/helper"
- "os"
- "path/filepath"
- "testing"
- "time"
-
"github.com/apache/incubator-devlake/core/models"
"github.com/apache/incubator-devlake/core/plugin"
+ "github.com/apache/incubator-devlake/test/integration/helper"
"github.com/stretchr/testify/require"
+ "testing"
)
-const (
- PLUGIN_NAME = "fake"
- TOKEN = "this_is_a_valid_token"
- FAKE_PLUGIN_DIR = "python/test/fakeplugin"
-)
-
-type FakePluginConnection struct {
- Id uint64 `json:"id"`
- Name string `json:"name"`
- Token string `json:"token"`
-}
-
-type FakeProject struct {
- Id string `json:"id"`
- Name string `json:"name"`
- ConnectionId uint64 `json:"connection_id"`
- TransformationRuleId uint64 `json:"transformation_rule_id"`
-}
-
-type FakeTxRule struct {
- Id uint64 `json:"id"`
- Name string `json:"name"`
- Env string `json:"env"`
-}
-
-func setupEnv() {
- fmt.Println("Setup test env")
- helper.Init()
- path := filepath.Join(helper.ProjectRoot, FAKE_PLUGIN_DIR, "start.sh")
// make sure the path is correct
- _, err := os.Stat(path)
- if err != nil {
- panic(err)
- }
- _ = os.Setenv("REMOTE_PLUGINS_STARTUP_PATH", path)
- _ = os.Setenv("ENABLE_REMOTE_PLUGINS", "true")
-}
-
-func connectLocalServer(t *testing.T) *helper.DevlakeClient {
- fmt.Println("Connect to server")
- client := helper.ConnectLocalServer(t, &helper.LocalClientConfig{
- ServerPort: 8089,
- DbURL: config.GetConfig().GetString("E2E_DB_URL"),
- CreateServer: true,
- TruncateDb: true,
- Plugins: map[string]plugin.PluginMeta{},
- })
- client.SetTimeout(60 * time.Second)
- // Wait for plugin registration
- time.Sleep(3 * time.Second)
- return client
-}
-
-func createClient(t *testing.T) *helper.DevlakeClient {
- setupEnv()
- return connectLocalServer(t)
-}
-
-func createTestConnection(client *helper.DevlakeClient) *helper.Connection {
- connection := client.CreateConnection(PLUGIN_NAME,
- FakePluginConnection{
- Name: "Test connection",
- Token: TOKEN,
- },
- )
-
- client.SetTimeout(1)
- return connection
-}
-
-func createTestScope(client *helper.DevlakeClient, connectionId uint64) any {
- res := client.CreateTransformationRule(PLUGIN_NAME, FakeTxRule{Name:
"Tx rule", Env: "test env"})
- rule, ok := res.(map[string]interface{})
- if !ok {
- panic("Cannot cast transform rule")
- }
- ruleId := uint64(rule["id"].(float64))
-
- scope := client.CreateScope(PLUGIN_NAME,
- connectionId,
- FakeProject{
- Id: "12345",
- Name: "Test project",
- ConnectionId: connectionId,
- TransformationRuleId: ruleId,
- },
- )
-
- client.SetTimeout(1)
- return scope
-}
-
func TestCreateConnection(t *testing.T) {
- client := createClient(t)
+ client := CreateClient(t)
- createTestConnection(client)
+ CreateTestConnection(client)
conns := client.ListConnections(PLUGIN_NAME)
require.Equal(t, 1, len(conns))
@@ -133,8 +36,8 @@ func TestCreateConnection(t *testing.T) {
}
func TestRemoteScopeGroups(t *testing.T) {
- client := createClient(t)
- connection := createTestConnection(client)
+ client := CreateClient(t)
+ connection := CreateTestConnection(client)
output := client.RemoteScopes(helper.RemoteScopesQuery{
PluginName: PLUGIN_NAME,
@@ -150,8 +53,8 @@ func TestRemoteScopeGroups(t *testing.T) {
}
func TestRemoteScopes(t *testing.T) {
- client := createClient(t)
- connection := createTestConnection(client)
+ client := CreateClient(t)
+ connection := CreateTestConnection(client)
output := client.RemoteScopes(helper.RemoteScopesQuery{
PluginName: PLUGIN_NAME,
@@ -168,10 +71,10 @@ func TestRemoteScopes(t *testing.T) {
}
func TestCreateScope(t *testing.T) {
- client := createClient(t)
+ client := CreateClient(t)
var connectionId uint64 = 1
- createTestScope(client, connectionId)
+ CreateTestScope(client, connectionId)
scopes := client.ListScopes(PLUGIN_NAME, connectionId)
require.Equal(t, 1, len(scopes))
@@ -179,10 +82,10 @@ func TestCreateScope(t *testing.T) {
func TestRunPipeline(t *testing.T) {
t.SkipNow() //Fix later
- client := createClient(t)
- conn := createTestConnection(client)
+ client := CreateClient(t)
+ conn := CreateTestConnection(client)
- createTestScope(client, conn.ID)
+ CreateTestScope(client, conn.ID)
pipeline := client.RunPipeline(models.NewPipeline{
Name: "remote_test",
@@ -207,13 +110,13 @@ func TestRunPipeline(t *testing.T) {
func TestBlueprintV200(t *testing.T) {
t.SkipNow() //Fix later
- client := createClient(t)
- connection := createTestConnection(client)
+ client := CreateClient(t)
+ connection := CreateTestConnection(client)
projectName := "Test project"
client.CreateProject(&helper.ProjectConfig{
ProjectName: projectName,
})
- createTestScope(client, connection.ID)
+ CreateTestScope(client, connection.ID)
blueprint := client.CreateBasicBlueprintV2(
"Test blueprint",