dstandish commented on a change in pull request #18447: URL: https://github.com/apache/airflow/pull/18447#discussion_r716962976
########## File path: airflow/providers/amazon/aws/hooks/redshift_statement.py ########## @@ -0,0 +1,144 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Interact with AWS Redshift, using the boto3 library.""" + +from typing import Callable, Dict, Optional, Tuple, Union + +import redshift_connector +from redshift_connector import Connection as RedshiftConnection + +from airflow.hooks.dbapi import DbApiHook + + +class RedshiftStatementHook(DbApiHook): Review comment: What if we just call the module redshift_sql but the hook Redshift hook Then there are two redshift hooks until 3.0 After 3.0 the old one is renamed ########## File path: airflow/providers/amazon/aws/hooks/redshift_statement.py ########## @@ -0,0 +1,144 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Interact with AWS Redshift, using the boto3 library.""" + +from typing import Callable, Dict, Optional, Tuple, Union + +import redshift_connector +from redshift_connector import Connection as RedshiftConnection + +from airflow.hooks.dbapi import DbApiHook + + +class RedshiftStatementHook(DbApiHook): Review comment: What if we just call the module redshift_sql but the hook RedshiftHook Then there are two redshift hooks until 3.0 After 3.0 the old one is renamed And I guess we rename the new module at 3.0 too? ########## File path: airflow/providers/amazon/aws/hooks/redshift_statement.py ########## @@ -0,0 +1,144 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+"""Interact with AWS Redshift, using the boto3 library.""" + +from typing import Callable, Dict, Optional, Tuple, Union + +import redshift_connector +from redshift_connector import Connection as RedshiftConnection + +from airflow.hooks.dbapi import DbApiHook + + +class RedshiftStatementHook(DbApiHook): Review comment: What if we just call the module redshift_sql but the hook RedshiftHook Then there are two redshift hooks until 3.0, but in different modules After 3.0 the old one is renamed And I guess we rename the new module at 3.0 too? ########## File path: airflow/providers/amazon/aws/hooks/redshift_statement.py ########## @@ -0,0 +1,144 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Interact with AWS Redshift, using the boto3 library.""" + +from typing import Callable, Dict, Optional, Tuple, Union + +import redshift_connector +from redshift_connector import Connection as RedshiftConnection + +from airflow.hooks.dbapi import DbApiHook + + +class RedshiftStatementHook(DbApiHook): Review comment: It's a good point re IAM auth. 
What if we just call the module redshift_sql but the hook RedshiftHook Then there are two redshift hooks until 3.0, but in different modules After 3.0 the old one is renamed And I guess we rename the new module at 3.0 too? ########## File path: airflow/providers/amazon/aws/hooks/redshift_statement.py ########## @@ -0,0 +1,131 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Execute statements against Amazon Redshift, using redshift_connector.""" +try: + from functools import cached_property +except ImportError: + from cached_property import cached_property +from typing import Dict, Union + +import redshift_connector +from redshift_connector import Connection as RedshiftConnection + +from airflow.hooks.dbapi import DbApiHook + + +class RedshiftStatementHook(DbApiHook): + """ + Execute statements against Amazon Redshift, using redshift_connector + + This hook requires the redshift_conn_id connection. This connection must + be initialized with the host, port, login, password. Additional connection + options can be passed to extra as a JSON string. + + :param redshift_conn_id: reference to + :ref:`Amazon Redshift connection id<howto/connection:redshift>` + :type redshift_conn_id: str + + .. 
note:: + get_sqlalchemy_engine() and get_uri() depend on sqlalchemy-amazon-redshift + """ + + conn_name_attr = 'redshift_conn_id' + default_conn_name = 'redshift_default' + conn_type = 'redshift+redshift_connector' + hook_name = 'Amazon Redshift' + supports_autocommit = True + + @staticmethod + def get_ui_field_behavior() -> Dict: + """Returns custom field behavior""" + return { + "hidden_fields": [], + "relabeling": {'login': 'User', 'schema': 'Database'}, + } + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + @cached_property + def conn(self): + return self.get_connection( + self.redshift_conn_id # type: ignore[attr-defined] # pylint: disable=no-member + ) + + def _get_conn_params(self) -> Dict[str, Union[str, int]]: + """Helper method to retrieve connection args""" + conn = self.conn + + conn_params: Dict[str, Union[str, int]] = {} + + if conn.login: + conn_params['user'] = conn.login + if conn.password: + conn_params['password'] = conn.password + if conn.host: + conn_params['host'] = conn.host + if conn.port: + conn_params['port'] = conn.port + if conn.schema: + conn_params['database'] = conn.schema + + return conn_params + + def get_uri(self) -> str: + """ + Override DbApiHook get_uri method for get_sqlalchemy_engine() + + .. 
note:: + Value passed to connection extra parameter will be excluded + from returned uri but passed to get_sqlalchemy_engine() + by default + """ + from sqlalchemy.engine.url import URL + + conn_params = self._get_conn_params() + + conn = self.conn + + conn_type = conn.conn_type or RedshiftStatementHook.conn_type + + if 'user' in conn_params: + conn_params['username'] = conn_params.pop('user') + + return URL(drivername=conn_type, **conn_params).__str__() Review comment: ```suggestion return str(URL(drivername=conn_type, **conn_params)) ``` I believe `str()` is preferred to calling `__str__()` directly ########## File path: airflow/providers/amazon/aws/operators/redshift.py ########## @@ -0,0 +1,73 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Any, Optional + +from airflow.models import BaseOperator +from airflow.providers.amazon.aws.hooks.redshift_statement import RedshiftStatementHook + + +class RedshiftOperator(BaseOperator): + """ + Executes SQL Statements against an Amazon Redshift cluster + + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:RedshiftOperator` + + :param sql: the sql code to be executed + :type sql: Can receive a str representing a sql statement, + a list of str (sql statements) + :param redshift_conn_id: reference to + :ref:`Amazon Redshift connection id<howto/connection:redshift>` + :type redshift_conn_id: str + :param parameters: (optional) the parameters to render the SQL query with. + :type parameters: dict or iterable + :param autocommit: if True, each command is automatically committed. + (default value: False) + :type autocommit: bool + """ + + template_fields = ('sql',) + template_ext = ('.sql',) + + def __init__( + self, + *, + sql: Any, + redshift_conn_id: str = 'redshift_default', + parameters: Optional[dict] = None, + autocommit: bool = True, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.redshift_conn_id = redshift_conn_id + self.sql = sql + self.autocommit = autocommit + self.parameters = parameters + + def get_hook(self) -> RedshiftStatementHook: + """Create and return RedshiftStatementHook. + :return RedshiftStatementHook: A RedshiftStatementHook instance. + """ + return RedshiftStatementHook(redshift_conn_id=self.redshift_conn_id) + + def execute(self, context: dict) -> None: + """Execute a statement against Amazon Redshift""" + self.log.info(f"Executing statement: {self.sql}") + hook = self.get_hook() + hook.run(self.sql, autocommit=self.autocommit, parameters=self.parameters) Review comment: please consider this an optional suggestion, but i noticed that a `handler` param was added to DBAPIHook.run you could add this param to this operator too, so it could be passed to hook.run ########## File path: airflow/providers/amazon/aws/hooks/redshift_statement.py ########## @@ -0,0 +1,131 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Execute statements against Amazon Redshift, using redshift_connector.""" +try: + from functools import cached_property +except ImportError: + from cached_property import cached_property +from typing import Dict, Union + +import redshift_connector +from redshift_connector import Connection as RedshiftConnection + +from airflow.hooks.dbapi import DbApiHook + + +class RedshiftStatementHook(DbApiHook): + """ + Execute statements against Amazon Redshift, using redshift_connector + + This hook requires the redshift_conn_id connection. This connection must + be initialized with the host, port, login, password. Additional connection + options can be passed to extra as a JSON string. + + :param redshift_conn_id: reference to + :ref:`Amazon Redshift connection id<howto/connection:redshift>` + :type redshift_conn_id: str + + .. 
note:: + get_sqlalchemy_engine() and get_uri() depend on sqlalchemy-amazon-redshift + """ + + conn_name_attr = 'redshift_conn_id' + default_conn_name = 'redshift_default' + conn_type = 'redshift+redshift_connector' + hook_name = 'Amazon Redshift' + supports_autocommit = True + + @staticmethod + def get_ui_field_behavior() -> Dict: + """Returns custom field behavior""" + return { + "hidden_fields": [], + "relabeling": {'login': 'User', 'schema': 'Database'}, + } + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) Review comment: you could leave this out ########## File path: airflow/providers/amazon/aws/example_dags/example_redshift.py ########## @@ -0,0 +1,65 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +This is an example dag for using `RedshiftOperator` to authenticate with Amazon Redshift +using IAM authentication then executing a simple select statement Review comment: there's nothing in the example that is specific to IAM so perhaps best to remove that part of this comment. 
########## File path: tests/providers/amazon/aws/hooks/test_redshift_statement.py ########## @@ -0,0 +1,72 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +import json +import unittest +from unittest import mock + +from airflow.models import Connection +from airflow.providers.amazon.aws.hooks.redshift_statement import RedshiftStatementHook + + +class TestRedshiftStatementHookConn(unittest.TestCase): + def setUp(self): + super().setUp() + + self.connection = Connection(login='login', password='password', host='host', port=5439, schema="dev") + + class UnitTestRedshiftStatementHook(RedshiftStatementHook): + conn_name_attr = "redshift_conn_id" + conn_type = 'redshift+redshift_connector' + + self.db_hook = UnitTestRedshiftStatementHook() + self.db_hook.get_connection = mock.Mock() + self.db_hook.get_connection.return_value = self.connection + + def test_get_uri(self): + uri_shouldbe = 'redshift+redshift_connector://login:password@host:5439/dev' + x = self.db_hook.get_uri() + assert uri_shouldbe == x Review comment: ```suggestion assert x == uri_shouldbe ``` the convention with pytest is `assert actual == expected` ########## File path: airflow/providers/amazon/aws/hooks/redshift_statement.py ########## @@ -0,0 +1,131 @@ +# +# 
Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Execute statements against Amazon Redshift, using redshift_connector.""" +try: + from functools import cached_property +except ImportError: + from cached_property import cached_property +from typing import Dict, Union + +import redshift_connector +from redshift_connector import Connection as RedshiftConnection + +from airflow.hooks.dbapi import DbApiHook + + +class RedshiftStatementHook(DbApiHook): + """ + Execute statements against Amazon Redshift, using redshift_connector + + This hook requires the redshift_conn_id connection. This connection must + be initialized with the host, port, login, password. Additional connection + options can be passed to extra as a JSON string. + + :param redshift_conn_id: reference to + :ref:`Amazon Redshift connection id<howto/connection:redshift>` + :type redshift_conn_id: str + + .. 
note:: + get_sqlalchemy_engine() and get_uri() depend on sqlalchemy-amazon-redshift + """ + + conn_name_attr = 'redshift_conn_id' + default_conn_name = 'redshift_default' + conn_type = 'redshift+redshift_connector' + hook_name = 'Amazon Redshift' + supports_autocommit = True + + @staticmethod + def get_ui_field_behavior() -> Dict: + """Returns custom field behavior""" + return { + "hidden_fields": [], + "relabeling": {'login': 'User', 'schema': 'Database'}, + } + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + @cached_property + def conn(self): + return self.get_connection( + self.redshift_conn_id # type: ignore[attr-defined] # pylint: disable=no-member Review comment: i think pylint is removed from airflow now... ########## File path: tests/providers/amazon/aws/operators/test_redshift.py ########## @@ -0,0 +1,57 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import unittest +from unittest import mock + +from airflow.models.dag import DAG +from airflow.providers.amazon.aws.operators.redshift import RedshiftOperator +from airflow.utils import timezone + +DEFAULT_DATE = timezone.datetime(2015, 1, 1) +DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat() +DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10] +TEST_DAG_ID = 'unit_test_dag' + + +class TestRedshiftOperator(unittest.TestCase): + def setUp(self): + super().setUp() + args = {'owner': 'airflow', 'start_date': DEFAULT_DATE} + dag = DAG(TEST_DAG_ID, default_args=args) + self.dag = dag + + @mock.patch("airflow.providers.amazon.aws.operators.redshift.RedshiftOperator.get_hook") + def test_redshift_operator(self, mock_get_hook): + sql = """ + CREATE TABLE IF NOT EXISTS test_airflow ( + dummy VARCHAR(50) + ); + """ + operator = RedshiftOperator(task_id='redshift_operator', sql=sql, dag=self.dag) + operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) + + @mock.patch("airflow.providers.amazon.aws.operators.redshift.RedshiftOperator.get_hook") + def test_redshift_operator_test_multi(self, mock_get_hook): + sql = [ + "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))", + "TRUNCATE TABLE test_airflow", + "INSERT INTO test_airflow VALUES ('X')", + ] + operator = RedshiftOperator(task_id='redshift_operator_test_multi', sql=sql, dag=self.dag) + operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) Review comment: ```suggestion import unittest from unittest import mock from unittest.mock import MagicMock from airflow.providers.amazon.aws.operators.redshift import RedshiftOperator class TestRedshiftOperator(unittest.TestCase): @mock.patch("airflow.providers.amazon.aws.operators.redshift.RedshiftOperator.get_hook") def test_redshift_operator(self, mock_get_hook): hook = MagicMock() mock_run = hook.run mock_get_hook.return_value = hook sql = MagicMock() operator = RedshiftOperator(task_id='test', sql=sql) operator.execute(None) 
mock_run.assert_called_once_with( sql, autocommit=True, parameters=None, ) ``` Ok so why have I suggested this rewriting... If you look at your test, the behavior of the operator is not verified. `get_hook` will return a mock, and nothing that is done to the mock will cause an error, and the calls are not verified. So for example, if I swap out the list of string like so it still passes: ```python @mock.patch("airflow.providers.amazon.aws.operators.redshift.RedshiftOperator.get_hook") def test_redshift_operator_test_multi(self, mock_get_hook): def fun(): print('hello') sql = fun operator = RedshiftOperator(task_id='redshift_operator_test_multi', sql=sql, dag=self.dag) operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) ``` So there is no difference between your test for str and the one for list[str]; and neither of them really verify the behavior of the operator. Further, you don't really need to verify that hook.run takes list of str or str -- that is behavior of DBAPIHook and presumably verified elsewhere. We just care that hook.run is called with the values passed to the operator. So that's what I have tried to do here. And I called execute directly instead of run because it's simpler and means we can chop some of the boilerplate. You could improve upon my suggestion by parameterizing it such that it verifies that autocommit and parameters are forwarded to hook.run as appropriate ########## File path: tests/providers/amazon/aws/hooks/test_redshift_statement.py ########## @@ -0,0 +1,72 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +import json +import unittest +from unittest import mock + +from airflow.models import Connection +from airflow.providers.amazon.aws.hooks.redshift_statement import RedshiftStatementHook + + +class TestRedshiftStatementHookConn(unittest.TestCase): + def setUp(self): + super().setUp() + + self.connection = Connection(login='login', password='password', host='host', port=5439, schema="dev") + + class UnitTestRedshiftStatementHook(RedshiftStatementHook): + conn_name_attr = "redshift_conn_id" + conn_type = 'redshift+redshift_connector' + + self.db_hook = UnitTestRedshiftStatementHook() + self.db_hook.get_connection = mock.Mock() + self.db_hook.get_connection.return_value = self.connection + + def test_get_uri(self): + uri_shouldbe = 'redshift+redshift_connector://login:password@host:5439/dev' + x = self.db_hook.get_uri() + assert uri_shouldbe == x Review comment: ```suggestion assert x == uri_shouldbe ``` the convention with pytest is `assert actual == expected` nit pick here, but would also be easier to read / more conventional if you called it `expected` or `uri_expected` instead of `uri_shouldbe` -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected]
