uranusjr commented on code in PR #25268:
URL: https://github.com/apache/airflow/pull/25268#discussion_r956896391
##########
airflow/providers/google/cloud/hooks/cloud_memorystore.py:
##########
@@ -752,8 +753,7 @@ def list_instances(
metadata: Sequence[Tuple[str, str]] = (),
):
"""
- Lists all Memcached instances owned by a project in either the
specified location (region) or all
- locations.
+ Lists all Memcached instances owned by a project at the specified
location (region) or all locations.
Review Comment:
```suggestion
List Memcached instances owned by a project at the specified
location (region) or all locations.
```
##########
airflow/providers/mysql/hooks/mysql.py:
##########
@@ -61,7 +61,9 @@ def __init__(self, *args, **kwargs) -> None:
def set_autocommit(self, conn: MySQLConnectionTypes, autocommit: bool) ->
None:
"""
- The MySQLdb (mysqlclient) client uses an `autocommit` method rather
+ Mysqlclient uses an `autocommit` method rather than an `autocommit`
property.
Review Comment:
```suggestion
Set *autocommit*.
```
##########
airflow/providers/google/cloud/hooks/cloud_memorystore.py:
##########
@@ -392,8 +394,7 @@ def list_instances(
metadata: Sequence[Tuple[str, str]] = (),
):
"""
- Lists all Redis instances owned by a project in either the specified
location (region) or all
- locations.
+ Lists all Redis instances owned by a project at the specified location
(region) or all locations.
Review Comment:
```suggestion
List Redis instances owned by a project at the specified location
(region) or all locations.
```
##########
airflow/providers/mysql/hooks/mysql.py:
##########
@@ -75,7 +77,9 @@ def set_autocommit(self, conn: MySQLConnectionTypes,
autocommit: bool) -> None:
def get_autocommit(self, conn: MySQLConnectionTypes) -> bool:
"""
- The MySQLdb (mysqlclient) client uses a `get_autocommit` method
+ Mysqlclient uses a `get_autocommit` method rather than an `autocommit`
property.
+
+ Mysqlclient client uses a `get_autocommit` method
rather than an `autocommit` property to get the autocommit setting
Review Comment:
```suggestion
    *mysqlclient* uses a *get_autocommit* method rather than an
*autocommit*
    property, so we need to override this to support it.
```
##########
airflow/providers/databricks/hooks/databricks_base.py:
##########
@@ -195,21 +194,23 @@ def _parse_host(host: str) -> str:
def _get_retry_object(self) -> Retrying:
"""
- Instantiates a retry object
+ Instantiates a retry object.
Review Comment:
```suggestion
Instantiate a retry object.
```
##########
airflow/utils/process_utils.py:
##########
@@ -190,6 +192,8 @@ def execute_in_subprocess_with_kwargs(cmd: List[str],
**kwargs) -> None:
def execute_interactive(cmd: List[str], **kwargs) -> None:
"""
+ Runs the new command as a subprocess.
Review Comment:
```suggestion
Run the new command as a subprocess.
```
##########
airflow/providers/mysql/hooks/mysql.py:
##########
@@ -202,6 +208,8 @@ def bulk_dump(self, table: str, tmp_file: str) -> None:
@staticmethod
def _serialize_cell(cell: object, conn: Optional[Connection] = None) ->
object:
"""
+ Mysqldb converts argument to a literal.
Review Comment:
```suggestion
Convert argument to a literal.
```
##########
airflow/providers/google/cloud/hooks/cloud_memorystore.py:
##########
@@ -851,9 +851,10 @@ def update_parameters(
metadata: Sequence[Tuple[str, str]] = (),
):
"""
- Updates the defined Memcached Parameters for an existing Instance.
This method only stages the
- parameters, it must be followed by apply_parameters to apply the
parameters to nodes of
- the Memcached Instance.
+ Updates the defined Memcached Parameters for an existing Instance.
Review Comment:
```suggestion
Update the defined Memcached Parameters for an existing Instance.
```
##########
airflow/providers/mysql/hooks/mysql.py:
##########
@@ -75,7 +77,9 @@ def set_autocommit(self, conn: MySQLConnectionTypes,
autocommit: bool) -> None:
def get_autocommit(self, conn: MySQLConnectionTypes) -> bool:
"""
- The MySQLdb (mysqlclient) client uses a `get_autocommit` method
+ Mysqlclient uses a `get_autocommit` method rather than an `autocommit`
property.
Review Comment:
```suggestion
Whether *autocommit* is active.
```
##########
airflow/providers/mysql/hooks/mysql.py:
##########
@@ -61,7 +61,9 @@ def __init__(self, *args, **kwargs) -> None:
def set_autocommit(self, conn: MySQLConnectionTypes, autocommit: bool) ->
None:
"""
- The MySQLdb (mysqlclient) client uses an `autocommit` method rather
+ Mysqlclient uses an `autocommit` method rather than an `autocommit`
property.
+
+ Mysqlclient client uses an `autocommit` method rather
than an `autocommit` property to set the autocommit setting
Review Comment:
```suggestion
*mysqlclient* uses an *autocommit* method rather than an *autocommit*
property, so we need to override this to support it.
```
##########
airflow/providers/mysql/hooks/mysql.py:
##########
@@ -214,6 +222,8 @@ def _serialize_cell(cell: object, conn:
Optional[Connection] = None) -> object:
def get_iam_token(self, conn: Connection) -> Tuple[str, int]:
"""
+ Awshook to retrieve a temporary password to connect to MySQL Port.
Review Comment:
```suggestion
Retrieve a temporary password to connect to MySQL.
```
##########
airflow/utils/process_utils.py:
##########
@@ -271,8 +275,9 @@ def kill_child_processes_by_pids(pids_to_kill: List[int],
timeout: int = 5) -> N
@contextmanager
def patch_environ(new_env_variables: Dict[str, str]) -> Generator[None, None,
None]:
"""
- Sets environment variables in context. After leaving the context, it
restores its original state.
+ Sets environment variables in context.
Review Comment:
```suggestion
Set environment variables in context.
```
##########
airflow/providers/qubole/sensors/qubole.py:
##########
@@ -68,8 +68,9 @@ def poke(self, context: 'Context') -> bool:
class QuboleFileSensor(QuboleSensor):
"""
- Wait for a file or folder to be present in cloud storage
- and check for its presence via QDS APIs
+ Wait for a file or folder to be present in cloud storage.
+
+ Check for file or folder presence via QDS APIs
Review Comment:
```suggestion
Check for file or folder presence via QDS APIs.
```
##########
airflow/utils/process_utils.py:
##########
@@ -316,7 +321,7 @@ def check_if_pidfile_process_is_running(pid_file: str,
process_name: str):
def set_new_process_group() -> None:
"""
- Tries to set current process to a new process group
+ Tries to set current process to a new process group.
Review Comment:
```suggestion
Try to set current process to a new process group.
```
##########
airflow/providers/databricks/hooks/databricks_base.py:
##########
@@ -195,21 +194,23 @@ def _parse_host(host: str) -> str:
def _get_retry_object(self) -> Retrying:
"""
- Instantiates a retry object
+ Instantiates a retry object.
:return: instance of Retrying class
"""
return Retrying(**self.retry_args)
def _a_get_retry_object(self) -> AsyncRetrying:
"""
- Instantiates an async retry object
+ Instantiates an async retry object.
Review Comment:
```suggestion
Instantiate an async retry object.
```
##########
airflow/providers/amazon/aws/transfers/redshift_to_s3.py:
##########
@@ -29,7 +29,7 @@
class RedshiftToS3Operator(BaseOperator):
"""
- Executes an UNLOAD command to s3 as a CSV with headers
+ Executes an UNLOAD command to s3 as a CSV with headers.
Review Comment:
```suggestion
Execute an UNLOAD command to s3 as a CSV with headers.
```
##########
airflow/providers/databricks/hooks/databricks_base.py:
##########
@@ -340,7 +341,7 @@ async def _a_get_aad_token(self, resource: str) -> str:
def _get_aad_headers(self) -> dict:
"""
- Fills AAD headers if necessary (SPN is outside of the workspace)
+ Fills AAD headers if necessary (SPN is outside of the workspace).
Review Comment:
```suggestion
Fill AAD headers if necessary (SPN is outside of the workspace).
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]