ferruzzi commented on code in PR #48468:
URL: https://github.com/apache/airflow/pull/48468#discussion_r2025397155


##########
providers/amazon/src/airflow/providers/amazon/aws/sensors/bedrock.py:
##########
@@ -368,3 +370,105 @@ def execute(self, context: Context) -> Any:
             )
         else:
             super().execute(context=context)
+
+
+class BedrockBatchInferenceSensor(BedrockBaseSensor[BedrockAgentHook]):
+    """
+    Poll the batch inference job status until it reaches a terminal state; 
fails if creation fails.
+
+    .. seealso::
+        For more information on how to use this sensor, take a look at the 
guide:
+        :ref:`howto/sensor:BedrockBatchInferenceSensor`
+
+    :param job_arn: The Amazon Resource Name (ARN) of the batch inference job. (templated)
+    :param success_state: A BedrockBatchInferenceSensor.SuccessState; defaults to 'SCHEDULED' (templated)
+
+    :param deferrable: If True, the sensor will operate in deferrable mode. This mode requires aiobotocore
+        module to be installed.
+        (default: False, but can be overridden in config file by setting 
default_deferrable to True)
+    :param poke_interval: Polling period in seconds to check for the status of 
the job. (default: 5)
+    :param max_retries: Number of times before returning the current state 
(default: 24)
+    :param aws_conn_id: The Airflow connection used for AWS credentials.
+        If this is ``None`` or empty then the default boto3 behaviour is used. 
If
+        running Airflow in a distributed manner and aws_conn_id is None or
+        empty, then default boto3 configuration would be used (and must be
+        maintained on each worker node).
+    :param region_name: AWS region_name. If not specified then the default 
boto3 behaviour is used.
+    :param verify: Whether or not to verify SSL certificates. See:
+        
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html
+    :param botocore_config: Configuration dictionary (key-values) for botocore 
client. See:
+        
https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html
+    """
+
+    class SuccessState:
+        """
+        Target state for the BedrockBatchInferenceSensor.
+
+        Bedrock adds batch inference jobs to a queue, and they may take some time to complete.
+        If you want to wait for the job to complete, use SuccessState.COMPLETED, but if you only want
+        to wait until the service confirms that the job is in the queue, use SuccessState.SCHEDULED.
+
+        The normal successful progression of states is:
+            Submitted > Validating > Scheduled > InProgress > 
PartiallyCompleted > Completed
+        """
+
+        SCHEDULED = "scheduled"
+        COMPLETED = "completed"
+
+    INTERMEDIATE_STATES: tuple[str, ...]  # Defined in __init__ based on 
target state
+    FAILURE_STATES: tuple[str, ...] = ("Failed", "Stopped", 
"PartiallyCompleted", "Expired")
+    SUCCESS_STATES: tuple[str, ...]  # Defined in __init__ based on target 
state
+    FAILURE_MESSAGE = "Bedrock batch inference job sensor failed."
+    INVALID_SUCCESS_STATE_MESSAGE = "success_state must be an instance of SuccessState."
+
+    aws_hook_class = BedrockHook
+
+    template_fields: Sequence[str] = aws_template_fields("job_arn", 
"success_state")
+
+    def __init__(
+        self,
+        *,
+        job_arn: str,
+        success_state: SuccessState | str = SuccessState.SCHEDULED,

Review Comment:
   Gah, I renamed it at one point and missed a reference, thanks.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to