TheNeuralBit commented on code in PR #17448:
URL: https://github.com/apache/beam/pull/17448#discussion_r858088473


##########
sdks/python/apache_beam/ml/inference/base.py:
##########
@@ -83,14 +84,29 @@ def get_inference_runner(self) -> InferenceRunner:
 
 
 class RunInference(beam.PTransform):
-  """An extensible transform for running inferences."""
-  def __init__(self, model_loader: ModelLoader, clock=None):
+  """An extensible transform for running inferences.
+  Args:
+      model_loader: An implementation of InferenceRunner.
+      clock: A clock implementing get_current_time_in_microseconds.
+      close_to_resource: A string representing the resource location hints.
+  """
+  def __init__(self,
+               model_loader: ModelLoader,
+               clock:_Clock=None,
+               close_to_resource:str=None):
     self._model_loader = model_loader
     self._clock = clock
+    self._close_to_resource = close_to_resource

Review Comment:
   Should this be a property of the model loader? That's what has knowledge of 
the resource, right?



##########
sdks/python/apache_beam/ml/inference/base.py:
##########
@@ -83,14 +84,29 @@ def get_inference_runner(self) -> InferenceRunner:
 
 
 class RunInference(beam.PTransform):
-  """An extensible transform for running inferences."""
-  def __init__(self, model_loader: ModelLoader, clock=None):
+  """An extensible transform for running inferences.
+  Args:
+      model_loader: An implementation of InferenceRunner.
+      clock: A clock implementing get_current_time_in_microseconds.
+      close_to_resource: A string representing the resource location hints.
+  """
+  def __init__(self,
+               model_loader: ModelLoader,
+               clock:_Clock=None,

Review Comment:
   Unrelated: How about defining an interface like NanosecondClock to use here? 
Then _Clock can just be the default implementation.



##########
sdks/python/apache_beam/ml/inference/base.py:
##########
@@ -83,14 +84,29 @@ def get_inference_runner(self) -> InferenceRunner:
 
 
 class RunInference(beam.PTransform):
-  """An extensible transform for running inferences."""
-  def __init__(self, model_loader: ModelLoader, clock=None):
+  """An extensible transform for running inferences.
+  Args:
+      model_loader: An implementation of InferenceRunner.
+      clock: A clock implementing get_current_time_in_microseconds.
+      close_to_resource: A string representing the resource location hints.
+  """
+  def __init__(self,
+               model_loader: ModelLoader,
+               clock:_Clock=None,
+               close_to_resource:str=None):
     self._model_loader = model_loader
     self._clock = clock
+    self._close_to_resource = close_to_resource
 
   # TODO(BEAM-14208): Add batch_size back off in the case there
   # are functional reasons large batch sizes cannot be handled.
   def expand(self, pcoll: beam.PCollection) -> beam.PCollection:
+    # TODO(BEAM-13690): Do this unconditionally once resolved.
+    if resources.ResourceHint.is_registered('close_to_resources') and 
self._close_to_resource:
+      pcoll |= (
+          'CloseToResources' >> beam.Map(lambda x: x).with_resource_hints(
+              close_to_resources=self._close_to_resource))

Review Comment:
   Why not add this resource hint to the _RunInferenceDoFn instead of adding an 
identity ParDo?



##########
sdks/python/apache_beam/ml/inference/base.py:
##########
@@ -83,14 +84,29 @@ def get_inference_runner(self) -> InferenceRunner:
 
 
 class RunInference(beam.PTransform):
-  """An extensible transform for running inferences."""
-  def __init__(self, model_loader: ModelLoader, clock=None):
+  """An extensible transform for running inferences.
+  Args:
+      model_loader: An implementation of InferenceRunner.

Review Comment:
   ```suggestion
         model_loader: An implementation of ModelLoader.
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to