zhaoyongjie commented on a change in pull request #15279:
URL: https://github.com/apache/superset/pull/15279#discussion_r659252244
##########
File path: superset/common/query_context.py
##########
@@ -97,6 +101,62 @@ def __init__( # pylint: disable=too-many-arguments
"result_format": self.result_format,
}
+    def processing_time_offset(
+        self, df: pd.DataFrame, query_object: QueryObject,
+    ) -> Tuple[pd.DataFrame, List[str]]:
+        # ensure query_object is immutable
+        query_object_clone = copy.copy(query_object)
+        rv_sql = []
+
+        time_offset = query_object.time_offset
+        outer_from_dttm = query_object.from_dttm
+        outer_to_dttm = query_object.to_dttm
+        for offset in time_offset:
+            try:
+                query_object_clone.from_dttm = get_past_or_future(
+                    offset, outer_from_dttm,
+                )
+                query_object_clone.to_dttm = get_past_or_future(offset, outer_to_dttm)
+            except ValueError as ex:
+                raise QueryObjectValidationError(str(ex))
+            # make sure the subquery uses the main query's WHERE clause
+            query_object_clone.inner_from_dttm = outer_from_dttm
+            query_object_clone.inner_to_dttm = outer_to_dttm
+            query_object_clone.time_offset = []
Review comment:
I did some refactoring of the `get_df_payload` function. Now both the main
query and the extra (time-offset) queries should be cached.
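
For context, here is a minimal, self-contained sketch of that caching idea, assuming a plain dict-backed cache; the helper names (`cache_key`, `get_df_payload_sketch`) and the key format are illustrative assumptions, not Superset's actual implementation:

```python
import hashlib
import json
from datetime import datetime
from typing import Any, Dict

# stand-in for a real cache backend; a plain dict keeps the sketch self-contained
_CACHE: Dict[str, Any] = {}


def cache_key(query_spec: Dict[str, Any]) -> str:
    # derive a stable key from the full query definition, including its time bounds,
    # so the main query and each time-offset variant hash to different keys
    payload = json.dumps(query_spec, sort_keys=True, default=str)
    return hashlib.md5(payload.encode("utf-8")).hexdigest()


def get_df_payload_sketch(query_spec: Dict[str, Any]) -> Dict[str, Any]:
    # return the cached result when available, otherwise "run" the query and cache it
    key = cache_key(query_spec)
    if key in _CACHE:
        return {"cache_key": key, "cached": True, "df": _CACHE[key]}
    df = {"rows": f"result for {query_spec['from_dttm']}..{query_spec['to_dttm']}"}
    _CACHE[key] = df
    return {"cache_key": key, "cached": False, "df": df}


# the main query and its 1-week-offset variant get distinct keys
# and are cached independently
main_query = {
    "metric": "count",
    "from_dttm": datetime(2021, 6, 1),
    "to_dttm": datetime(2021, 6, 8),
}
offset_query = {
    **main_query,
    "from_dttm": datetime(2021, 5, 25),
    "to_dttm": datetime(2021, 6, 1),
}

print(get_df_payload_sketch(main_query)["cached"])    # False: first run, result cached
print(get_df_payload_sketch(offset_query)["cached"])  # False: separate key, result cached
print(get_df_payload_sketch(main_query)["cached"])    # True: served from cache
```

Because the shifted time bounds are part of each key, the main query and every extra query can be reused independently on subsequent requests.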
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]