This is an automated email from the ASF dual-hosted git repository.

beto pushed a commit to branch semantic-layer-feature
in repository https://gitbox.apache.org/repos/asf/superset.git


The following commit(s) were added to refs/heads/semantic-layer-feature by this 
push:
     new 54226b96c3a Add tests for mapper
54226b96c3a is described below

commit 54226b96c3a96bdc209498cd60695b0ed715f407
Author: Beto Dealmeida <[email protected]>
AuthorDate: Fri Feb 6 12:26:30 2026 -0500

    Add tests for mapper
---
 .github/workflows/superset-python-unittest.yml  |    1 +
 tests/unit_tests/semantic_layers/mapper_test.py | 1138 +++++++++++++++++++++++
 2 files changed, 1139 insertions(+)

diff --git a/.github/workflows/superset-python-unittest.yml 
b/.github/workflows/superset-python-unittest.yml
index 1dcf8a82237..a7b5cdb9037 100644
--- a/.github/workflows/superset-python-unittest.yml
+++ b/.github/workflows/superset-python-unittest.yml
@@ -52,6 +52,7 @@ jobs:
           SUPERSET_SECRET_KEY: not-a-secret
         run: |
           pytest --durations-min=0.5 --cov=superset/sql/ 
./tests/unit_tests/sql/ --cache-clear --cov-fail-under=100
+          pytest --durations-min=0.5 --cov=superset/semantic_layers/ 
./tests/unit_tests/semantic_layers/ --cache-clear --cov-fail-under=100
       - name: Upload code coverage
         uses: codecov/codecov-action@v5
         with:
diff --git a/tests/unit_tests/semantic_layers/mapper_test.py 
b/tests/unit_tests/semantic_layers/mapper_test.py
index 9e5c10d50a6..8e5dd06fe58 100644
--- a/tests/unit_tests/semantic_layers/mapper_test.py
+++ b/tests/unit_tests/semantic_layers/mapper_test.py
@@ -1584,3 +1584,1141 @@ def test_get_results_with_multiple_dimensions(
     )
 
     pd.testing.assert_frame_equal(result.df, expected_df)
+
+
+def test_get_results_no_datasource() -> None:
+    """
+    Test that get_results raises error when datasource is missing.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=None,
+        metrics=["total_sales"],
+        columns=["category"],
+    )
+
+    with pytest.raises(ValueError, match="QueryObject must have a datasource 
defined"):
+        get_results(query_object)
+
+
+def test_get_results_with_duplicate_columns(
+    mock_datasource: MagicMock,
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test get_results handles duplicate columns from merge gracefully.
+    """
+    # Create main dataframe
+    main_df = pd.DataFrame(
+        {
+            "category": ["Electronics", "Books"],
+            "total_sales": [1000.0, 500.0],
+        }
+    )
+
+    # Create offset dataframe with an extra column that will cause duplicate
+    offset_df = pd.DataFrame(
+        {
+            "category": ["Electronics", "Books"],
+            "total_sales": [950.0, 480.0],
+            "category__duplicate": ["X", "Y"],  # Simulate a duplicate column
+        }
+    )
+
+    mock_main_result = SemanticResult(
+        requests=[SemanticRequest(type="SQL", definition="MAIN")],
+        results=main_df.copy(),
+    )
+
+    mock_offset_result = SemanticResult(
+        requests=[SemanticRequest(type="SQL", definition="OFFSET")],
+        results=offset_df.copy(),
+    )
+
+    mock_datasource.implementation.get_dataframe = mocker.Mock(
+        side_effect=[mock_main_result, mock_offset_result]
+    )
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="order_date",
+        time_offsets=["1 week ago"],
+    )
+
+    result = get_results(query_object)
+
+    # Verify duplicate columns are dropped
+    assert "category__duplicate" not in result.df.columns
+
+
+def test_get_results_empty_requests(
+    mock_datasource: MagicMock,
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test get_results with empty requests list.
+    """
+    main_df = pd.DataFrame(
+        {
+            "category": ["Electronics"],
+            "total_sales": [1000.0],
+        }
+    )
+
+    mock_result = SemanticResult(
+        requests=[],  # Empty requests
+        results=main_df,
+    )
+
+    mock_datasource.implementation.get_dataframe = 
mocker.Mock(return_value=mock_result)
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="order_date",
+    )
+
+    result = get_results(query_object)
+
+    # Query string should be empty when no requests
+    assert result.query == ""
+
+
+def test_normalize_column_adhoc_not_in_dimensions() -> None:
+    """
+    Test _normalize_column raises error for AdhocColumn with sqlExpression not 
in dims.
+    """
+    from superset.semantic_layers.mapper import _normalize_column
+    from superset.superset_typing import AdhocColumn
+
+    dimension_names = {"category", "region"}
+    adhoc_column: AdhocColumn = {
+        "isColumnReference": True,
+        "sqlExpression": "unknown_dimension",
+    }
+
+    with pytest.raises(ValueError, match="Adhoc dimensions are not supported"):
+        _normalize_column(adhoc_column, dimension_names)
+
+
+def test_normalize_column_adhoc_missing_sql_expression() -> None:
+    """
+    Test _normalize_column raises error for AdhocColumn without sqlExpression.
+    """
+    from superset.semantic_layers.mapper import _normalize_column
+    from superset.superset_typing import AdhocColumn
+
+    dimension_names = {"category", "region"}
+    adhoc_column: AdhocColumn = {
+        "isColumnReference": True,
+    }
+
+    with pytest.raises(ValueError, match="Adhoc dimensions are not supported"):
+        _normalize_column(adhoc_column, dimension_names)
+
+
+def test_normalize_column_adhoc_valid(mock_datasource: MagicMock) -> None:
+    """
+    Test _normalize_column with valid AdhocColumn reference.
+    """
+    from superset.semantic_layers.mapper import _normalize_column
+    from superset.superset_typing import AdhocColumn
+
+    dimension_names = {"category", "region"}
+    adhoc_column: AdhocColumn = {
+        "isColumnReference": True,
+        "sqlExpression": "category",
+    }
+
+    result = _normalize_column(adhoc_column, dimension_names)
+    assert result == "category"
+
+
+def test_get_filters_from_query_object_with_filter_clauses(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test filter extraction with filter clauses including TEMPORAL_RANGE skip.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="order_date",
+        filter=[
+            {
+                "op": FilterOperator.TEMPORAL_RANGE.value,
+                "col": "order_date",
+                "val": "Last 7 days",
+            },
+            {
+                "op": FilterOperator.EQUALS.value,
+                "col": "category",
+                "val": "Electronics",
+            },
+        ],
+    )
+
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    result = _get_filters_from_query_object(query_object, None, all_dimensions)
+
+    # Should return a set of filters
+    # TEMPORAL_RANGE should be skipped when granularity is set
+    # The category EQUALS filter should be converted
+    assert isinstance(result, set)
+    # Should have at least time filters (from from_dttm/to_dttm)
+    assert len(result) >= 2
+
+
+def test_get_time_filter_unknown_granularity(mock_datasource: MagicMock) -> 
None:
+    """
+    Test _get_time_filter returns empty set when granularity is not in 
dimensions.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="unknown_time_column",  # Not in dimensions
+    )
+
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    result = _get_time_filter(query_object, None, all_dimensions)
+
+    assert result == set()
+
+
+def test_get_time_filter_missing_bounds(mock_datasource: MagicMock) -> None:
+    """
+    Test _get_time_filter returns empty set when time bounds are missing.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=None,  # Missing
+        to_dttm=None,  # Missing
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="order_date",
+    )
+
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    result = _get_time_filter(query_object, None, all_dimensions)
+
+    assert result == set()
+
+
+def test_get_time_bounds_with_offset_fallback_to_time_range(
+    mock_datasource: MagicMock,
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test _get_time_bounds falls back to time_range parsing when bounds missing.
+    """
+    mocker.patch(
+        "superset.semantic_layers.mapper.get_since_until_from_query_object",
+        return_value=(datetime(2025, 10, 1), datetime(2025, 10, 15)),
+    )
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=None,  # Missing
+        to_dttm=None,  # Missing
+        metrics=["total_sales"],
+        columns=["category"],
+        time_range="Last 14 days",
+    )
+
+    from_dttm, to_dttm = _get_time_bounds(query_object, "1 week ago")
+
+    # Should have calculated offset bounds
+    assert from_dttm is not None
+    assert to_dttm is not None
+
+
+def test_get_time_bounds_with_offset_no_bounds(
+    mock_datasource: MagicMock,
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test _get_time_bounds returns None when no bounds available.
+    """
+    mocker.patch(
+        "superset.semantic_layers.mapper.get_since_until_from_query_object",
+        return_value=(None, None),
+    )
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=None,
+        to_dttm=None,
+        metrics=["total_sales"],
+        columns=["category"],
+    )
+
+    from_dttm, to_dttm = _get_time_bounds(query_object, "1 week ago")
+
+    assert from_dttm is None
+    assert to_dttm is None
+
+
+def test_convert_query_object_filter_temporal_range_with_value() -> None:
+    """
+    Test conversion of TEMPORAL_RANGE filter with valid string value.
+    """
+    all_dimensions = {
+        "order_date": Dimension(
+            "order_date", "order_date", STRING, "order_date", "Order date"
+        )
+    }
+    filter_: ValidatedQueryObjectFilterClause = {
+        "op": FilterOperator.TEMPORAL_RANGE.value,
+        "col": "order_date",
+        "val": "2025-01-01 : 2025-12-31",
+    }
+
+    result = _convert_query_object_filter(filter_, all_dimensions)
+
+    assert result == {
+        Filter(
+            type=PredicateType.WHERE,
+            column=all_dimensions["order_date"],
+            operator=Operator.GREATER_THAN_OR_EQUAL,
+            value="2025-01-01",
+        ),
+        Filter(
+            type=PredicateType.WHERE,
+            column=all_dimensions["order_date"],
+            operator=Operator.LESS_THAN,
+            value="2025-12-31",
+        ),
+    }
+
+
+def test_get_order_adhoc_with_none_sql_expression(mock_datasource: MagicMock) 
-> None:
+    """
+    Test order extraction skips adhoc expression with None sqlExpression.
+    """
+    all_metrics = {
+        metric.name: metric for metric in 
mock_datasource.implementation.metrics
+    }
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        orderby=[
+            ({"label": "custom", "sqlExpression": None}, True),  # None 
sqlExpression
+        ],
+    )
+
+    result = _get_order_from_query_object(query_object, all_metrics, 
all_dimensions)
+
+    # Should be empty - the adhoc with None sqlExpression is skipped
+    assert result == []
+
+
+def test_get_order_unknown_element(mock_datasource: MagicMock) -> None:
+    """
+    Test order extraction skips unknown elements.
+    """
+    all_metrics = {
+        metric.name: metric for metric in 
mock_datasource.implementation.metrics
+    }
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        orderby=[
+            ("unknown_column", True),  # Not in dimensions or metrics
+        ],
+    )
+
+    result = _get_order_from_query_object(query_object, all_metrics, 
all_dimensions)
+
+    # Should be empty - unknown element is skipped
+    assert result == []
+
+
+def test_get_group_limit_filters_with_granularity_no_time_dimension(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test group limit filters when granularity doesn't match any dimension.
+    """
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        inner_from_dttm=datetime(2025, 9, 22),
+        inner_to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="unknown_time_col",  # Not in dimensions
+    )
+
+    result = _get_group_limit_filters(query_object, all_dimensions)
+
+    # Should return None since no filters could be created
+    assert result is None
+
+
+def test_get_group_limit_filters_with_fetch_values_predicate(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test group limit filters include fetch values predicate.
+    """
+    mock_datasource.fetch_values_predicate = "tenant_id = 123"
+
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        inner_from_dttm=datetime(2025, 9, 22),
+        inner_to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="order_date",
+        apply_fetch_values_predicate=True,
+    )
+
+    result = _get_group_limit_filters(query_object, all_dimensions)
+
+    assert result is not None
+    assert (
+        AdhocFilter(
+            type=PredicateType.WHERE,
+            definition="tenant_id = 123",
+        )
+        in result
+    )
+
+
+def test_get_group_limit_filters_with_filter_clauses(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test group limit filters include converted filter clauses.
+    """
+    all_dimensions = {
+        dim.name: dim for dim in mock_datasource.implementation.dimensions
+    }
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        inner_from_dttm=datetime(2025, 9, 22),
+        inner_to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="order_date",
+        filter=[
+            {
+                "op": FilterOperator.TEMPORAL_RANGE.value,
+                "col": "order_date",
+                "val": "Last 7 days",
+            },
+            {
+                "op": FilterOperator.EQUALS.value,
+                "col": "category",
+                "val": "Electronics",
+            },
+        ],
+    )
+
+    result = _get_group_limit_filters(query_object, all_dimensions)
+
+    # Should return filters including time filters from inner bounds
+    # TEMPORAL_RANGE should be skipped
+    assert result is not None
+    assert isinstance(result, set)
+    assert len(result) >= 2  # At least inner time filters
+
+
+def test_validate_query_object_no_datasource() -> None:
+    """
+    Test validate_query_object returns False when no datasource.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=None,
+        metrics=["total_sales"],
+        columns=["category"],
+    )
+
+    result = validate_query_object(query_object)
+
+    assert result is False
+
+
+def test_validate_metrics_adhoc_error(
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test validation error for adhoc metrics.
+    """
+    from superset.semantic_layers.mapper import _validate_metrics
+
+    mock_datasource = mocker.Mock()
+    category_dim = Dimension("category", "category", STRING, "category", 
"Category")
+    sales_metric = Metric("total_sales", "total_sales", NUMBER, "SUM(amount)", 
"Sales")
+
+    mock_datasource.implementation.dimensions = {category_dim}
+    mock_datasource.implementation.metrics = {sales_metric}
+
+    # Manually create a query object with an adhoc metric
+    query_object = mocker.Mock()
+    query_object.datasource = mock_datasource
+    query_object.metrics = [{"label": "adhoc", "sqlExpression": "SUM(x)"}]
+
+    with pytest.raises(ValueError, match="Adhoc metrics are not supported"):
+        _validate_metrics(query_object)
+
+
+def test_validate_filters_adhoc_column_error(
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test validation error for adhoc column in filter.
+    """
+    from superset.semantic_layers.mapper import _validate_filters
+
+    query_object = mocker.Mock()
+    query_object.filter = [
+        {
+            "op": FilterOperator.EQUALS.value,
+            "col": {"sqlExpression": "custom_col"},  # Adhoc column
+            "val": "test",
+        },
+    ]
+
+    with pytest.raises(ValueError, match="Adhoc columns are not supported"):
+        _validate_filters(query_object)
+
+
+def test_validate_filters_missing_operator_error(
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test validation error for filter without operator.
+    """
+    from superset.semantic_layers.mapper import _validate_filters
+
+    query_object = mocker.Mock()
+    query_object.filter = [
+        {
+            "op": None,  # Missing operator
+            "col": "category",
+            "val": "test",
+        },
+    ]
+
+    with pytest.raises(ValueError, match="All filters must have an operator 
defined"):
+        _validate_filters(query_object)
+
+
+def test_validate_query_object_granularity_not_in_dimensions_error(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test validation error when time column not in dimensions.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="unknown_time_col",  # Not in dimensions
+    )
+
+    with pytest.raises(
+        ValueError, match="time column must be defined in the Semantic View"
+    ):
+        validate_query_object(query_object)
+
+
+def test_validate_query_object_adhoc_series_column_error(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test validation error for adhoc dimension in series columns.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        series_columns=[{"sqlExpression": "custom"}],  # Adhoc
+        series_limit=10,
+    )
+
+    with pytest.raises(
+        ValueError, match="Adhoc dimensions are not supported in series 
columns"
+    ):
+        validate_query_object(query_object)
+
+
+def test_validate_query_object_series_limit_metric_not_string_error(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test validation error when series_limit_metric is not a string.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        series_columns=["category"],
+        series_limit=10,
+        series_limit_metric={"sqlExpression": "SUM(x)"},  # Not a string
+    )
+
+    with pytest.raises(
+        ValueError, match="series limit metric must be defined in the Semantic 
View"
+    ):
+        validate_query_object(query_object)
+
+
+def test_validate_query_object_group_others_not_supported_error(
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test validation error when group_others feature not supported.
+    """
+    mock_datasource = mocker.Mock()
+    time_dim = Dimension("order_date", "order_date", STRING, "order_date", 
"Date")
+    category_dim = Dimension("category", "category", STRING, "category", 
"Category")
+    sales_metric = Metric("total_sales", "total_sales", NUMBER, "SUM(amount)", 
"Sales")
+
+    mock_datasource.implementation.dimensions = {time_dim, category_dim}
+    mock_datasource.implementation.metrics = {sales_metric}
+    # Has GROUP_LIMIT but not GROUP_OTHERS
+    mock_datasource.implementation.features = frozenset(
+        {SemanticViewFeature.GROUP_LIMIT}
+    )
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        series_columns=["category"],
+        series_limit=10,
+        group_others_when_limit_reached=True,  # Not supported
+    )
+
+    with pytest.raises(
+        ValueError, match="Grouping others when limit is reached is not 
supported"
+    ):
+        validate_query_object(query_object)
+
+
+def test_validate_query_object_adhoc_orderby_not_supported_error(
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test validation error when adhoc expressions in orderby not supported.
+    """
+    mock_datasource = mocker.Mock()
+    category_dim = Dimension("category", "category", STRING, "category", 
"Category")
+    sales_metric = Metric("total_sales", "total_sales", NUMBER, "SUM(amount)", 
"Sales")
+
+    mock_datasource.implementation.dimensions = {category_dim}
+    mock_datasource.implementation.metrics = {sales_metric}
+    mock_datasource.implementation.features = (
+        frozenset()
+    )  # No ADHOC_EXPRESSIONS_IN_ORDERBY
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        orderby=[
+            ({"label": "custom", "sqlExpression": "RAND()"}, True),
+        ],
+    )
+
+    with pytest.raises(
+        ValueError, match="Adhoc expressions in order by are not supported"
+    ):
+        validate_query_object(query_object)
+
+
+def test_validate_query_object_orderby_undefined_element_error(
+    mock_datasource: MagicMock,
+) -> None:
+    """
+    Test validation error when orderby element not defined.
+    """
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        metrics=["total_sales"],
+        columns=["category"],
+        orderby=[
+            ("undefined_column", True),  # Not in dimensions or metrics
+        ],
+    )
+
+    with pytest.raises(ValueError, match="All order by elements must be 
defined"):
+        validate_query_object(query_object)
+
+
+def test_get_results_with_is_rowcount(
+    mock_datasource: MagicMock,
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test get_results uses get_row_count when is_rowcount is True.
+    """
+    main_df = pd.DataFrame({"count": [100]})
+
+    mock_result = SemanticResult(
+        requests=[SemanticRequest(type="SQL", definition="SELECT COUNT(*)")],
+        results=main_df,
+    )
+
+    mock_datasource.implementation.get_row_count = 
mocker.Mock(return_value=mock_result)
+    mock_datasource.implementation.get_dataframe = mocker.Mock()
+
+    query_object = ValidatedQueryObject(
+        datasource=mock_datasource,
+        from_dttm=datetime(2025, 10, 15),
+        to_dttm=datetime(2025, 10, 22),
+        metrics=["total_sales"],
+        columns=["category"],
+        granularity="order_date",
+        is_rowcount=True,
+    )
+
+    result = get_results(query_object)
+
+    # Should have called get_row_count, not get_dataframe
+    mock_datasource.implementation.get_row_count.assert_called_once()
+    mock_datasource.implementation.get_dataframe.assert_not_called()
+    pd.testing.assert_frame_equal(result.df, main_df)
+
+
+def test_get_filters_from_query_object_with_filter_loop(
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test _get_filters_from_query_object processes filter array correctly.
+    """
+    # Create dimensions
+    time_dim = Dimension("order_date", "order_date", STRING, "order_date", 
"Date")
+    category_dim = Dimension("category", "category", STRING, "category", 
"Category")
+    all_dimensions = {"order_date": time_dim, "category": category_dim}
+
+    # Create mock query object with filters
+    query_object = mocker.Mock()
+    query_object.granularity = "order_date"
+    query_object.from_dttm = datetime(2025, 10, 15)
+    query_object.to_dttm = datetime(2025, 10, 22)
+    query_object.extras = {}
+    query_object.apply_fetch_values_predicate = False
+    query_object.datasource = mocker.Mock()
+    query_object.datasource.fetch_values_predicate = None
+    query_object.filter = [
+        # TEMPORAL_RANGE filter - should be skipped when granularity is set
+        {
+            "op": FilterOperator.TEMPORAL_RANGE.value,
+            "col": "order_date",
+            "val": "Last 7 days",
+        },
+        # EQUALS filter - should be converted
+        {
+            "op": FilterOperator.EQUALS.value,
+            "col": "category",
+            "val": "Electronics",
+        },
+    ]
+
+    result = _get_filters_from_query_object(query_object, None, all_dimensions)
+
+    # Should have filters: time range filters + category equals filter
+    assert isinstance(result, set)
+    # Check that we have a category filter
+    category_filters = [
+        f
+        for f in result
+        if isinstance(f, Filter)
+        and f.column.name == "category"
+        and f.operator == Operator.EQUALS
+    ]
+    assert len(category_filters) == 1
+
+
+def test_convert_query_object_filter_temporal_range_non_string_value() -> None:
+    """
+    Test TEMPORAL_RANGE filter returns None when value is not a string.
+    """
+    all_dimensions = {
+        "order_date": Dimension(
+            "order_date", "order_date", STRING, "order_date", "Order date"
+        )
+    }
+    filter_: ValidatedQueryObjectFilterClause = {
+        "op": FilterOperator.TEMPORAL_RANGE.value,
+        "col": "order_date",
+        "val": ["2025-01-01", "2025-12-31"],  # List instead of string
+    }
+
+    result = _convert_query_object_filter(filter_, all_dimensions)
+
+    # Should return None because value is not a string
+    assert result is None
+
+
+def test_get_group_limit_filters_with_filter_loop(
+    mocker: MockerFixture,
+) -> None:
+    """
+    Test _get_group_limit_filters processes filter array correctly.
+    """
+    # Create dimensions
+    time_dim = Dimension("order_date", "order_date", STRING, "order_date", 
"Date")
+    category_dim = Dimension("category", "category", STRING, "category", 
"Category")
+    all_dimensions = {"order_date": time_dim, "category": category_dim}
+
+    # Create mock query object with filters
+    query_object = mocker.Mock()
+    query_object.granularity = "order_date"
+    query_object.inner_from_dttm = datetime(2025, 9, 22)
+    query_object.inner_to_dttm = datetime(2025, 10, 22)
+    query_object.extras = {}
+    query_object.apply_fetch_values_predicate = False
+    query_object.datasource = mocker.Mock()
+    query_object.datasource.fetch_values_predicate = None
+    query_object.filter = [
+        # TEMPORAL_RANGE filter - should be skipped when granularity is set
+        {
+            "op": FilterOperator.TEMPORAL_RANGE.value,
+            "col": "order_date",
+            "val": "Last 7 days",
+        },
+        # EQUALS filter - should be converted
+        {
+            "op": FilterOperator.EQUALS.value,
+            "col": "category",
+            "val": "Electronics",
+        },
+    ]
+
+    result = _get_group_limit_filters(query_object, all_dimensions)
+
+    # Should have filters
+    assert result is not None
+    assert isinstance(result, set)
+    # Check that we have a category filter
+    category_filters = [
+        f
+        for f in result
+        if isinstance(f, Filter)
+        and f.column.name == "category"
+        and f.operator == Operator.EQUALS
+    ]
+    assert len(category_filters) == 1
+
+
+def test_validate_filters_empty(mocker: MockerFixture) -> None:
+    """
+    Test _validate_filters with empty filter list (the loop doesn't run).
+    """
+    from superset.semantic_layers.mapper import _validate_filters
+
+    query_object = mocker.Mock()
+    query_object.filter = []  # Empty filter list
+
+    # Should not raise any error
+    _validate_filters(query_object)
+
+
+def test_validate_granularity_valid(mocker: MockerFixture) -> None:
+    """
+    Test _validate_granularity with valid granularity and time grain.
+    """
+    from superset.semantic_layers.mapper import _validate_granularity
+
+    mock_datasource = mocker.Mock()
+    time_dim = Dimension("order_date", "order_date", STRING, "order_date", 
"Date", Day)
+
+    mock_datasource.implementation.dimensions = {time_dim}
+
+    query_object = mocker.Mock()
+    query_object.datasource = mock_datasource
+    query_object.granularity = "order_date"
+    query_object.extras = {"time_grain_sqla": "P1D"}
+
+    # Should not raise any error - valid granularity with supported time grain
+    _validate_granularity(query_object)
+
+
+def test_validate_group_limit_valid(mocker: MockerFixture) -> None:
+    """
+    Test _validate_group_limit with valid group limit settings.
+    """
+    from superset.semantic_layers.mapper import _validate_group_limit
+
+    mock_datasource = mocker.Mock()
+    category_dim = Dimension("category", "category", STRING, "category", 
"Category")
+    sales_metric = Metric("total_sales", "total_sales", NUMBER, "SUM(amount)", 
"Sales")
+
+    mock_datasource.implementation.dimensions = {category_dim}
+    mock_datasource.implementation.metrics = {sales_metric}
+    mock_datasource.implementation.features = frozenset(
+        {SemanticViewFeature.GROUP_LIMIT, SemanticViewFeature.GROUP_OTHERS}
+    )
+
+    query_object = mocker.Mock()
+    query_object.datasource = mock_datasource
+    query_object.series_limit = 10
+    query_object.series_columns = ["category"]
+    query_object.series_limit_metric = "total_sales"
+    query_object.group_others_when_limit_reached = True
+
+    # Should not raise any error - all settings are valid
+    _validate_group_limit(query_object)
+
+
def test_get_filters_from_query_object_filter_returns_none(
    mocker: MockerFixture,
) -> None:
    """
    A filter whose conversion yields None is skipped while the remaining
    filters are still processed (the `_convert_query_object_filter` -> None
    branch of `_get_filters_from_query_object`).
    """
    dimensions_by_name = {
        "order_date": Dimension(
            "order_date", "order_date", STRING, "order_date", "Date"
        ),
        "category": Dimension("category", "category", STRING, "category", "Category"),
    }

    qobj = mocker.Mock()
    qobj.granularity = "order_date"
    qobj.from_dttm = datetime(2025, 10, 15)
    qobj.to_dttm = datetime(2025, 10, 22)
    qobj.extras = {}
    qobj.apply_fetch_values_predicate = False
    qobj.datasource = mocker.Mock()
    qobj.datasource.fetch_values_predicate = None
    qobj.filter = [
        # Unknown column: `_convert_query_object_filter` returns None for it.
        {"op": FilterOperator.EQUALS.value, "col": "unknown_column", "val": "test"},
        # Known column: converted into a real filter.
        {"op": FilterOperator.EQUALS.value, "col": "category", "val": "Electronics"},
    ]

    filters = _get_filters_from_query_object(qobj, None, dimensions_by_name)

    assert isinstance(filters, set)
    # Exactly one EQUALS filter on "category"; the unknown column was dropped.
    matching = sum(
        1
        for candidate in filters
        if isinstance(candidate, Filter)
        and candidate.column.name == "category"
        and candidate.operator == Operator.EQUALS
    )
    assert matching == 1
+
+
def test_get_group_limit_filters_filter_returns_none(
    mocker: MockerFixture,
) -> None:
    """
    `_get_group_limit_filters` skips filters whose conversion returns None
    and keeps the convertible ones (the continue branch of the filter loop).
    """
    dimensions_by_name = {
        "order_date": Dimension(
            "order_date", "order_date", STRING, "order_date", "Date"
        ),
        "category": Dimension("category", "category", STRING, "category", "Category"),
    }

    qobj = mocker.Mock()
    qobj.granularity = "order_date"
    qobj.inner_from_dttm = datetime(2025, 9, 22)
    qobj.inner_to_dttm = datetime(2025, 10, 22)
    qobj.extras = {}
    qobj.apply_fetch_values_predicate = False
    qobj.datasource = mocker.Mock()
    qobj.datasource.fetch_values_predicate = None
    qobj.filter = [
        # Unknown column: `_convert_query_object_filter` returns None for it.
        {"op": FilterOperator.EQUALS.value, "col": "unknown_column", "val": "test"},
        # Known column: converted into a real filter.
        {"op": FilterOperator.EQUALS.value, "col": "category", "val": "Electronics"},
    ]

    filters = _get_group_limit_filters(qobj, dimensions_by_name)

    assert filters is not None
    assert isinstance(filters, set)
    # Only the "category" filter survives; the unknown column was dropped.
    matching = [
        candidate
        for candidate in filters
        if isinstance(candidate, Filter)
        and candidate.column.name == "category"
        and candidate.operator == Operator.EQUALS
    ]
    assert len(matching) == 1
+
+
def test_validate_filters_with_valid_filters(mocker: MockerFixture) -> None:
    """
    `_validate_filters` accepts a list of well-formed filters without raising
    (covers the branch where the validation loop completes cleanly).
    """
    from superset.semantic_layers.mapper import _validate_filters

    qobj = mocker.Mock()
    qobj.filter = [
        # Plain string column with a scalar value.
        {"op": FilterOperator.EQUALS.value, "col": "category", "val": "test"},
        # Membership operator with a list of values.
        {"op": FilterOperator.IN.value, "col": "region", "val": ["US", "UK"]},
    ]

    # Validation of well-formed filters must complete without an exception.
    _validate_filters(qobj)
+
+
def test_get_group_limit_filters_granularity_missing_inner_from(
    mocker: MockerFixture,
) -> None:
    """
    With a granularity but no inner_from_dttm, no time filter is emitted and
    `_get_group_limit_filters` returns None (covers branch 704->729 where
    the time dimension exists but the lower bound is missing).
    """
    dimensions_by_name = {
        "order_date": Dimension(
            "order_date", "order_date", STRING, "order_date", "Date"
        ),
        "category": Dimension("category", "category", STRING, "category", "Category"),
    }

    qobj = mocker.Mock()
    qobj.granularity = "order_date"
    qobj.inner_from_dttm = None  # lower bound missing
    qobj.inner_to_dttm = datetime(2025, 10, 22)  # upper bound present
    qobj.extras = {}
    qobj.apply_fetch_values_predicate = False
    qobj.datasource = mocker.Mock()
    qobj.datasource.fetch_values_predicate = None
    qobj.filter = []

    # Time filters require both bounds, so nothing is produced at all.
    assert _get_group_limit_filters(qobj, dimensions_by_name) is None
+
+
def test_get_group_limit_filters_granularity_missing_inner_to(
    mocker: MockerFixture,
) -> None:
    """
    With a granularity but no inner_to_dttm, no time filter is emitted and
    `_get_group_limit_filters` returns None (covers branch 704->729 where
    the time dimension exists but the upper bound is missing).
    """
    dimensions_by_name = {
        "order_date": Dimension(
            "order_date", "order_date", STRING, "order_date", "Date"
        ),
        "category": Dimension("category", "category", STRING, "category", "Category"),
    }

    qobj = mocker.Mock()
    qobj.granularity = "order_date"
    qobj.inner_from_dttm = datetime(2025, 9, 22)  # lower bound present
    qobj.inner_to_dttm = None  # upper bound missing
    qobj.extras = {}
    qobj.apply_fetch_values_predicate = False
    qobj.datasource = mocker.Mock()
    qobj.datasource.fetch_values_predicate = None
    qobj.filter = []

    # Time filters require both bounds, so nothing is produced at all.
    assert _get_group_limit_filters(qobj, dimensions_by_name) is None
+
+
def test_get_group_limit_filters_no_granularity(
    mocker: MockerFixture,
) -> None:
    """
    Without a granularity no time filters are added, so the result is None
    (explicitly covers the falsy-granularity side of branch 704->729).
    """
    dimensions_by_name = {
        "category": Dimension("category", "category", STRING, "category", "Category")
    }

    qobj = mocker.Mock()
    qobj.granularity = None  # no time column configured
    qobj.inner_from_dttm = datetime(2025, 9, 22)
    qobj.inner_to_dttm = datetime(2025, 10, 22)
    qobj.extras = {}
    qobj.apply_fetch_values_predicate = False
    qobj.datasource = mocker.Mock()
    qobj.datasource.fetch_values_predicate = None
    qobj.filter = []

    # Both bounds exist, but with no granularity there is no time dimension
    # to filter on, so no group-limit filters are returned.
    assert _get_group_limit_filters(qobj, dimensions_by_name) is None


Reply via email to