Copilot commented on code in PR #36684:
URL: https://github.com/apache/superset/pull/36684#discussion_r2723793025


##########
superset-frontend/playwright/tests/experimental/dataset/create-dataset.spec.ts:
##########
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { test, expect } from '../../../helpers/fixtures/testAssets';
+import type { TestAssets } from '../../../helpers/fixtures/testAssets';
+import type { Page, TestInfo } from '@playwright/test';
+import { ExplorePage } from '../../../pages/ExplorePage';
+import { CreateDatasetPage } from '../../../pages/CreateDatasetPage';
+import { DatasetListPage } from '../../../pages/DatasetListPage';
+import { ChartCreationPage } from '../../../pages/ChartCreationPage';
+import { ENDPOINTS } from '../../../helpers/api/dataset';
+import { waitForPost } from '../../../helpers/api/intercepts';
+import { expectStatusOneOf } from '../../../helpers/api/assertions';
+import { apiPostDatabase } from '../../../helpers/api/database';
+
+interface GsheetsSetupResult {
+  sheetName: string;
+  dbName: string;
+  createDatasetPage: CreateDatasetPage;
+}
+
+/**
+ * Sets up gsheets database and navigates to create dataset page.
+ * Skips test if gsheets connector unavailable.
+ * @param testInfo - Test info for parallelIndex to avoid name collisions in 
parallel runs
+ * @returns Setup result with names and page object, or null if test.skip() 
was called
+ */
+async function setupGsheetsDataset(
+  page: Page,
+  testAssets: TestAssets,
+  testInfo: TestInfo,
+): Promise<GsheetsSetupResult | null> {
+  // Public Google Sheet for testing (published to web, no auth required)
+  // This is a Netflix dataset that is publicly accessible via Google 
Visualization API
+  const sheetUrl =
+    
'https://docs.google.com/spreadsheets/d/19XNqckHGKGGPh83JGFdFGP4Bw9gdXeujq5EoIGwttdM/edit#gid=347941303';
+  // Include parallelIndex to avoid collisions when tests run in parallel
+  const uniqueSuffix = `${Date.now()}_${testInfo.parallelIndex}`;
+  const sheetName = `test_netflix_${uniqueSuffix}`;
+  const dbName = `test_gsheets_db_${uniqueSuffix}`;
+
+  // Create a Google Sheets database via API
+  // The catalog must be in `extra` as JSON with engine_params.catalog format
+  const catalogDict = { [sheetName]: sheetUrl };
+  const createDbRes = await apiPostDatabase(page, {
+    database_name: dbName,
+    engine: 'gsheets',
+    sqlalchemy_uri: 'gsheets://',
+    configuration_method: 'dynamic_form',
+    expose_in_sqllab: true,
+    extra: JSON.stringify({
+      engine_params: {
+        catalog: catalogDict,
+      },
+    }),
+  });
+
+  // Check if gsheets connector is available
+  if (!createDbRes.ok()) {
+    const errorBody = await createDbRes.json();
+    const errorText = JSON.stringify(errorBody);
+    // Skip test if gsheets connector not installed
+    if (
+      errorText.includes('gsheets') ||
+      errorText.includes('No such DB engine')
+    ) {
+      await test.info().attach('skip-reason', {
+        body: `Google Sheets connector unavailable: ${errorText}`,
+        contentType: 'text/plain',
+      });
+      test.skip();
+      return null;
+    }
+    throw new Error(`Failed to create gsheets database: ${errorText}`);
+  }
+
+  const createDbBody = await createDbRes.json();
+  const dbId = createDbBody.result?.id ?? createDbBody.id;
+  if (!dbId) {
+    throw new Error('Database creation did not return an ID');
+  }
+  testAssets.trackDatabase(dbId);
+
+  // Navigate to create dataset page
+  const createDatasetPage = new CreateDatasetPage(page);
+  await createDatasetPage.goto();
+  await createDatasetPage.waitForPageLoad();
+
+  // Select the Google Sheets database
+  await createDatasetPage.selectDatabase(dbName);
+
+  // Try to select the sheet - if not found due to timeout, skip
+  try {
+    await createDatasetPage.selectTable(sheetName);
+  } catch (error) {
+    // Only skip on TimeoutError (sheet not loaded); re-throw everything else
+    if (!(error instanceof Error) || error.name !== 'TimeoutError') {
+      throw error;
+    }
+    await test.info().attach('skip-reason', {
+      body: `Table "${sheetName}" not found in dropdown after timeout.`,
+      contentType: 'text/plain',
+    });
+    test.skip();
+    return null;
+  }
+
+  return { sheetName, dbName, createDatasetPage };
+}
+
+test('should create a dataset via wizard', async ({ page, testAssets }) => {
+  const setup = await setupGsheetsDataset(page, testAssets, test.info());
+  if (!setup) return; // test.skip() was called
+  const { sheetName, createDatasetPage } = setup;
+
+  // Set up response intercept to capture new dataset ID
+  const createResponsePromise = waitForPost(page, ENDPOINTS.DATASET, {
+    pathMatch: true,
+  });
+
+  // Click "Create and explore dataset" button
+  await createDatasetPage.clickCreateAndExploreDataset();
+
+  // Wait for dataset creation and capture ID for cleanup
+  const createResponse = expectStatusOneOf(
+    await createResponsePromise,
+    [200, 201],
+  );
+  const createBody = await createResponse.json();
+  const newDatasetId = createBody.result?.id ?? createBody.id;
+
+  if (newDatasetId) {
+    testAssets.trackDataset(newDatasetId);
+  }
+
+  // Verify we navigated to Chart Creation page with dataset pre-selected
+  await page.waitForURL(/.*\/chart\/add.*/);
+  const chartCreationPage = new ChartCreationPage(page);
+  await chartCreationPage.waitForPageLoad();
+
+  // Verify the dataset is pre-selected
+  await chartCreationPage.expectDatasetSelected(sheetName);
+
+  // Select a visualization type and create chart
+  await chartCreationPage.selectVizType('Table');
+
+  // Click "Create new chart" to go to Explore
+  await chartCreationPage.clickCreateNewChart();
+
+  // Verify we navigated to Explore page
+  await page.waitForURL(/.*\/explore\/.*/);
+  const explorePage = new ExplorePage(page);
+  await explorePage.waitForPageLoad();
+
+  // Verify the dataset name is shown in Explore
+  const loadedDatasetName = await explorePage.getDatasetName();
+  expect(loadedDatasetName).toContain(sheetName);
+});
+
+test('should create a dataset without exploring', async ({
+  page,
+  testAssets,
+}) => {
+  const setup = await setupGsheetsDataset(page, testAssets, test.info());
+  if (!setup) return; // test.skip() was called
+  const { sheetName, createDatasetPage } = setup;
+
+  // Set up response intercept to capture dataset ID
+  const createResponsePromise = waitForPost(page, ENDPOINTS.DATASET, {
+    pathMatch: true,
+  });
+
+  // Click "Create dataset" (not explore)
+  await createDatasetPage.clickCreateDataset();
+
+  // Capture dataset ID from response for cleanup
+  const createResponse = expectStatusOneOf(
+    await createResponsePromise,
+    [200, 201],
+  );
+  const createBody = await createResponse.json();
+  const datasetId = createBody.result?.id ?? createBody.id;
+  if (datasetId) {
+    testAssets.trackDataset(datasetId);
+  }
+
+  // Verify redirect to dataset list (not chart creation)
+  // Note: "Create dataset" action does not show a toast
+  // await page.waitForURL(/.*tablemodelview\/list.*/);

Review Comment:
   This commented-out line for URL verification should either be removed if 
it's not needed, or uncommented and fixed if it should be part of the test. 
Leaving commented code in the codebase can be confusing for future maintainers.
   ```suggestion
     await page.waitForURL(/.*tablemodelview\/list.*/);
   ```



##########
superset-frontend/playwright/tests/experimental/dataset/dataset-list.spec.ts:
##########
@@ -210,17 +232,431 @@ test('should duplicate a dataset with new name', async 
({ page }) => {
   await expect(datasetListPage.getDatasetRow(originalName)).toBeVisible();
   await expect(datasetListPage.getDatasetRow(duplicateName)).toBeVisible();
 
-  // API Verification: Compare original and duplicate datasets
-  const originalResponseData = await apiGetDataset(page, originalId!);
-  const originalDataFull = await originalResponseData.json();
-  const duplicateResponseData = await apiGetDataset(page, duplicateId);
-  const duplicateDataFull = await duplicateResponseData.json();
+  // API Verification: Fetch both datasets via detail API for consistent 
comparison
+  // (list API may return undefined for fields that detail API returns as null)
+  const [originalDetailRes, duplicateDetailRes] = await Promise.all([
+    apiGetDataset(page, originalId),
+    apiGetDataset(page, duplicateId),
+  ]);
+  const originalDetail = (await originalDetailRes.json()).result;
+  const duplicateDetail = (await duplicateDetailRes.json()).result;
 
   // Verify key properties were copied correctly
-  expect(duplicateDataFull.result.sql).toBe(originalDataFull.result.sql);
-  expect(duplicateDataFull.result.database.id).toBe(
-    originalDataFull.result.database.id,
-  );
+  expect(duplicateDetail.sql).toBe(originalDetail.sql);
+  expect(duplicateDetail.database.id).toBe(originalDetail.database.id);
+  expect(duplicateDetail.schema).toBe(originalDetail.schema);
   // Name should be different (the duplicate name)
-  expect(duplicateDataFull.result.table_name).toBe(duplicateName);
+  expect(duplicateDetail.table_name).toBe(duplicateName);
+});
+
+test('should export a dataset as a zip file', async ({
+  page,
+  datasetListPage,
+}) => {
+  // Use existing example dataset
+  const datasetName = 'members_channels_2';
+  const dataset = await getDatasetByName(page, datasetName);
+  expect(dataset).not.toBeNull();
+
+  // Verify dataset is visible in list
+  await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
+
+  // Set up API response intercept for export endpoint
+  // Note: We intercept the API response instead of relying on download events 
because
+  // Superset uses blob downloads (createObjectURL) which don't trigger 
Playwright's
+  // download event consistently, especially in app-prefix configurations.
+  const exportResponsePromise = waitForGet(page, ENDPOINTS.DATASET_EXPORT);
+
+  // Click export action button
+  await datasetListPage.clickExportAction(datasetName);
+
+  // Wait for export API response and validate zip contents
+  const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
+  await expectValidExportZip(exportResponse, { checkContentDisposition: true 
});
+});
+
+test('should export multiple datasets via bulk select action', async ({
+  page,
+  datasetListPage,
+  testAssets,
+}) => {
+  // Create 2 throwaway datasets for bulk export
+  const [dataset1, dataset2] = await Promise.all([
+    createTestDataset(page, testAssets, test.info(), {
+      prefix: 'bulk_export_1',
+    }),
+    createTestDataset(page, testAssets, test.info(), {
+      prefix: 'bulk_export_2',
+    }),
+  ]);
+
+  // Refresh to see new datasets
+  await datasetListPage.goto();
+  await datasetListPage.waitForTableLoad();
+
+  // Verify both datasets are visible in list
+  await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible();
+  await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible();
+
+  // Enable bulk select mode
+  await datasetListPage.clickBulkSelectButton();
+
+  // Select both datasets
+  await datasetListPage.selectDatasetCheckbox(dataset1.name);
+  await datasetListPage.selectDatasetCheckbox(dataset2.name);
+
+  // Set up API response intercept for export endpoint
+  const exportResponsePromise = waitForGet(page, ENDPOINTS.DATASET_EXPORT);
+
+  // Click bulk export action
+  await datasetListPage.clickBulkAction('Export');
+
+  // Wait for export API response and validate zip contains multiple datasets
+  const exportResponse = expectStatusOneOf(await exportResponsePromise, [200]);
+  await expectValidExportZip(exportResponse, { minDatasetCount: 2 });
+});
+
+test('should edit dataset name via modal', async ({
+  page,
+  datasetListPage,
+  testAssets,
+}) => {
+  // Create throwaway dataset for editing
+  const { id: datasetId, name: datasetName } = await createTestDataset(
+    page,
+    testAssets,
+    test.info(),
+    { prefix: 'test_edit' },
+  );
+
+  // Refresh to see new dataset
+  await datasetListPage.goto();
+  await datasetListPage.waitForTableLoad();
+
+  // Verify dataset is visible in list
+  await expect(datasetListPage.getDatasetRow(datasetName)).toBeVisible();
+
+  // Click edit action to open modal
+  await datasetListPage.clickEditAction(datasetName);
+
+  // Wait for edit modal to be ready
+  const editModal = new EditDatasetModal(page);
+  await editModal.waitForReady();
+
+  // Enable edit mode by clicking the lock icon
+  await editModal.enableEditMode();
+
+  // Edit the dataset name
+  const newName = `test_renamed_${Date.now()}`;
+  await editModal.fillName(newName);
+
+  // Set up response intercept for save
+  const saveResponsePromise = waitForPut(
+    page,
+    `${ENDPOINTS.DATASET}${datasetId}`,
+  );
+
+  // Click Save button
+  await editModal.clickSave();
+
+  // Handle the "Confirm save" dialog that appears for datasets with sync 
columns enabled
+  const confirmDialog = new ConfirmDialog(page);
+  await confirmDialog.clickOk();
+
+  // Wait for save to complete and verify success
+  expectStatusOneOf(await saveResponsePromise, [200, 201]);
+
+  // Modal should close
+  await editModal.waitForHidden();
+
+  // Verify success toast appears
+  const toast = new Toast(page);
+  await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 });
+
+  // Verify via API that name was saved
+  const updatedDatasetRes = await apiGetDataset(page, datasetId);
+  const updatedDataset = (await updatedDatasetRes.json()).result;
+  expect(updatedDataset.table_name).toBe(newName);
+});
+
+test('should bulk delete multiple datasets', async ({
+  page,
+  datasetListPage,
+  testAssets,
+}) => {
+  // Create 2 throwaway datasets for bulk delete
+  const [dataset1, dataset2] = await Promise.all([
+    createTestDataset(page, testAssets, test.info(), {
+      prefix: 'bulk_delete_1',
+    }),
+    createTestDataset(page, testAssets, test.info(), {
+      prefix: 'bulk_delete_2',
+    }),
+  ]);
+
+  // Refresh to see new datasets
+  await datasetListPage.goto();
+  await datasetListPage.waitForTableLoad();
+
+  // Verify both datasets are visible in list
+  await expect(datasetListPage.getDatasetRow(dataset1.name)).toBeVisible();
+  await expect(datasetListPage.getDatasetRow(dataset2.name)).toBeVisible();
+
+  // Enable bulk select mode
+  await datasetListPage.clickBulkSelectButton();
+
+  // Select both datasets
+  await datasetListPage.selectDatasetCheckbox(dataset1.name);
+  await datasetListPage.selectDatasetCheckbox(dataset2.name);
+
+  // Click bulk delete action
+  await datasetListPage.clickBulkAction('Delete');
+
+  // Delete confirmation modal should appear
+  const deleteModal = new DeleteConfirmationModal(page);
+  await deleteModal.waitForVisible();
+
+  // Type "DELETE" to confirm
+  await deleteModal.fillConfirmationInput('DELETE');
+
+  // Click the Delete button
+  await deleteModal.clickDelete();
+
+  // Modal should close
+  await deleteModal.waitForHidden();
+
+  // Verify success toast appears
+  const toast = new Toast(page);
+  await expect(toast.getSuccess()).toBeVisible();
+
+  // Verify both datasets are removed from list
+  await expect(datasetListPage.getDatasetRow(dataset1.name)).not.toBeVisible();
+  await expect(datasetListPage.getDatasetRow(dataset2.name)).not.toBeVisible();
+
+  // Verify via API that datasets no longer exist (404)
+  // Use polling since deletes may be async
+  await expect
+    .poll(async () => {
+      const response = await apiGetDataset(page, dataset1.id, {
+        failOnStatusCode: false,
+      });
+      return response.status();
+    })
+    .toBe(404);
+  await expect
+    .poll(async () => {
+      const response = await apiGetDataset(page, dataset2.id, {
+        failOnStatusCode: false,
+      });
+      return response.status();
+    })
+    .toBe(404);
+});
+
+// Import test uses a fixed dataset name from the zip fixture.
+// Uses test.describe only because Playwright's serial mode API requires it -
+// this prevents race conditions when parallel workers import the same fixture.
+// (Deviation from "avoid describe" guideline is necessary for functional 
reasons)
+test.describe('import dataset', () => {
+  test.describe.configure({ mode: 'serial' });
+  test('should import a dataset from a zip file', async ({
+    page,
+    datasetListPage,
+    testAssets,
+  }) => {
+    // Dataset name from fixture (test_netflix_1768502050965)
+    // Note: Fixture contains a Google Sheets dataset - test will skip if 
gsheets connector unavailable
+    const importedDatasetName = 'test_netflix_1768502050965';
+    const fixturePath = path.resolve(
+      __dirname,
+      '../../../fixtures/dataset_export.zip',
+    );
+
+    // Cleanup: Delete any existing dataset with the same name from previous 
runs
+    const existingDataset = await getDatasetByName(page, importedDatasetName);
+    if (existingDataset) {
+      await apiDeleteDataset(page, existingDataset.id, {
+        failOnStatusCode: false,
+      });
+    }
+
+    // Click the import button
+    await datasetListPage.clickImportButton();
+
+    // Wait for import modal to be ready
+    const importModal = new ImportDatasetModal(page);
+    await importModal.waitForReady();
+
+    // Upload the fixture zip file
+    await importModal.uploadFile(fixturePath);
+
+    // Set up response intercept to catch the import POST
+    // Use pathMatch to avoid false matches if URL lacks trailing slash
+    let importResponsePromise = waitForPost(page, ENDPOINTS.DATASET_IMPORT, {
+      pathMatch: true,
+    });
+
+    // Click Import button
+    await importModal.clickImport();
+
+    // Wait for first import response
+    let importResponse = await importResponsePromise;
+
+    // Handle overwrite confirmation if dataset already exists
+    // First response may be 409/422 indicating overwrite is required - this 
is expected
+    const overwriteInput = importModal.getOverwriteInput();
+    await overwriteInput
+      .waitFor({ state: 'visible', timeout: 3000 })
+      .catch(error => {
+        // Only ignore TimeoutError (input not visible); re-throw other errors
+        if (!(error instanceof Error) || error.name !== 'TimeoutError') {
+          throw error;
+        }
+      });
+
+    if (await overwriteInput.isVisible()) {
+      // Set up new intercept for the actual import after overwrite 
confirmation
+      importResponsePromise = waitForPost(page, ENDPOINTS.DATASET_IMPORT, {
+        pathMatch: true,
+      });
+      await importModal.fillOverwriteConfirmation();
+      await importModal.clickImport();
+      // Wait for the second (final) import response
+      importResponse = await importResponsePromise;
+    }
+
+    // Check final import response for gsheets connector errors
+    if (!importResponse.ok()) {
+      const errorBody = await importResponse.json().catch(() => ({}));
+      const errorText = JSON.stringify(errorBody);
+      // Skip test if gsheets connector not installed
+      if (
+        errorText.includes('gsheets') ||
+        errorText.includes('No such DB engine') ||
+        errorText.includes('Could not load database driver')
+      ) {
+        await test.info().attach('skip-reason', {
+          body: `Import failed due to missing gsheets connector: ${errorText}`,
+          contentType: 'text/plain',
+        });
+        test.skip();
+        return;
+      }
+      // Re-throw other errors
+      throw new Error(`Import failed: ${errorText}`);
+    }
+
+    // Modal should close on success
+    await importModal.waitForHidden({ timeout: 30000 });
+
+    // Verify success toast appears
+    const toast = new Toast(page);
+    await expect(toast.getSuccess()).toBeVisible({ timeout: 10000 });
+
+    // Refresh the page to see the imported dataset
+    await datasetListPage.goto();
+    await datasetListPage.waitForTableLoad();
+
+    // Verify dataset appears in list
+    await expect(
+      datasetListPage.getDatasetRow(importedDatasetName),
+    ).toBeVisible();
+
+    // Get dataset ID for cleanup
+    const importedDataset = await getDatasetByName(page, importedDatasetName);
+    expect(importedDataset).not.toBeNull();
+    testAssets.trackDataset(importedDataset!.id);
+  });
+});
+
+test('should edit column date format via modal', async ({
+  page,
+  datasetListPage,
+  testAssets,
+}) => {
+  // Create virtual dataset with a date column for testing
+  // Using SQL to create a dataset with 'ds' column avoids duplication issues
+  const datasetName = 
`test_date_format_${Date.now()}_${test.info().parallelIndex}`;
+  const baseDataset = await getDatasetByName(page, 'members_channels_2');
+  expect(baseDataset, 'members_channels_2 dataset must exist').not.toBeNull();
+
+  const createResponse = await apiPostVirtualDataset(page, {
+    database: baseDataset!.database.id,
+    schema: baseDataset!.schema ?? '',
+    table_name: datasetName,
+    sql: "SELECT CAST('2024-01-01' AS DATE) as ds, 'test' as name",

Review Comment:
   The outer double quotes here are the JavaScript string delimiters, not part
of the SQL itself. The SQL already uses standard single quotes for both string
literals (`'2024-01-01'` and `'test'`), so it is portable across databases and
no change is needed. (Note: the previously suggested replacement was identical
to the existing line.)



##########
superset-frontend/playwright/tests/experimental/dataset/dataset-list.spec.ts:
##########
@@ -188,13 +206,17 @@ test('should duplicate a dataset with new name', async ({ 
page }) => {
   // Click the Duplicate button
   await duplicateModal.clickDuplicate();
 
-  // Get the duplicate dataset ID from response
-  const duplicateResponse = await duplicateResponsePromise;
+  // Get the duplicate dataset ID from response (handle both response shapes)
+  const duplicateResponse = expectStatusOneOf(
+    await duplicateResponsePromise,
+    [200, 201],
+  );
   const duplicateData = await duplicateResponse.json();
-  const duplicateId = duplicateData.id;
+  const duplicateId = duplicateData.result?.id ?? duplicateData.id;
+  expect(duplicateId, 'Duplicate API should return dataset id').toBeTruthy();
 
-  // Track both original and duplicate for cleanup
-  testResources = { datasetIds: [originalId!, duplicateId] };
+  // Track duplicate for cleanup (original is example data, don't delete it)

Review Comment:
   The comment says "original is example data, don't delete it" but the 
original dataset was actually created via `createTestDataset()` on lines 
177-182, which already tracks it in testAssets for cleanup (line 64 of 
dataset-test-helpers.ts calls `testAssets.trackDataset(id)`). Therefore, the 
comment is misleading - the original will be cleaned up automatically. Consider 
removing or updating the comment to clarify that both datasets are tracked for 
cleanup.
   ```suggestion
     // Track the duplicated dataset for cleanup; the original created via 
createTestDataset is also tracked
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to