codeant-ai-for-open-source[bot] commented on code in PR #37902:
URL: https://github.com/apache/superset/pull/37902#discussion_r2796311542


##########
superset-frontend/plugins/legacy-plugin-chart-paired-t-test/src/TTestTable.tsx:
##########
@@ -32,279 +32,308 @@ export interface DataEntry {
 }
 
 interface TTestTableProps {
-  alpha: number;
+  alpha?: number;
   data: DataEntry[];
   groups: string[];
-  liftValPrec: number;
+  liftValPrec?: number;
   metric: string;
-  pValPrec: number;
+  pValPrec?: number;
 }
 
-interface TTestTableState {
-  control: number;
-  liftValues: (string | number)[];
-  pValues: (string | number)[];
-}
+function TTestTable({
+  alpha = 0.05,
+  data,
+  groups,
+  liftValPrec = 4,
+  metric,
+  pValPrec = 6,
+}: TTestTableProps) {
+  const [control, setControl] = useState(0);
+  const [liftValues, setLiftValues] = useState<(string | number)[]>([]);
+  const [pValues, setPValues] = useState<(string | number)[]>([]);
 
-const defaultProps = {
-  alpha: 0.05,
-  liftValPrec: 4,
-  pValPrec: 6,
-};
+  const computeLift = useCallback(
+    (values: DataPointValue[], controlValues: DataPointValue[]): string => {
+      // Compute the lift value between two time series
+      let sumValues = 0;
+      let sumControl = 0;
+      values.forEach((value, i) => {
+        sumValues += value.y;
+        sumControl += controlValues[i].y;
+      });
 
-class TTestTable extends Component<TTestTableProps, TTestTableState> {
-  static defaultProps = defaultProps;
+      return (((sumValues - sumControl) / sumControl) * 100).toFixed(
+        liftValPrec,
+      );
+    },
+    [liftValPrec],
+  );
 
-  constructor(props: TTestTableProps) {
-    super(props);
-    this.state = {
-      control: 0,
-      liftValues: [],
-      pValues: [],
-    };
-  }
+  const computePValue = useCallback(
+    (
+      values: DataPointValue[],
+      controlValues: DataPointValue[],
+    ): string | number => {
+      // Compute the p-value from Student's t-test
+      // between two time series
+      let diffSum = 0;
+      let diffSqSum = 0;
+      let finiteCount = 0;
+      values.forEach((value, i) => {
+        const diff = controlValues[i].y - value.y;
+        /* eslint-disable-next-line */
+        if (isFinite(diff)) {
+          finiteCount += 1;
+          diffSum += diff;
+          diffSqSum += diff * diff;
+        }
+      });
+      const tvalue = -Math.abs(
+        diffSum *
+          Math.sqrt(
+            (finiteCount - 1) / (finiteCount * diffSqSum - diffSum * diffSum),
+          ),
+      );
+      try {
+        return (2 * new dist.Studentt(finiteCount - 1).cdf(tvalue)).toFixed(
+          pValPrec,
+        ); // two-sided test
+      } catch (error) {
+        return NaN;
+      }
+    },
+    [pValPrec],
+  );
 
-  componentDidMount() {
-    const { control } = this.state;
-    this.computeTTest(control); // initially populate table
-  }
+  const computeTTest = useCallback(
+    (controlIndex: number) => {
+      // Compute lift and p-values for each row
+      // against the selected control
+      const newPValues: (string | number)[] = [];
+      const newLiftValues: (string | number)[] = [];
+      if (!data) {
+        return;
+      }
+      for (let i = 0; i < data.length; i += 1) {
+        if (i === controlIndex) {
+          newPValues.push('control');
+          newLiftValues.push('control');
+        } else {
+          newPValues.push(
+            computePValue(data[i].values, data[controlIndex].values),
+          );
+          newLiftValues.push(
+            computeLift(data[i].values, data[controlIndex].values),
+          );
+        }
+      }
+      setControl(controlIndex);
+      setLiftValues(newLiftValues);
+      setPValues(newPValues);
+    },
+    [data, computeLift, computePValue],
+  );
 
-  getLiftStatus(row: number): string {
-    const { control, liftValues } = this.state;
-    // Get a css class name for coloring
-    if (row === control) {
-      return 'control';
-    }
-    const liftVal = liftValues[row];
-    if (Number.isNaN(liftVal) || !Number.isFinite(liftVal)) {
-      return 'invalid'; // infinite or NaN values
+  // Recompute table when data or control row changes, keeping control index in range
+  useEffect(() => {
+    if (!data || data.length === 0) {
+      setControl(0);
+      setLiftValues([]);
+      setPValues([]);
+      return;
     }
 
-    return Number(liftVal) >= 0 ? 'true' : 'false'; // green on true, red on false
-  }
-
-  getPValueStatus(row: number): string {
-    const { control, pValues } = this.state;
-    if (row === control) {
-      return 'control';
-    }
-    const pVal = pValues[row];
-    if (Number.isNaN(pVal) || !Number.isFinite(pVal)) {
-      return 'invalid';
+    const safeControlIndex = Math.min(control, data.length - 1);
+    if (safeControlIndex !== control) {
+      setControl(safeControlIndex);
+      computeTTest(safeControlIndex);
+    } else {
+      computeTTest(control);
     }
+  }, [computeTTest, control, data]);
 
-    return ''; // p-values won't normally be colored
-  }
+  const getLiftStatus = useCallback(
+    (row: number): string => {
+      // Get a css class name for coloring
+      if (row === control) {
+        return 'control';
+      }
+      const liftVal = liftValues[row];
+      if (Number.isNaN(liftVal) || !Number.isFinite(liftVal)) {
+        return 'invalid'; // infinite or NaN values
+      }
 
-  getSignificance(row: number): string | boolean {
-    const { control, pValues } = this.state;
-    const { alpha } = this.props;
-    // Color significant as green, else red
-    if (row === control) {
-      return 'control';
-    }
+      return Number(liftVal) >= 0 ? 'true' : 'false'; // green on true, red on false
+    },
+    [control, liftValues],
+  );
 
-    // p-values significant below set threshold
-    return Number(pValues[row]) <= alpha;
-  }
+  const getPValueStatus = useCallback(
+    (row: number): string => {
+      if (row === control) {
+        return 'control';
+      }
+      const pVal = pValues[row];
+      if (Number.isNaN(pVal) || !Number.isFinite(pVal)) {

Review Comment:
   **Suggestion:** The `getPValueStatus` function similarly checks `pVal` with 
`Number.isNaN` and `Number.isFinite` on a `string | number` value, but p-values 
are stored as strings (produced via `.toFixed`), and `Number.isFinite` does not 
coerce its argument, so it will always return false for non-control rows and 
all p-value cells are marked "invalid"; convert `pVal` to a number before these 
checks so valid p-values are handled correctly. [logic error]
   
   <details>
   <summary><b>Severity Level:</b> Major ⚠️</summary>
   
   ```mdx
   - ⚠️ Paired t-test table shows all p-values as invalid.
   - ⚠️ Users may distrust valid statistical significance results.
   - ⚠️ Visual styling contradicts underlying numeric p-value data.
   - ⚠️ Limited to legacy paired t-test chart visualization.
   ```
   </details>
   
   ```suggestion
         const numericPVal = Number(pVal);
         if (Number.isNaN(numericPVal) || !Number.isFinite(numericPVal)) {
   ```
   <details>
   <summary><b>Steps of Reproduction ✅ </b></summary>
   
   ```mdx
   1. In the Superset UI, create or open a "Paired t-test table" chart, which 
uses
   `PairedTTest`
   
(`superset-frontend/plugins/legacy-plugin-chart-paired-t-test/src/PairedTTest.tsx:109-139`)
   and renders `TTestTable` for each metric at `PairedTTest.tsx:123-132`.
   
   2. Provide experiment data with multiple groups so `data[metric]` contains 
several
   `DataEntry` items; `TTestTable` receives these via props at 
`TTestTable.tsx:43-50` and
   initializes `pValues` state (`useState<(string | number)[]>` at line 53).
   
   3. On mount/update, `computeTTest` at `TTestTable.tsx:108-135` fills 
`pValues` with the
   results of `computePValue` at `TTestTable.tsx:72-106`, which returns 
p-values as strings
   using `.toFixed(pValPrec)` for valid cases and `NaN` (number) on error.
   
   4. While building table rows at `TTestTable.tsx:234-247`, each non-control 
row uses
   `getPValueStatus` at `TTestTable.tsx:171-184` to compute the `className` for 
its p-value
   cell; since `pVal` is usually a string from `.toFixed`, 
`Number.isFinite(pVal)` is always
   `false`, making `Number.isNaN(pVal) || !Number.isFinite(pVal)` always `true` 
for
   non-control rows and causing all p-value cells to be styled as `"invalid"` 
(yellow per
   `.reactable-data tr .invalid` in `PairedTTest.tsx:79-80`) instead of having 
no special
   color as intended.
   ```
   </details>
   <details>
   <summary><b>Prompt for AI Agent 🤖 </b></summary>
   
   ```mdx
   This is a comment left during a code review.
   
   **Path:** 
superset-frontend/plugins/legacy-plugin-chart-paired-t-test/src/TTestTable.tsx
   **Line:** 177:177
   **Comment:**
        *Logic Error: The `getPValueStatus` function similarly checks `pVal` 
with `Number.isNaN` and `Number.isFinite` on a `string | number` value, but 
p-values are stored as strings, so `Number.isFinite` will always return false 
for non-control rows and all p-value cells are marked "invalid"; convert to a 
number before these checks so valid p-values are handled correctly.
   
   Validate the correctness of the flagged issue. If correct, How can I resolve 
this? If you propose a fix, implement it and please make it concise.
   ```
   </details>
   <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=04f791c0266c788c8296e9672ac3118ce3d059317d7a64a772d63175c84a731a&reaction=like'>👍</a>
 | <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=04f791c0266c788c8296e9672ac3118ce3d059317d7a64a772d63175c84a731a&reaction=dislike'>👎</a>



##########
superset-frontend/src/dashboard/components/Dashboard.tsx:
##########
@@ -321,24 +217,144 @@ class Dashboard extends PureComponent<DashboardProps> {
     });
 
     // remove dup in affectedChartIds
-    this.refreshCharts([...new Set(affectedChartIds)]);
-    this.appliedFilters = activeFilters;
-    this.appliedOwnDataCharts = ownDataCharts;
-  }
+    refreshCharts([...new Set(affectedChartIds)]);
+    appliedFiltersRef.current = activeFilters;
+    appliedOwnDataChartsRef.current = ownDataCharts;
+  }, [activeFilters, ownDataCharts, slices, refreshCharts]);
 
-  refreshCharts(ids: (string | number)[]): void {
-    ids.forEach(id => {
-      this.props.actions.triggerQuery(true, id);
-    });
-  }
+  const applyCharts = useCallback((): void => {
+    if (!chartConfiguration) {
+      // For a first loading we need to wait for cross filters charts data loaded to get all active filters
+      // for correct comparing of filters to avoid unnecessary requests
+      return;
+    }
+
+    if (
+      !editMode &&
+      (!areObjectsEqual(appliedOwnDataChartsRef.current, ownDataCharts, {
+        ignoreUndefined: true,
+      }) ||
+        !areObjectsEqual(appliedFiltersRef.current, activeFilters, {
+          ignoreUndefined: true,
+        }))
+    ) {
+      applyFilters();
+    }
 
-  render(): ReactNode {
-    const context = this.context as PluginContextType;
-    if (context.loading) {
-      return <Loading />;
+    if (hasUnsavedChanges) {
+      onBeforeUnload(true);
+    } else {
+      onBeforeUnload(false);
     }
-    return this.props.children;
+  }, [
+    chartConfiguration,
+    editMode,
+    ownDataCharts,
+    activeFilters,
+    hasUnsavedChanges,
+    applyFilters,
+  ]);
+
+  const onVisibilityChange = useCallback((): void => {
+    if (document.visibilityState === 'hidden') {
+      // from visible to hidden
+      visibilityEventDataRef.current = {
+        start_offset: Logger.getTimestamp(),
+        ts: new Date().getTime(),
+      };
+    } else if (document.visibilityState === 'visible') {
+      // from hidden to visible
+      const logStart = visibilityEventDataRef.current.start_offset;
+      actions.logEvent(LOG_ACTIONS_HIDE_BROWSER_TAB, {
+        ...visibilityEventDataRef.current,
+        duration: Logger.getTimestamp() - logStart,
+      });
+    }
+  }, [actions]);
+
+  // componentDidMount equivalent
+  useEffect(() => {
+    const bootstrapData = getBootstrapData();
+    const eventData: Record<string, unknown> = {
+      is_soft_navigation: Logger.timeOriginOffset > 0,
+      is_edit_mode: editMode,
+      mount_duration: Logger.getTimestamp(),
+      is_empty: isDashboardEmpty(layout),
+      is_published: isPublished,
+      bootstrap_data_length: JSON.stringify(bootstrapData).length,
+    };
+    const directLinkComponentId = getLocationHash();
+    if (directLinkComponentId) {
+      eventData.target_id = directLinkComponentId;
+    }
+    actions.logEvent(LOG_ACTIONS_MOUNT_DASHBOARD, eventData);
+
+    // Handle browser tab visibility change
+    if (document.visibilityState === 'hidden') {
+      visibilityEventDataRef.current = {
+        start_offset: Logger.getTimestamp(),
+        ts: new Date().getTime(),
+      };
+    }
+    window.addEventListener('visibilitychange', onVisibilityChange);
+
+    // componentWillUnmount equivalent
+    return () => {
+      window.removeEventListener('visibilitychange', onVisibilityChange);

Review Comment:
   **Suggestion:** The `beforeunload` listener used to warn about unsaved 
changes is added via `onBeforeUnload(true)` but never explicitly removed when 
the Dashboard component unmounts; if the component is unmounted while there are 
still unsaved changes, the global listener remains registered and will continue 
to trigger warnings even after navigating away from the dashboard, leading to 
incorrect prompts and a leak of a global event handler. The cleanup function of 
the mount `useEffect` should always call `onBeforeUnload(false)` to deregister 
the listener on unmount. [logic error]
   
   <details>
   <summary><b>Severity Level:</b> Major ⚠️</summary>
   
   ```mdx
   - ⚠️ Stale unsaved changes prompt after leaving dashboard page.
   - ⚠️ Global beforeunload handler persists beyond dashboard lifecycle.
   - ⚠️ User confusion when closing tab on unrelated application pages.
   ```
   </details>
   
   ```suggestion
      // Ensure any beforeunload handler is removed when the dashboard unmounts
         onBeforeUnload(false);
   ```
   <details>
   <summary><b>Steps of Reproduction ✅ </b></summary>
   
   ```mdx
   1. Load any dashboard route that renders `DashboardPage`
   (`superset-frontend/src/dashboard/containers/DashboardPage.tsx:113-317`), 
which in turn
   renders the connected `Dashboard` component from
   `superset-frontend/src/dashboard/containers/Dashboard.ts:22` and
   `superset-frontend/src/dashboard/components/Dashboard.tsx:108-124`.
   
   2. Perform an edit that marks the dashboard as dirty so 
`dashboardState.hasUnsavedChanges`
   becomes `true` (this flows via `mapStateToProps` in
   `superset-frontend/src/dashboard/containers/Dashboard.ts:34-56` as 
`hasUnsavedChanges`
   into `DashboardProps` at
   `superset-frontend/src/dashboard/components/Dashboard.tsx:70-86`). With
   `hasUnsavedChanges` true, the `applyCharts` callback 
(`Dashboard.tsx:225-256`) calls
   `onBeforeUnload(true)` (`Dashboard.tsx:244-247`), which registers
   `window.addEventListener('beforeunload', unload)` via `onBeforeUnload` at
   `Dashboard.tsx:93-99`.
   
   3. Navigate away from the dashboard to another React route (for example, to 
another page
   component using React Router) so that the `Dashboard` component unmounts. On 
unmount, the
   effect cleanup in `Dashboard.tsx:275-309` runs and its returned function at
   `Dashboard.tsx:301-305` executes, which only removes the `visibilitychange` 
listener and
   clears chart/data mask state, but never calls `onBeforeUnload(false)`, 
leaving the global
   `beforeunload` listener (`unload` at `Dashboard.tsx:101-105`) registered on 
`window`.
   
   4. While now on a different page (e.g., chart list or SQL Lab) in the same 
browser tab,
   attempt to close the tab or reload the page. The still-registered 
`beforeunload` listener
   from `Dashboard.tsx:93-105` fires and sets `event.returnValue` in `unload`, 
causing the
   browser to show a `"You have unsaved changes."` prompt even though the 
dashboard component
   has been unmounted and the user is no longer on the dashboard, demonstrating 
the stale
   global handler.
   ```
   </details>
   <details>
   <summary><b>Prompt for AI Agent 🤖 </b></summary>
   
   ```mdx
   This is a comment left during a code review.
   
   **Path:** superset-frontend/src/dashboard/components/Dashboard.tsx
   **Line:** 303:303
   **Comment:**
        *Logic Error: The `beforeunload` listener used to warn about unsaved 
changes is added via `onBeforeUnload(true)` but never explicitly removed when 
the Dashboard component unmounts; if the component is unmounted while there are 
still unsaved changes, the global listener remains registered and will continue 
to trigger warnings even after navigating away from the dashboard, leading to 
incorrect prompts and a leak of a global event handler. The cleanup function of 
the mount `useEffect` should always call `onBeforeUnload(false)` to deregister 
the listener on unmount.
   
   Validate the correctness of the flagged issue. If correct, How can I resolve 
this? If you propose a fix, implement it and please make it concise.
   ```
   </details>
   <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=de318d7ba46d6f2c2dca2dd20ef1f068668fa6b7f46751a982e6dc2710551626&reaction=like'>👍</a>
 | <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=de318d7ba46d6f2c2dca2dd20ef1f068668fa6b7f46751a982e6dc2710551626&reaction=dislike'>👎</a>



##########
superset-frontend/src/components/Chart/Chart.tsx:
##########
@@ -176,243 +163,310 @@ const MessageSpan = styled.span`
   color: ${({ theme }) => theme.colorText};
 `;
 
-class Chart extends PureComponent<ChartProps, {}> {
-  static defaultProps = defaultProps;
-
-  renderStartTime: any;
-
-  constructor(props: ChartProps) {
-    super(props);
-    this.handleRenderContainerFailure =
-      this.handleRenderContainerFailure.bind(this);
-  }
-
-  componentDidMount() {
-    if (this.props.triggerQuery) {
-      this.runQuery();
-    }
-  }
-
-  componentDidUpdate() {
-    if (this.props.triggerQuery) {
-      this.runQuery();
-    }
-  }
-
-  shouldRenderChart() {
-    return (
-      this.props.isInView ||
+function Chart({
+  addFilter = () => BLANK,
+  onFilterMenuOpen = () => BLANK,
+  onFilterMenuClose = () => BLANK,
+  initialValues = BLANK,
+  setControlValue = () => BLANK,
+  triggerRender = false,
+  dashboardId,
+  chartStackTrace,
+  force = false,
+  isInView = true,
+  ...restProps
+}: ChartProps): JSX.Element {
+  const {
+    actions,
+    chartId,
+    datasource,
+    formData,
+    timeout,
+    ownState,
+    chartAlert,
+    chartStatus,
+    queriesResponse = [],
+    errorMessage,
+    chartIsStale,
+    width,
+    height,
+    datasetsStatus,
+    onQuery,
+    annotationData,
+    labelColors: _labelColors,
+    sharedLabelColors: _sharedLabelColors,
+    vizType,
+    isFiltersInitialized: _isFiltersInitialized,
+    latestQueryFormData,
+    triggerQuery,
+    postTransformProps,
+    emitCrossFilters,
+    onChartStateChange,
+  } = restProps;
+
+  const renderStartTimeRef = useRef<number>(Logger.getTimestamp());
+

Review Comment:
   **Suggestion:** The `renderStartTimeRef` used for logging render durations 
is initialized only once on mount and never updated on subsequent renders, so 
any later chart rendering failures will log a `start_offset` based on the 
initial mount time instead of the current render, producing misleading render 
duration metrics. Updating the ref on every render restores the original class 
component behavior where `renderStartTime` was set at the start of each render. 
[logic error]
   
   <details>
   <summary><b>Severity Level:</b> Major ⚠️</summary>
   
   ```mdx
   - ⚠️ Logged render durations for chart errors are overstated.
   - ⚠️ Error timing metrics in analytics become less trustworthy.
   - ⚠️ Harder to diagnose slow or failing chart renders.
   ```
   </details>
   
   ```suggestion
     // Match class component behavior: refresh start time on every render
     renderStartTimeRef.current = Logger.getTimestamp();
   ```
   <details>
   <summary><b>Steps of Reproduction ✅ </b></summary>
   
   ```mdx
   1. Open any chart that uses the shared Chart component, for example the 
CartoDiagram
   plugin wrapper at
   
`superset-frontend/plugins/plugin-chart-cartodiagram/src/components/ChartWrapper.tsx:58`,
   which renders `<Chart {...chartConfig.properties} height={height} 
width={width} />`.
   
   2. Cause a React render error in the chart (e.g., introduce a bug in a 
visualization so
   that `ChartRenderer` throws during render), which is caught by the 
`ErrorBoundary`
   wrapping the chart container in
   `superset-frontend/src/components/Chart/Chart.tsx:458-469`.
   
   3. When the error occurs, `ErrorBoundary` invokes 
`handleRenderContainerFailure` defined
   in `Chart.tsx:244-261`, which logs a timing event via
   `actions.logEvent(LOG_ACTIONS_RENDER_CHART, { start_offset, duration, ... 
})` using
   `start_offset: renderStartTimeRef.current` and `duration: 
Logger.getTimestamp() -
   renderStartTimeRef.current`.
   
   4. Because `renderStartTimeRef` is created once at mount (`Chart.tsx:207`) 
and never
   updated on subsequent renders, `renderStartTimeRef.current` still holds the 
timestamp of
   the initial mount rather than the current render attempt; therefore, the 
logged
   `start_offset` and `duration` for this failure reflect the time since the 
component was
   first mounted (potentially minutes or hours) instead of the time for the 
failing render,
   leading to misleading render-duration metrics compared with the original 
class component,
   which set `this.renderStartTime = Logger.getTimestamp()` at the beginning of 
each render.
   ```
   </details>
   <details>
   <summary><b>Prompt for AI Agent 🤖 </b></summary>
   
   ```mdx
   This is a comment left during a code review.
   
   **Path:** superset-frontend/src/components/Chart/Chart.tsx
   **Line:** 208:208
   **Comment:**
        *Logic Error: The `renderStartTimeRef` used for logging render 
durations is initialized only once on mount and never updated on subsequent 
renders, so any later chart rendering failures will log a `start_offset` based 
on the initial mount time instead of the current render, producing misleading 
render duration metrics. Updating the ref on every render restores the original 
class component behavior where `renderStartTime` was set at the start of each 
render.
   
   Validate the correctness of the flagged issue. If correct, How can I resolve 
this? If you propose a fix, implement it and please make it concise.
   ```
   </details>
   <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=f717770139170d6eed7a5c1fddbd250c60f6818e2a10eb85e7dbd53b50c38010&reaction=like'>👍</a>
 | <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=f717770139170d6eed7a5c1fddbd250c60f6818e2a10eb85e7dbd53b50c38010&reaction=dislike'>👎</a>



##########
superset-frontend/src/dashboard/components/gridComponents/Markdown/Markdown.tsx:
##########
@@ -335,126 +333,118 @@ class Markdown extends PureComponent<MarkdownProps, MarkdownState> {
         onReady={(handle: EditorInstance) => {
           // The handle provides access to the underlying editor for resize
           if (handle && typeof handle.focus === 'function') {
-            this.setEditor(handle);
+            setEditor(handle);
           }
         }}
         data-test="editor"
       />
-    );
-  }
-
-  renderPreviewMode(): JSX.Element {
-    const { hasError } = this.state;
-
-    return (
-      <SafeMarkdown
-        source={
-          hasError
-            ? MARKDOWN_ERROR_MESSAGE
-            : this.state.markdownSource || MARKDOWN_PLACE_HOLDER
-        }
-        htmlSanitization={this.props.htmlSanitization}
-        htmlSchemaOverrides={this.props.htmlSchemaOverrides}
-      />
-    );
-  }
-
-  render() {
-    const { isFocused, editorMode } = this.state;
-
-    const {
-      component,
-      parentComponent,
-      index,
-      depth,
-      availableColumnCount,
-      columnWidth,
-      onResize,
-      onResizeStop,
-      handleComponentDrop,
-      editMode,
-    } = this.props;
-
-    // inherit the size of parent columns
-    const widthMultiple =
-      parentComponent.type === COLUMN_TYPE
-        ? parentComponent.meta.width || GRID_MIN_COLUMN_COUNT
-        : component.meta.width || GRID_MIN_COLUMN_COUNT;
-
-    const isEditing = editorMode === 'edit';
-
-    return (
-      <Draggable
-        component={component}
-        parentComponent={parentComponent}
-        orientation={parentComponent.type === ROW_TYPE ? 'column' : 'row'}
-        index={index}
-        depth={depth}
-        onDrop={handleComponentDrop}
-        disableDragDrop={isFocused}
-        editMode={editMode}
-      >
-        {({ dragSourceRef }: DragChildProps) => (
-          <WithPopoverMenu
-            onChangeFocus={this.handleChangeFocus}
-            shouldFocus={this.shouldFocusMarkdown}
-            menuItems={[
-              <MarkdownModeDropdown
-                key={`${component.id}-mode`}
-                id={`${component.id}-mode`}
-                value={this.state.editorMode}
-                onChange={this.handleChangeEditorMode}
-              />,
-            ]}
-            editMode={editMode}
+    ),
+    [id, markdownSource, handleMarkdownChange, setEditor],
+  );
+
+  const renderPreviewMode = useMemo(
+    () => (
+      <ErrorBoundary onError={handleRenderError} showMessage={false}>

Review Comment:
   **Suggestion:** Wrapping `SafeMarkdown` in `ErrorBoundary` with 
`showMessage={false}` means that once an error is thrown, the boundary remains 
in an error state and always renders `null`, so the fallback markdown error 
message controlled by `hasError` is never actually displayed and the component 
area stays blank after the first render failure. [logic error]
   
   <details>
   <summary><b>Severity Level:</b> Major ⚠️</summary>
   
   ```mdx
   - ❌ Dashboard markdown widget can become permanently blank after error.
   - ⚠️ Inline markdown error message never appears, only toast shows.
   ```
   </details>
   
   ```suggestion
         <ErrorBoundary
           key={hasError ? 'markdown-error' : 'markdown-ok'}
           onError={handleRenderError}
           showMessage={false}
         >
   ```
   <details>
   <summary><b>Steps of Reproduction ✅ </b></summary>
   
   ```mdx
   1. Open a dashboard that includes a markdown widget, which is rendered by 
`Markdown` in
   
`superset-frontend/src/dashboard/components/gridComponents/Markdown/Markdown.tsx:133-448`
   via the dashboard layout system.
   
   2. Ensure the markdown is in preview mode (the default) so that 
`renderPreviewMode` is
   used; this is defined at `Markdown.tsx:345-366` and wraps `<SafeMarkdown>` in
   `<ErrorBoundary onError={handleRenderError} showMessage={false}>`.
   
   3. Trigger a runtime render error inside `SafeMarkdown` (from
   `@superset-ui/core/components`) — for example, by a malformed input or 
internal bug —
   causing React to throw while rendering the `SafeMarkdown` child. 
`ErrorBoundary`
   (implemented in 
`superset-frontend/src/components/ErrorBoundary/index.tsx:24-61`) catches
   this in `componentDidCatch`, calls its `onError` prop, and sets its internal 
`error`
   state.
   
   4. `ErrorBoundary` calls `handleRenderError` in `Markdown.tsx:302-313`, 
which sets
   `hasError` to `true` and, if `editorMode === 'preview'`, invokes 
`addDangerToast` with the
   user-facing error message. However, because `ErrorBoundary` now has 
`this.state.error` set
   and `showMessage={false}`, its `render()` (at `index.tsx:43-60`) always 
returns `null` and
   never renders its `children`, so the `SafeMarkdown` fallback using
   `MARKDOWN_ERROR_MESSAGE` at `Markdown.tsx:349-352` is never shown. The 
markdown area stays
   permanently blank until the whole widget is unmounted/remounted.
   ```
   </details>
   <details>
   <summary><b>Prompt for AI Agent 🤖 </b></summary>
   
   ```mdx
   This is a comment left during a code review.
   
   **Path:** 
superset-frontend/src/dashboard/components/gridComponents/Markdown/Markdown.tsx
   **Line:** 347:347
   **Comment:**
        *Logic Error: Wrapping `SafeMarkdown` in `ErrorBoundary` with 
`showMessage={false}` means that once an error is thrown, the boundary remains 
in an error state and always renders `null`, so the fallback markdown error 
message controlled by `hasError` is never actually displayed and the component 
area stays blank after the first render failure.
   
   Validate the correctness of the flagged issue. If correct, How can I resolve 
this? If you propose a fix, implement it and please make it concise.
   ```
   </details>
   <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=25dbdd04f2f470ebffc0bd138d35b2857966a31728d9065ae1c5a9bf8d4d20eb&reaction=like'>👍</a>
 | <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=25dbdd04f2f470ebffc0bd138d35b2857966a31728d9065ae1c5a9bf8d4d20eb&reaction=dislike'>👎</a>



##########
superset-frontend/plugins/legacy-plugin-chart-paired-t-test/src/TTestTable.tsx:
##########
@@ -32,279 +32,308 @@ export interface DataEntry {
 }
 
 interface TTestTableProps {
-  alpha: number;
+  alpha?: number;
   data: DataEntry[];
   groups: string[];
-  liftValPrec: number;
+  liftValPrec?: number;
   metric: string;
-  pValPrec: number;
+  pValPrec?: number;
 }
 
-interface TTestTableState {
-  control: number;
-  liftValues: (string | number)[];
-  pValues: (string | number)[];
-}
+function TTestTable({
+  alpha = 0.05,
+  data,
+  groups,
+  liftValPrec = 4,
+  metric,
+  pValPrec = 6,
+}: TTestTableProps) {
+  const [control, setControl] = useState(0);
+  const [liftValues, setLiftValues] = useState<(string | number)[]>([]);
+  const [pValues, setPValues] = useState<(string | number)[]>([]);
 
-const defaultProps = {
-  alpha: 0.05,
-  liftValPrec: 4,
-  pValPrec: 6,
-};
+  const computeLift = useCallback(
+    (values: DataPointValue[], controlValues: DataPointValue[]): string => {
+      // Compute the lift value between two time series
+      let sumValues = 0;
+      let sumControl = 0;
+      values.forEach((value, i) => {
+        sumValues += value.y;
+        sumControl += controlValues[i].y;
+      });
 
-class TTestTable extends Component<TTestTableProps, TTestTableState> {
-  static defaultProps = defaultProps;
+      return (((sumValues - sumControl) / sumControl) * 100).toFixed(
+        liftValPrec,
+      );
+    },
+    [liftValPrec],
+  );
 
-  constructor(props: TTestTableProps) {
-    super(props);
-    this.state = {
-      control: 0,
-      liftValues: [],
-      pValues: [],
-    };
-  }
+  const computePValue = useCallback(
+    (
+      values: DataPointValue[],
+      controlValues: DataPointValue[],
+    ): string | number => {
+      // Compute the p-value from Student's t-test
+      // between two time series
+      let diffSum = 0;
+      let diffSqSum = 0;
+      let finiteCount = 0;
+      values.forEach((value, i) => {
+        const diff = controlValues[i].y - value.y;
+        /* eslint-disable-next-line */
+        if (isFinite(diff)) {
+          finiteCount += 1;
+          diffSum += diff;
+          diffSqSum += diff * diff;
+        }
+      });
+      const tvalue = -Math.abs(
+        diffSum *
+          Math.sqrt(
+            (finiteCount - 1) / (finiteCount * diffSqSum - diffSum * diffSum),
+          ),
+      );
+      try {
+        return (2 * new dist.Studentt(finiteCount - 1).cdf(tvalue)).toFixed(
+          pValPrec,
+        ); // two-sided test
+      } catch (error) {
+        return NaN;
+      }
+    },
+    [pValPrec],
+  );
 
-  componentDidMount() {
-    const { control } = this.state;
-    this.computeTTest(control); // initially populate table
-  }
+  const computeTTest = useCallback(
+    (controlIndex: number) => {
+      // Compute lift and p-values for each row
+      // against the selected control
+      const newPValues: (string | number)[] = [];
+      const newLiftValues: (string | number)[] = [];
+      if (!data) {
+        return;
+      }
+      for (let i = 0; i < data.length; i += 1) {
+        if (i === controlIndex) {
+          newPValues.push('control');
+          newLiftValues.push('control');
+        } else {
+          newPValues.push(
+            computePValue(data[i].values, data[controlIndex].values),
+          );
+          newLiftValues.push(
+            computeLift(data[i].values, data[controlIndex].values),
+          );
+        }
+      }
+      setControl(controlIndex);
+      setLiftValues(newLiftValues);
+      setPValues(newPValues);
+    },
+    [data, computeLift, computePValue],
+  );
 
-  getLiftStatus(row: number): string {
-    const { control, liftValues } = this.state;
-    // Get a css class name for coloring
-    if (row === control) {
-      return 'control';
-    }
-    const liftVal = liftValues[row];
-    if (Number.isNaN(liftVal) || !Number.isFinite(liftVal)) {
-      return 'invalid'; // infinite or NaN values
+  // Recompute table when data or control row changes, keeping control index 
in range
+  useEffect(() => {
+    if (!data || data.length === 0) {
+      setControl(0);
+      setLiftValues([]);
+      setPValues([]);
+      return;
     }
 
-    return Number(liftVal) >= 0 ? 'true' : 'false'; // green on true, red on 
false
-  }
-
-  getPValueStatus(row: number): string {
-    const { control, pValues } = this.state;
-    if (row === control) {
-      return 'control';
-    }
-    const pVal = pValues[row];
-    if (Number.isNaN(pVal) || !Number.isFinite(pVal)) {
-      return 'invalid';
+    const safeControlIndex = Math.min(control, data.length - 1);
+    if (safeControlIndex !== control) {
+      setControl(safeControlIndex);
+      computeTTest(safeControlIndex);
+    } else {
+      computeTTest(control);
     }
+  }, [computeTTest, control, data]);
 
-    return ''; // p-values won't normally be colored
-  }
+  const getLiftStatus = useCallback(
+    (row: number): string => {
+      // Get a css class name for coloring
+      if (row === control) {
+        return 'control';
+      }
+      const liftVal = liftValues[row];
+      if (Number.isNaN(liftVal) || !Number.isFinite(liftVal)) {
+        return 'invalid'; // infinite or NaN values
+      }
 
-  getSignificance(row: number): string | boolean {
-    const { control, pValues } = this.state;
-    const { alpha } = this.props;
-    // Color significant as green, else red
-    if (row === control) {
-      return 'control';
-    }
+      return Number(liftVal) >= 0 ? 'true' : 'false'; // green on true, red on 
false

Review Comment:
   **Suggestion:** The `getLiftStatus` function checks `liftVal` with 
`Number.isNaN` and `Number.isFinite` directly on a `string | number` value, but 
lift values are stored as strings (from `toFixed`), so `Number.isFinite` will 
always return false for non-control rows and every non-control row will be 
marked as "invalid" instead of "true"/"false"; convert to a number before these 
checks so valid values are classified correctly. [logic error]
   
   <details>
   <summary><b>Severity Level:</b> Major ⚠️</summary>
   
   ```mdx
   - ⚠️ Paired t-test table miscolors all non-control lift cells.
   - ⚠️ Users see warning color instead of positive/negative lift.
   - ⚠️ Visual interpretation of experiment results becomes misleading.
   - ⚠️ Only affects legacy paired t-test chart visualization.
   ```
   </details>
   
   ```suggestion
         const numericLift = Number(liftVal);
         if (Number.isNaN(numericLift) || !Number.isFinite(numericLift)) {
           return 'invalid'; // infinite or NaN values
         }
   
         return numericLift >= 0 ? 'true' : 'false'; // green on true, red on 
false
   ```
   <details>
   <summary><b>Steps of Reproduction ✅ </b></summary>
   
   ```mdx
   1. In the Superset UI, create or open a chart using the "Paired t-test table"
   visualization, which is backed by `PairedTTest` at
   
`superset-frontend/plugins/legacy-plugin-chart-paired-t-test/src/PairedTTest.tsx:109-139`
   and imports `TTestTable` from `./TTestTable` at line 21.
   
   2. Ensure the chart has data for at least two groups so that `PairedTTest` 
passes a
   `data[metric]` array with multiple `DataEntry` items into `<TTestTable ... 
/>` at
   `PairedTTest.tsx:123-132`, triggering `TTestTable`'s render path in
   `TTestTable.tsx:43-133`.
   
   3. When `TTestTable` mounts, `computeTTest` at `TTestTable.tsx:108-135` 
computes lift
   values using `computeLift` at `TTestTable.tsx:55-70`, which returns `string` 
values via
   `.toFixed(liftValPrec)`; these strings are stored in `liftValues` state 
(`useState<(string
   | number)[]>` at line 52).
   
   4. During row rendering at `TTestTable.tsx:234-255`, each non-control row 
calls
   `getLiftStatus` at `TTestTable.tsx:155-169`; since `liftVal` is a string,
   `Number.isFinite(liftVal)` is always `false` (per JavaScript semantics for
   `Number.isFinite` on non-number values), so the condition 
`Number.isNaN(liftVal) ||
   !Number.isFinite(liftVal)` is always `true` for non-control rows, causing 
every
   non-control lift cell to use the `"invalid"` CSS class, which `PairedTTest` 
styles in
   yellow via `.reactable-data tr .invalid` at `PairedTTest.tsx:79-80`, instead 
of
   `"true"`/`"false"` classes mapped to green/red at `PairedTTest.tsx:67-73`.
   ```
   </details>
   <details>
   <summary><b>Prompt for AI Agent 🤖 </b></summary>
   
   ```mdx
   This is a comment left during a code review.
   
   **Path:** 
superset-frontend/plugins/legacy-plugin-chart-paired-t-test/src/TTestTable.tsx
   **Line:** 162:166
   **Comment:**
        *Logic Error: The `getLiftStatus` function checks `liftVal` with 
`Number.isNaN` and `Number.isFinite` directly on a `string | number` value, but 
lift values are stored as strings (from `toFixed`), so `Number.isFinite` will 
always return false for non-control rows and every non-control row will be 
marked as "invalid" instead of "true"/"false"; convert to a number before these 
checks so valid values are classified correctly.
   
   Validate the correctness of the flagged issue. If correct, how can I resolve 
this? If you propose a fix, implement it and please make it concise.
   ```
   </details>
   <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=43bb7605bf42a925ef9f4dc2617372f6f90cca3bd8b33c71ce11626809efa549&reaction=like'>👍</a>
 | <a 
href='https://app.codeant.ai/feedback?pr_url=https%3A%2F%2Fgithub.com%2Fapache%2Fsuperset%2Fpull%2F37902&comment_hash=43bb7605bf42a925ef9f4dc2617372f6f90cca3bd8b33c71ce11626809efa549&reaction=dislike'>👎</a>



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to