From 708206debb3eb94aa5ab07b756188b9b9bd4bea3 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Tue, 25 Nov 2025 21:54:28 -0500 Subject: [PATCH 01/18] feat: Add the submenu called Export Current View UI --- .../useExploreAdditionalActionsMenu/index.jsx | 70 +++++++++++++++++-- 1 file changed, 65 insertions(+), 5 deletions(-) diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx index b85817f698e8..736ee0aa3c15 100644 --- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx @@ -72,6 +72,9 @@ const MENU_KEYS = { VIEW_QUERY: 'view_query', RUN_IN_SQL_LAB: 'run_in_sql_lab', EXPORT_TO_PIVOT_XLSX: 'export_to_pivot_xlsx', + DATA_EXPORT_OPTIONS: 'data_export_options', + EXPORT_ALL_DATA_GROUP: 'export_all_data_group', + EXPORT_CURRENT_VIEW_GROUP: 'export_current_view_group', }; const VIZ_TYPES_PIVOTABLE = [VizType.PivotTable]; @@ -368,6 +371,8 @@ export const useExploreAdditionalActionsMenu = ( // Download submenu const downloadChildren = []; + // Current View Download Submenu for Table Type + const currentViewChildren = [...downloadChildren]; if (VIZ_TYPES_PIVOTABLE.includes(latestQueryFormData.viz_type)) { downloadChildren.push( @@ -443,7 +448,7 @@ export const useExploreAdditionalActionsMenu = ( }); } - downloadChildren.push( + currentViewChildren.push( { key: MENU_KEYS.EXPORT_TO_JSON, label: t('Export to .JSON'), @@ -462,7 +467,7 @@ export const useExploreAdditionalActionsMenu = ( }, { key: MENU_KEYS.DOWNLOAD_AS_IMAGE, - label: t('Download as image'), + label: t('Export screenshot (jpeg)'), icon: , onClick: e => { downloadAsImage( @@ -498,11 +503,66 @@ export const useExploreAdditionalActionsMenu = ( }, ); + const allDataChildren = [ + { + key: 'export_full_csv', + label: t('Export to .CSV'), + icon: , + disabled: !canDownloadCSV, + onClick: () => { + exportCSV(); + setIsDropdownVisible(false); + dispatch(logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_CSV, { + chartId: slice?.slice_id, chartName: slice?.slice_name, + })); + }, + }, + { + key: 'export_full_json', + label: t('Export to .JSON'), + icon: , + disabled: !canDownloadCSV, + onClick: () => { + exportJson(); + setIsDropdownVisible(false); + dispatch(logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_JSON, { + chartId: slice?.slice_id, chartName: slice?.slice_name, + })); + }, + }, + { + key: 'export_full_xlsx', + label: t('Export to Excel'), + icon: , + disabled: !canDownloadCSV, + onClick: () => { + exportExcel(); + setIsDropdownVisible(false); + dispatch(logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_XLS, { + chartId: slice?.slice_id, chartName: slice?.slice_name, + })); + }, + }, + ]; + menuItems.push({ - key: MENU_KEYS.DOWNLOAD_SUBMENU, + key: MENU_KEYS.DATA_EXPORT_OPTIONS, type: 'submenu', - label: t('Download'), - children: downloadChildren, + label: t('Data Export Options'), + children: [ + { + key: MENU_KEYS.EXPORT_ALL_DATA_GROUP, + type: 'submenu', + label: t('Export All Data'), + children: allDataChildren, + }, + { + key: MENU_KEYS.EXPORT_CURRENT_VIEW_GROUP, + type: 'submenu', + label: t('Export Current View'), + children: currentViewChildren, + }, + ], }); // Share submenu From ddfa5103f0381d1daa82f75ac17435534ee52751 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Tue, 25 Nov 2025 21:58:44 -0500 Subject: [PATCH 02/18] feat: Add the option of exporting csv for the current view submenu --- 
 .../useExploreAdditionalActionsMenu/index.jsx | 25 +++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
index 736ee0aa3c15..0b611b58012e 100644
--- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
+++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
@@ -75,6 +75,7 @@ const MENU_KEYS = {
   DATA_EXPORT_OPTIONS: 'data_export_options',
   EXPORT_ALL_DATA_GROUP: 'export_all_data_group',
   EXPORT_CURRENT_VIEW_GROUP: 'export_current_view_group',
+  EXPORT_CURRENT_TO_CSV: 'export_current_to_csv',
 };
 
 const VIZ_TYPES_PIVOTABLE = [VizType.PivotTable];
@@ -449,6 +450,29 @@ export const useExploreAdditionalActionsMenu = (
   }
   currentViewChildren.push(
+    {
+      key: MENU_KEYS.EXPORT_CURRENT_TO_CSV,
+      label: t('Export to .CSV'),
+      icon: ,
+      disabled: !canDownloadCSV,
+      onClick: () => {
+        // Use 'results' to export the *current view* (as opposed to 'full').
+        // Pass ownState so client/UI state (e.g., filters) can be respected when supported.
+        exportChart({
+          formData: latestQueryFormData,
+          ownState,
+          resultType: 'results',
+          resultFormat: 'csv',
+        });
+        setIsDropdownVisible(false);
+        dispatch(
+          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_CSV, {
+            chartId: slice?.slice_id,
+            chartName: slice?.slice_name,
+          }),
+        );
+      },
+    },
     {
       key: MENU_KEYS.EXPORT_TO_JSON,
       label: t('Export to .JSON'),
       icon: ,
@@ -679,6 +703,7 @@ export const useExploreAdditionalActionsMenu = (
     showDashboardSearch,
     slice,
     theme.sizeUnit,
+    ownState,
   ]);
 
   // Return streaming modal state and handlers for parent to render

From 8264a1a72d374f4f34fb2011d19f19c08cde103c Mon Sep 17 00:00:00 2001
From: Frank Zhang
Date: Tue, 25 Nov 2025 22:12:15 -0500
Subject: [PATCH 03/18] feat: Update the core functionality of exporting current view table (CSV/JSON/XLSX)

---
 .../useExploreAdditionalActionsMenu/index.jsx | 156 ++++++++++++++--
 1 file changed, 142 insertions(+), 14 deletions(-)

diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
index 0b611b58012e..f7ef0e78ec83 100644
--- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
+++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
@@ -311,6 +311,107 @@ export const useExploreAdditionalActionsMenu = (
     }
   }, [addDangerToast, addSuccessToast, latestQueryFormData]);
 
+  // Minimal client-side CSV builder used for "Current View" when pagination is disabled
+  const downloadClientCSV = (rows, columns, filename) => {
+    if (!rows?.length || !columns?.length) return;
+    const esc = v => {
+      if (v === null || v === undefined) return '';
+      const s = String(v);
+      const wrapped = /[",\n]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s;
+      return wrapped;
+    };
+    const header = columns.map(c => esc(c.label ?? c.key ?? '')).join(',');
+    const body = rows
+      .map(r => columns.map(c => esc(r[c.key])).join(','))
+      .join('\n');
+    const csv = `${header}\n${body}`;
+    const blob = new Blob([csv], { type: 'text/csv;charset=utf-8;' });
+    const link = document.createElement('a');
+    link.href = URL.createObjectURL(blob);
+    link.download = `${filename || 'current_view'}.csv`;
+    document.body.appendChild(link);
+    link.click();
+    document.body.removeChild(link);
+    URL.revokeObjectURL(link.href);
+  };
+
+  // Robust client-side JSON for "Current View"
+  const downloadClientJSON = (rows, columns, filename) => {
+    if (!rows?.length || !columns?.length) return;
+
+    const norm = v => {
+      if (v instanceof Date) return v.toISOString();
+      if (v && typeof v === 'object' && 'input' in v && 'formatter' in v) {
+        const dv = v.input ?? v.value ?? v.toString?.() ?? '';
+        return dv instanceof Date ? dv.toISOString() : dv;
+      }
+      return v;
+    };
+
+    const data = rows.map(r => {
+      const out = {};
+      columns.forEach(c => {
+        out[c.key] = norm(r[c.key]);
+      });
+      return out;
+    });
+
+    const meta = {
+      columns: columns.map(c => ({
+        key: c.key,
+        label: c.label ?? c.key,
+      })),
+      count: rows.length,
+    };
+
+    const payload = { meta, data };
+    const blob = new Blob([JSON.stringify(payload, null, 2)], {
+      type: 'application/json;charset=utf-8;',
+    });
+    const link = document.createElement('a');
+    link.href = URL.createObjectURL(blob);
+    link.download = `${filename || 'current_view'}.json`;
+    document.body.appendChild(link);
+    link.click();
+    document.body.removeChild(link);
+    URL.revokeObjectURL(link.href);
+  };
+
+  // NEW: Client-side XLSX for "Current View" (uses 'xlsx' already in deps)
+  const downloadClientXLSX = async (rows, columns, filename) => {
+    if (!rows?.length || !columns?.length) return;
+    try {
+      const XLSX = (await import(/* webpackChunkName: "xlsx" */ 'xlsx')).default;
+
+      // Build a flat array of objects keyed by backend column key
+      const data = rows.map(r => {
+        const o = {};
+        columns.forEach(c => {
+          const v = r[c.key];
+          o[c.label ?? c.key] =
+            v && typeof v === 'object' && 'input' in v && 'formatter' in v
+              ? (v.input instanceof Date ? v.input.toISOString() : (v.input ?? v.value ?? ''))
+              : (v instanceof Date ?
v.toISOString() : v); + }); + return o; + }); + + const ws = XLSX.utils.json_to_sheet(data, { skipHeader: false }); + const wb = XLSX.utils.book_new(); + XLSX.utils.book_append_sheet(wb, ws, 'Current View'); + + // Autosize columns (roughly) by header length + const colWidths = (Object.keys(data[0] || {})).map(h => ({ wch: Math.max(10, String(h).length + 2) })); + ws['!cols'] = colWidths; + + XLSX.writeFile(wb, `${(filename || 'current_view')}.xlsx`); + } catch (e) { + // If xlsx isn’t available for some reason, fall back to CSV + downloadClientCSV(rows, columns, filename || 'current_view'); + addDangerToast?.(t('Falling back to CSV; Excel export library not available.')); + } + }; + const menu = useMemo(() => { const menuItems = []; @@ -372,8 +473,6 @@ export const useExploreAdditionalActionsMenu = ( // Download submenu const downloadChildren = []; - // Current View Download Submenu for Table Type - const currentViewChildren = [...downloadChildren]; if (VIZ_TYPES_PIVOTABLE.includes(latestQueryFormData.viz_type)) { downloadChildren.push( @@ -449,7 +548,7 @@ export const useExploreAdditionalActionsMenu = ( }); } - currentViewChildren.push( + const currentViewChildren = [ { key: MENU_KEYS.EXPORT_CURRENT_TO_CSV, label: t('Export to .CSV'), @@ -458,12 +557,21 @@ export const useExploreAdditionalActionsMenu = ( onClick: () => { // Use 'results' to export the *current view* (as opposed to 'full'). // Pass ownState so client/UI state (e.g., filters) can be respected when supported. - exportChart({ - formData: latestQueryFormData, - ownState, - resultType: 'results', - resultFormat: 'csv', - }); + if ( + !latestQueryFormData?.server_pagination && + ownState?.clientView?.rows?.length && + ownState?.clientView?.columns?.length + ) { + const { rows, columns } = ownState.clientView; + downloadClientCSV(rows, columns, slice?.slice_name || 'current_view'); + } else { + exportChart({ + formData: latestQueryFormData, + ownState, + resultType: 'results', + resultFormat: 'csv', + }); + } setIsDropdownVisible(false); dispatch( logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_CSV, { @@ -479,7 +587,16 @@ export const useExploreAdditionalActionsMenu = ( icon: , disabled: !canDownloadCSV, onClick: () => { - exportJson(); + if ( + !latestQueryFormData?.server_pagination && + ownState?.clientView?.rows?.length && + ownState?.clientView?.columns?.length + ) { + const { rows, columns } = ownState.clientView; + downloadClientJSON(rows, columns, slice?.slice_name || 'current_view'); + } else { + exportJson(); + } setIsDropdownVisible(false); dispatch( logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_JSON, { @@ -514,8 +631,19 @@ export const useExploreAdditionalActionsMenu = ( label: t('Export to Excel'), icon: , disabled: !canDownloadCSV, - onClick: () => { - exportExcel(); + onClick: async () => { + if ( + !latestQueryFormData?.server_pagination && + ownState?.clientView?.rows?.length && + ownState?.clientView?.columns?.length + ) { + // Client-side filtered view → XLSX + const { rows, columns } = ownState.clientView; + await downloadClientXLSX(rows, columns, slice?.slice_name || 'current_view'); + } else { + // Server path (respects backend filters/pagination) + await exportExcel(); + } setIsDropdownVisible(false); dispatch( logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_XLS, { @@ -525,7 +653,7 @@ export const useExploreAdditionalActionsMenu = ( ); }, }, - ); + ]; const allDataChildren = [ { @@ -674,7 +802,7 @@ export const useExploreAdditionalActionsMenu = ( key: MENU_KEYS.RUN_IN_SQL_LAB, label: t('Run in SQL Lab'), onClick: e => { - 
onOpenInEditor(latestQueryFormData, e.domEvent.metaKey); + onOpenInEditor(latestQueryFormData, e.domEvent?.metaKey); setIsDropdownVisible(false); }, }); From 7203c179232b04622a94effc971ebe37f8e7755f Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Tue, 25 Nov 2025 22:28:24 -0500 Subject: [PATCH 04/18] feat: Update the core functionality; export logic and partially display the options --- .../plugin-chart-table/src/TableChart.tsx | 37 +++++++++++++++++++ .../plugins/plugin-chart-table/src/index.ts | 1 + .../components/ExploreViewContainer/index.jsx | 10 ++++- .../useExploreAdditionalActionsMenu/index.jsx | 21 ++++++++--- 4 files changed, 61 insertions(+), 8 deletions(-) diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index c2061d738d11..d59c133c86e1 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -25,6 +25,7 @@ import { MouseEvent, KeyboardEvent as ReactKeyboardEvent, useEffect, + useRef, } from 'react'; import { @@ -1358,6 +1359,41 @@ export default function TableChart( } }; + // collect client-side filtered rows for export & push snapshot to ownState (guarded) + const [clientViewRows, setClientViewRows] = useState([]); + + const exportColumns = useMemo( + () => + visibleColumnsMeta.map(col => ({ + key: col.key, + label: col.config?.customColumnName || col.originalLabel || col.key, + })), + [visibleColumnsMeta], + ); + + const clientViewSigRef = useRef(''); + useEffect(() => { + if (serverPagination) return; // only for client-side mode + + const len = clientViewRows.length; + const first = len ? clientViewRows[0]?.[exportColumns[0]?.key ?? ''] : ''; + const last = len ? clientViewRows[len - 1]?.[exportColumns[0]?.key ?? 
''] : '';
+    const colSig = exportColumns.map(c => c.key).join(',');
+    const sig = `${len}|${String(first)}|${String(last)}|${colSig}`;
+
+    if (sig !== clientViewSigRef.current) {
+      clientViewSigRef.current = sig;
+      updateTableOwnState(setDataMask, {
+        ...(serverPaginationData || {}),
+        clientView: {
+          rows: clientViewRows,
+          columns: exportColumns,
+          count: len,
+        },
+      });
+    }
+  }, [clientViewRows, exportColumns, serverPagination, setDataMask, serverPaginationData]);
+
   return (
@@ -1395,6 +1431,7 @@ export default function TableChart(
         onSearchChange={debouncedSearch}
         searchOptions={searchOptions}
         onFilteredDataChange={handleFilteredDataChange}
+        onFilteredRowsChange={setClientViewRows}
       />
     );
diff --git a/superset-frontend/plugins/plugin-chart-table/src/index.ts b/superset-frontend/plugins/plugin-chart-table/src/index.ts
index 45cf7d8084fc..0518281d9cf2 100644
--- a/superset-frontend/plugins/plugin-chart-table/src/index.ts
+++ b/superset-frontend/plugins/plugin-chart-table/src/index.ts
@@ -39,6 +39,7 @@ const metadata = new ChartMetadata({
     Behavior.InteractiveChart,
     Behavior.DrillToDetail,
     Behavior.DrillBy,
+    'EXPORT_CURRENT_VIEW' as any,
   ],
   category: t('Table'),
   canBeAnnotationTypes: ['EVENT', 'INTERVAL'],
diff --git a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx
index c4372862778a..2443b5b35335 100644
--- a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx
+++ b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx
@@ -600,8 +600,10 @@ function ExploreViewContainer(props) {
     }
   });
 
+  const previousOwnState = usePrevious(props.ownState);
   useEffect(() => {
-    if (props.ownState !== undefined) {
+    const strip = s => (s && typeof s === 'object' ? omit(s, ['clientView']) : s);
+    if (!isEqual(strip(previousOwnState), strip(props.ownState))) {
       onQuery();
       reRenderChart();
     }
@@ -938,10 +940,14 @@ function mapStateToProps(state) {
   const form_data = isDeckGLChart ? getDeckGLFormData() : controlsBasedFormData;
   const slice_id = form_data.slice_id ?? slice?.slice_id ?? 0; // 0 - unsaved chart
+
+  // exclude clientView from extra_form_data; keep other ownState pieces
+  const ownStateForQuery = omit(dataMask[slice_id]?.ownState, ['clientView']);
+
   form_data.extra_form_data = mergeExtraFormData(
     { ...form_data.extra_form_data },
     {
-      ...dataMask[slice_id]?.ownState,
+      ...ownStateForQuery,
     },
   );
   const chart = charts[slice_id];
diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
index f7ef0e78ec83..24dac2a48823 100644
--- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
+++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
@@ -27,6 +27,7 @@ import {
   Button,
   Input,
 } from '@superset-ui/core/components';
+import { getChartMetadataRegistry } from '@superset-ui/core';
 import { Menu } from '@superset-ui/core/components/Menu';
 import { useToasts } from 'src/components/MessageToasts/withToasts';
 import { DEFAULT_CSV_STREAMING_ROW_THRESHOLD } from 'src/constants';
@@ -189,6 +190,12 @@ export const useExploreAdditionalActionsMenu = (
   });
   const showDashboardSearch = dashboards?.length > SEARCH_THRESHOLD;
+  const vizType = latestQueryFormData?.viz_type;
+  const meta = vizType ?
getChartMetadataRegistry().get(vizType) : undefined + + // Detect if the chart plugin exposes the export-current-view behavior + const hasExportCurrentView = !!meta?.behaviors?.includes('EXPORT_CURRENT_VIEW'); + const showCurrentView = vizType === 'table' && hasExportCurrentView; const shareByEmail = useCallback(async () => { try { @@ -708,12 +715,14 @@ export const useExploreAdditionalActionsMenu = ( label: t('Export All Data'), children: allDataChildren, }, - { - key: MENU_KEYS.EXPORT_CURRENT_VIEW_GROUP, - type: 'submenu', - label: t('Export Current View'), - children: currentViewChildren, - }, + ...(showCurrentView + ? [{ + key: MENU_KEYS.EXPORT_CURRENT_VIEW_GROUP, + type: 'submenu', + label: t('Export Current View'), + children: currentViewChildren, + }] + : []), ], }); From 03eb3d950ff9c0cff4ae90babf0c1922bad38c36 Mon Sep 17 00:00:00 2001 From: RebeccaH2003 <114100529+RebeccaH2003@users.noreply.github.com> Date: Tue, 25 Nov 2025 22:32:28 -0500 Subject: [PATCH 05/18] feat: Update the export functionality and logic; fix type issue --- .../src/DataTable/DataTable.tsx | 42 +++++++++++++++++++ .../src/DataTable/types/react-table.d.ts | 2 + .../src/DataTable/utils/externalAPIs.ts | 10 +++-- 3 files changed, 50 insertions(+), 4 deletions(-) diff --git a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx index 1757dd74d83b..2718d0a6014c 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx @@ -90,6 +90,7 @@ export interface DataTableProps extends TableOptions { onSearchColChange: (searchCol: string) => void; searchOptions: SearchOption[]; onFilteredDataChange?: (rows: Row[], filterValue?: string) => void; + onFilteredRowsChange?: (rows: D[]) => void; } export interface RenderHTMLCellProps extends HTMLProps { @@ -133,6 +134,7 @@ export default typedMemo(function DataTable({ onSearchColChange, searchOptions, onFilteredDataChange, + onFilteredRowsChange, ...moreUseTableOptions }: DataTableProps): JSX.Element { const tableHooks: PluginHook[] = [ @@ -204,6 +206,7 @@ export default typedMemo(function DataTable({ ); const { + rows, // filtered/sorted rows before pagination getTableProps, getTableBodyProps, prepareRow, @@ -452,6 +455,45 @@ export default typedMemo(function DataTable({ onServerPaginationChange(pageNumber, serverPageSize); } + // Emit filtered rows to parent in client-side mode (debounced via RAF) + const isMountedRef = useRef(true); + useEffect(() => { + isMountedRef.current = true; + return () => { + isMountedRef.current = false; + }; + }, []); + + const rafRef = useRef(null); + const lastSigRef = useRef(''); + + useEffect(() => { + if (serverPagination || typeof onFilteredRowsChange !== 'function') { + return; + } + + const filtered = rows.map(r => r.original as D); + const len = filtered.length; + const first = len ? Object.values(filtered[0] as any)[0] : ''; + const last = len ? Object.values(filtered[len - 1] as any)[0] : ''; + const sig = `${len}|${String(first)}|${String(last)}`; + + if (sig !== lastSigRef.current) { + lastSigRef.current = sig; + + rafRef.current = requestAnimationFrame(() => { + if (isMountedRef.current) onFilteredRowsChange(filtered); + }); + } + + return () => { + if (rafRef.current != null) { + cancelAnimationFrame(rafRef.current); + rafRef.current = null; + } + }; + }, [rows, serverPagination, onFilteredRowsChange]); + return (
{}, pageNumber: number, pageSize: number, -) => +) => { setDataMask({ ownState: { currentPage: pageNumber, pageSize, }, }); +}; export const updateTableOwnState = ( setDataMask: SetDataMaskHook = () => {}, modifiedOwnState: TableOwnState, -) => +) => { setDataMask({ ownState: modifiedOwnState, }); +}; From a4c6e87a46247f67dde034616f30377dc1e20fa2 Mon Sep 17 00:00:00 2001 From: RebeccaH2003 <114100529+RebeccaH2003@users.noreply.github.com> Date: Thu, 27 Nov 2025 13:33:20 -0500 Subject: [PATCH 06/18] update menu logic --- .../components/useExploreAdditionalActionsMenu/index.jsx | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx index 24dac2a48823..70d3a965d77a 100644 --- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx @@ -195,7 +195,8 @@ export const useExploreAdditionalActionsMenu = ( // Detect if the chart plugin exposes the export-current-view behavior const hasExportCurrentView = !!meta?.behaviors?.includes('EXPORT_CURRENT_VIEW'); - const showCurrentView = vizType === 'table' && hasExportCurrentView; + // const showCurrentView = vizType === 'table' && hasExportCurrentView; + // console.log("I have an update22!!"); const shareByEmail = useCallback(async () => { try { @@ -715,7 +716,7 @@ export const useExploreAdditionalActionsMenu = ( label: t('Export All Data'), children: allDataChildren, }, - ...(showCurrentView + ...(hasExportCurrentView ? [{ key: MENU_KEYS.EXPORT_CURRENT_VIEW_GROUP, type: 'submenu', From 9baa4cf895d973446e74ffcb7af4c9c656de01dd Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Fri, 28 Nov 2025 21:32:24 -0500 Subject: [PATCH 07/18] fix: update style for useExploreAdditionalActionsMenu --- .../components/useExploreAdditionalActionsMenu/index.jsx | 2 -- 1 file changed, 2 deletions(-) diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx index 70d3a965d77a..32f248e19f39 100644 --- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx @@ -195,8 +195,6 @@ export const useExploreAdditionalActionsMenu = ( // Detect if the chart plugin exposes the export-current-view behavior const hasExportCurrentView = !!meta?.behaviors?.includes('EXPORT_CURRENT_VIEW'); - // const showCurrentView = vizType === 'table' && hasExportCurrentView; - // console.log("I have an update22!!"); const shareByEmail = useCallback(async () => { try { From ba16b1d5ce24964fd83ffb6f9e0c55c2222ca340 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Sat, 29 Nov 2025 16:07:29 -0500 Subject: [PATCH 08/18] fix: update the style --- .../src/DataTable/DataTable.tsx | 2 +- .../plugin-chart-table/src/TableChart.tsx | 12 ++- .../components/ExploreViewContainer/index.jsx | 3 +- .../useExploreAdditionalActionsMenu/index.jsx | 86 +++++++++++++------ 4 files changed, 73 insertions(+), 30 deletions(-) diff --git a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx index 2718d0a6014c..aade1f4f6ce1 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx 
+++ b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx @@ -206,7 +206,7 @@ export default typedMemo(function DataTable({ ); const { - rows, // filtered/sorted rows before pagination + rows, // filtered/sorted rows before pagination getTableProps, getTableBodyProps, prepareRow, diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index d59c133c86e1..08ff81994284 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -1377,7 +1377,9 @@ export default function TableChart( const len = clientViewRows.length; const first = len ? clientViewRows[0]?.[exportColumns[0]?.key ?? ''] : ''; - const last = len ? clientViewRows[len - 1]?.[exportColumns[0]?.key ?? ''] : ''; + const last = len + ? clientViewRows[len - 1]?.[exportColumns[0]?.key ?? ''] + : ''; const colSig = exportColumns.map(c => c.key).join(','); const sig = `${len}|${String(first)}|${String(last)}|${colSig}`; @@ -1392,7 +1394,13 @@ export default function TableChart( }, }); } - }, [clientViewRows, exportColumns, serverPagination, setDataMask, serverPaginationData]); + }, [ + clientViewRows, + exportColumns, + serverPagination, + setDataMask, + serverPaginationData, + ]); return ( diff --git a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx index 2443b5b35335..e49066165c29 100644 --- a/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx +++ b/superset-frontend/src/explore/components/ExploreViewContainer/index.jsx @@ -602,7 +602,8 @@ function ExploreViewContainer(props) { const previousOwnState = usePrevious(props.ownState); useEffect(() => { - const strip = s => (s && typeof s === 'object' ? omit(s, ['clientView']) : s); + const strip = s => + s && typeof s === 'object' ? omit(s, ['clientView']) : s; if (!isEqual(strip(previousOwnState), strip(props.ownState))) { onQuery(); reRenderChart(); diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx index 32f248e19f39..180439f63058 100644 --- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx @@ -191,10 +191,12 @@ export const useExploreAdditionalActionsMenu = ( const showDashboardSearch = dashboards?.length > SEARCH_THRESHOLD; const vizType = latestQueryFormData?.viz_type; - const meta = vizType ? getChartMetadataRegistry().get(vizType) : undefined + const meta = vizType ? 
getChartMetadataRegistry().get(vizType) : undefined; // Detect if the chart plugin exposes the export-current-view behavior - const hasExportCurrentView = !!meta?.behaviors?.includes('EXPORT_CURRENT_VIEW'); + const hasExportCurrentView = !!meta?.behaviors?.includes( + 'EXPORT_CURRENT_VIEW', + ); const shareByEmail = useCallback(async () => { try { @@ -387,7 +389,8 @@ export const useExploreAdditionalActionsMenu = ( const downloadClientXLSX = async (rows, columns, filename) => { if (!rows?.length || !columns?.length) return; try { - const XLSX = (await import(/* webpackChunkName: "xlsx" */ 'xlsx')).default; + const XLSX = (await import(/* webpackChunkName: "xlsx" */ 'xlsx')) + .default; // Build a flat array of objects keyed by backend column key const data = rows.map(r => { @@ -396,8 +399,12 @@ export const useExploreAdditionalActionsMenu = ( const v = r[c.key]; o[c.label ?? c.key] = v && typeof v === 'object' && 'input' in v && 'formatter' in v - ? (v.input instanceof Date ? v.input.toISOString() : (v.input ?? v.value ?? '')) - : (v instanceof Date ? v.toISOString() : v); + ? v.input instanceof Date + ? v.input.toISOString() + : (v.input ?? v.value ?? '') + : v instanceof Date + ? v.toISOString() + : v; }); return o; }); @@ -407,14 +414,18 @@ export const useExploreAdditionalActionsMenu = ( XLSX.utils.book_append_sheet(wb, ws, 'Current View'); // Autosize columns (roughly) by header length - const colWidths = (Object.keys(data[0] || {})).map(h => ({ wch: Math.max(10, String(h).length + 2) })); + const colWidths = Object.keys(data[0] || {}).map(h => ({ + wch: Math.max(10, String(h).length + 2), + })); ws['!cols'] = colWidths; - XLSX.writeFile(wb, `${(filename || 'current_view')}.xlsx`); + XLSX.writeFile(wb, `${filename || 'current_view'}.xlsx`); } catch (e) { // If xlsx isn’t available for some reason, fall back to CSV downloadClientCSV(rows, columns, filename || 'current_view'); - addDangerToast?.(t('Falling back to CSV; Excel export library not available.')); + addDangerToast?.( + t('Falling back to CSV; Excel export library not available.'), + ); } }; @@ -569,7 +580,11 @@ export const useExploreAdditionalActionsMenu = ( ownState?.clientView?.columns?.length ) { const { rows, columns } = ownState.clientView; - downloadClientCSV(rows, columns, slice?.slice_name || 'current_view'); + downloadClientCSV( + rows, + columns, + slice?.slice_name || 'current_view', + ); } else { exportChart({ formData: latestQueryFormData, @@ -599,7 +614,11 @@ export const useExploreAdditionalActionsMenu = ( ownState?.clientView?.columns?.length ) { const { rows, columns } = ownState.clientView; - downloadClientJSON(rows, columns, slice?.slice_name || 'current_view'); + downloadClientJSON( + rows, + columns, + slice?.slice_name || 'current_view', + ); } else { exportJson(); } @@ -645,7 +664,11 @@ export const useExploreAdditionalActionsMenu = ( ) { // Client-side filtered view → XLSX const { rows, columns } = ownState.clientView; - await downloadClientXLSX(rows, columns, slice?.slice_name || 'current_view'); + await downloadClientXLSX( + rows, + columns, + slice?.slice_name || 'current_view', + ); } else { // Server path (respects backend filters/pagination) await exportExcel(); @@ -670,9 +693,12 @@ export const useExploreAdditionalActionsMenu = ( onClick: () => { exportCSV(); setIsDropdownVisible(false); - dispatch(logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_CSV, { - chartId: slice?.slice_id, chartName: slice?.slice_name, - })); + dispatch( + logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_CSV, { + chartId: slice?.slice_id, 
+ chartName: slice?.slice_name, + }), + ); }, }, { @@ -683,9 +709,12 @@ export const useExploreAdditionalActionsMenu = ( onClick: () => { exportJson(); setIsDropdownVisible(false); - dispatch(logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_JSON, { - chartId: slice?.slice_id, chartName: slice?.slice_name, - })); + dispatch( + logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_JSON, { + chartId: slice?.slice_id, + chartName: slice?.slice_name, + }), + ); }, }, { @@ -696,9 +725,12 @@ export const useExploreAdditionalActionsMenu = ( onClick: () => { exportExcel(); setIsDropdownVisible(false); - dispatch(logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_XLS, { - chartId: slice?.slice_id, chartName: slice?.slice_name, - })); + dispatch( + logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_XLS, { + chartId: slice?.slice_id, + chartName: slice?.slice_name, + }), + ); }, }, ]; @@ -715,12 +747,14 @@ export const useExploreAdditionalActionsMenu = ( children: allDataChildren, }, ...(hasExportCurrentView - ? [{ - key: MENU_KEYS.EXPORT_CURRENT_VIEW_GROUP, - type: 'submenu', - label: t('Export Current View'), - children: currentViewChildren, - }] + ? [ + { + key: MENU_KEYS.EXPORT_CURRENT_VIEW_GROUP, + type: 'submenu', + label: t('Export Current View'), + children: currentViewChildren, + }, + ] : []), ], }); From 605f7b016b6c9000578a214f59b0aff3ecbf5773 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Mon, 1 Dec 2025 17:41:47 -0500 Subject: [PATCH 09/18] fix: update the style --- superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index 08ff81994284..0abebc8ac9fd 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -1386,7 +1386,7 @@ export default function TableChart( if (sig !== clientViewSigRef.current) { clientViewSigRef.current = sig; updateTableOwnState(setDataMask, { - ...(serverPaginationData || {}), + ...serverPaginationData, clientView: { rows: clientViewRows, columns: exportColumns, From 11523d48801639fae097be047077932fcdef5241 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Mon, 1 Dec 2025 19:29:32 -0500 Subject: [PATCH 10/18] fix: using pre-commit fix style --- .../tests/test_cli_bundle.py | 6 +- .../tests/test_cli_init.py | 24 +- .../tests/test_templates.py | 6 +- superset-extensions-cli/tests/utils.py | 12 +- .../src/DataTable/DataTable.tsx | 5 +- .../plugin-chart-table/src/TableChart.tsx | 27 +- .../useExploreAdditionalActionsMenu/index.jsx | 1 + tests/integration_tests/celery_tests.py | 12 +- tests/integration_tests/dao/base_dao_test.py | 6 +- .../integration_tests/dashboards/api_tests.py | 48 +- .../integration_tests/databases/api_tests.py | 480 +++++++++--------- .../security/row_level_security_tests.py | 24 +- .../sql_lab/commands_tests.py | 13 +- tests/integration_tests/sqla_models_tests.py | 6 +- tests/integration_tests/sqllab_tests.py | 51 +- tests/integration_tests/tags/api_tests.py | 6 +- .../tags/mysql_compatibility_test.py | 108 ++-- .../commands/chart/warm_up_cache_test.py | 36 +- .../common/test_query_context_processor.py | 18 +- .../unit_tests/connectors/sqla/models_test.py | 17 +- tests/unit_tests/dao/base_dao_test.py | 6 +- tests/unit_tests/databases/api_test.py | 2 +- .../db_engine_specs/test_gsheets.py | 101 ++-- .../unit_tests/db_engine_specs/test_mssql.py | 47 +- .../unit_tests/db_engine_specs/test_ocient.py | 6 
+- tests/unit_tests/models/helpers_test.py | 24 +- tests/unit_tests/sql/dialects/pinot_tests.py | 6 +- tests/unit_tests/tags/models_test.py | 84 +-- tests/unit_tests/utils/test_core.py | 12 +- .../utils/test_screenshot_cache_fix.py | 6 +- 30 files changed, 614 insertions(+), 586 deletions(-) diff --git a/superset-extensions-cli/tests/test_cli_bundle.py b/superset-extensions-cli/tests/test_cli_bundle.py index a9c4ffd62555..eaa006c19bfc 100644 --- a/superset-extensions-cli/tests/test_cli_bundle.py +++ b/superset-extensions-cli/tests/test_cli_bundle.py @@ -208,9 +208,9 @@ def test_bundle_includes_all_files_recursively( "backend/src/complex_extension/utils/helpers.py", } - assert expected_files.issubset(file_list), ( - f"Missing files: {expected_files - file_list}" - ) + assert expected_files.issubset( + file_list + ), f"Missing files: {expected_files - file_list}" @pytest.mark.cli diff --git a/superset-extensions-cli/tests/test_cli_init.py b/superset-extensions-cli/tests/test_cli_init.py index 063f9e229239..de58ed76fe34 100644 --- a/superset-extensions-cli/tests/test_cli_init.py +++ b/superset-extensions-cli/tests/test_cli_init.py @@ -82,9 +82,9 @@ def test_init_creates_extension_with_frontend_only( # Should NOT have backend directory backend_path = extension_path / "backend" - assert not backend_path.exists(), ( - "Backend directory should not exist for frontend-only extension" - ) + assert ( + not backend_path.exists() + ), "Backend directory should not exist for frontend-only extension" @pytest.mark.cli @@ -105,9 +105,9 @@ def test_init_creates_extension_with_backend_only( # Should NOT have frontend directory frontend_path = extension_path / "frontend" - assert not frontend_path.exists(), ( - "Frontend directory should not exist for backend-only extension" - ) + assert ( + not frontend_path.exists() + ), "Frontend directory should not exist for backend-only extension" @pytest.mark.cli @@ -148,9 +148,9 @@ def test_init_validates_extension_name( cli_input = f"{invalid_name}\n0.1.0\nApache-2.0\ny\ny\n" result = cli_runner.invoke(app, ["init"], input=cli_input) - assert result.exit_code == 1, ( - f"Expected command to fail for invalid name '{invalid_name}'" - ) + assert ( + result.exit_code == 1 + ), f"Expected command to fail for invalid name '{invalid_name}'" assert expected_error in result.output @@ -174,9 +174,9 @@ def test_init_with_valid_alphanumeric_names(cli_runner, valid_id): cli_input = f"{valid_id}\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n" result = cli_runner.invoke(app, ["init"], input=cli_input) - assert result.exit_code == 0, ( - f"Valid name '{valid_id}' was rejected: {result.output}" - ) + assert ( + result.exit_code == 0 + ), f"Valid name '{valid_id}' was rejected: {result.output}" assert Path(valid_id).exists(), f"Directory for '{valid_id}' was not created" diff --git a/superset-extensions-cli/tests/test_templates.py b/superset-extensions-cli/tests/test_templates.py index b8cabc9ddd92..decc10bf7bdd 100644 --- a/superset-extensions-cli/tests/test_templates.py +++ b/superset-extensions-cli/tests/test_templates.py @@ -297,9 +297,9 @@ def test_template_whitespace_handling(jinja_env, template_context): empty_line_count = sum(1 for line in lines if line.strip() == "") # Some empty lines are OK for formatting, but not excessive - assert empty_line_count < len(lines) / 2, ( - "Too many empty lines in rendered template" - ) + assert ( + empty_line_count < len(lines) / 2 + ), "Too many empty lines in rendered template" # Should be properly formatted JSON parsed = json.loads(rendered) 
diff --git a/superset-extensions-cli/tests/utils.py b/superset-extensions-cli/tests/utils.py index 77513bcb8559..1bd1ec23a268 100644 --- a/superset-extensions-cli/tests/utils.py +++ b/superset-extensions-cli/tests/utils.py @@ -44,9 +44,9 @@ def assert_directory_exists(path: Path, description: str = "") -> None: description: Optional description for better error messages """ desc_msg = f" ({description})" if description else "" - assert path.exists(), ( - f"Expected directory {path}{desc_msg} to exist, but it doesn't" - ) + assert ( + path.exists() + ), f"Expected directory {path}{desc_msg} to exist, but it doesn't" assert path.is_dir(), f"Expected {path}{desc_msg} to be a directory, but it's not" @@ -136,9 +136,9 @@ def assert_json_content(path: Path, expected_values: dict[str, Any]) -> None: for key, expected_value in expected_values.items(): assert key in content, f"Expected key '{key}' not found in {path}" actual_value = content[key] - assert actual_value == expected_value, ( - f"Expected {key}='{expected_value}' but got '{actual_value}' in {path}" - ) + assert ( + actual_value == expected_value + ), f"Expected {key}='{expected_value}' but got '{actual_value}' in {path}" def assert_file_contains(path: Path, text: str) -> None: diff --git a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx index aade1f4f6ce1..bf0e380dc99d 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx @@ -221,7 +221,6 @@ export default typedMemo(function DataTable({ wrapStickyTable, setColumnOrder, allColumns, - rows, state: { pageIndex, pageSize, @@ -480,7 +479,9 @@ export default typedMemo(function DataTable({ if (sig !== lastSigRef.current) { lastSigRef.current = sig; - + if (rafRef.current != null) { + cancelAnimationFrame(rafRef.current); + } rafRef.current = requestAnimationFrame(() => { if (isMountedRef.current) onFilteredRowsChange(filtered); }); diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index 0abebc8ac9fd..7d4f5c386f8b 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -1371,26 +1371,27 @@ export default function TableChart( [visibleColumnsMeta], ); - const clientViewSigRef = useRef(''); + // Use a ref to store previous clientViewRows and exportColumns for robust change detection + const prevClientViewRef = useRef<{ + rows: DataRecord[]; + columns: typeof exportColumns; + } | null>(null); useEffect(() => { if (serverPagination) return; // only for client-side mode - - const len = clientViewRows.length; - const first = len ? clientViewRows[0]?.[exportColumns[0]?.key ?? ''] : ''; - const last = len - ? clientViewRows[len - 1]?.[exportColumns[0]?.key ?? 
''] - : ''; - const colSig = exportColumns.map(c => c.key).join(','); - const sig = `${len}|${String(first)}|${String(last)}|${colSig}`; - - if (sig !== clientViewSigRef.current) { - clientViewSigRef.current = sig; + const prev = prevClientViewRef.current; + const rowsChanged = !prev || !isEqual(prev.rows, clientViewRows); + const columnsChanged = !prev || !isEqual(prev.columns, exportColumns); + if (rowsChanged || columnsChanged) { + prevClientViewRef.current = { + rows: clientViewRows, + columns: exportColumns, + }; updateTableOwnState(setDataMask, { ...serverPaginationData, clientView: { rows: clientViewRows, columns: exportColumns, - count: len, + count: clientViewRows.length, }, }); } diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx index 180439f63058..8a2c662c9a7d 100644 --- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx @@ -874,6 +874,7 @@ export const useExploreAdditionalActionsMenu = ( slice, theme.sizeUnit, ownState, + hasExportCurrentView, ]); // Return streaming modal state and handlers for parent to render diff --git a/tests/integration_tests/celery_tests.py b/tests/integration_tests/celery_tests.py index 8fc5fe293e02..8a7ee09032a4 100644 --- a/tests/integration_tests/celery_tests.py +++ b/tests/integration_tests/celery_tests.py @@ -564,15 +564,15 @@ def my_task(self): # Expect True within an app context with app.app_context(): result = my_task.apply().get() - assert result is True, ( - "Task should have access to current_app within app context" - ) + assert ( + result is True + ), "Task should have access to current_app within app context" # Expect True outside of an app context result = my_task.apply().get() - assert result is True, ( - "Task should have access to current_app outside of app context" - ) + assert ( + result is True + ), "Task should have access to current_app outside of app context" def delete_tmp_view_or_table(name: str, ctas_method: CTASMethod): diff --git a/tests/integration_tests/dao/base_dao_test.py b/tests/integration_tests/dao/base_dao_test.py index b014c3bdaee8..717edde37a6d 100644 --- a/tests/integration_tests/dao/base_dao_test.py +++ b/tests/integration_tests/dao/base_dao_test.py @@ -1425,9 +1425,9 @@ def test_base_dao_list_count_accuracy_with_filters_and_relationships( assert count == 6, f"Expected 6 dashboards, but count was {count}" # Should return only 3 due to page_size - assert len(results) == 3, ( - f"Expected 3 results due to pagination, got {len(results)}" - ) + assert ( + len(results) == 3 + ), f"Expected 3 results due to pagination, got {len(results)}" # Each should have 3 owners as we set up for dashboard in results: diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 0e563bfc8b65..4011880ef6c1 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -3460,30 +3460,30 @@ def test_dashboard_custom_tags_relationship_filters_correctly(self): all_tags = dashboard.tags all_tag_names = [t.name for t in all_tags] assert "critical" in all_tag_names, "Should include custom tag" - assert any(t.name.startswith("owner:") for t in all_tags), ( - "Should include owner tags" - ) - assert any(t.name.startswith("type:") for t in all_tags), ( - "Should include type tags" - ) + 
assert any( + t.name.startswith("owner:") for t in all_tags + ), "Should include owner tags" + assert any( + t.name.startswith("type:") for t in all_tags + ), "Should include type tags" # 2. MODEL: dashboard.custom_tags returns ONLY custom tags custom_only = dashboard.custom_tags custom_tag_names = [t.name for t in custom_only] assert "critical" in custom_tag_names, "Should include custom tag" - assert not any(t.name.startswith("owner:") for t in custom_only), ( - f"custom_tags should NOT include owner tags, got: {custom_tag_names}" - ) - assert not any(t.name.startswith("type:") for t in custom_only), ( - f"custom_tags should NOT include type tags, got: {custom_tag_names}" - ) + assert not any( + t.name.startswith("owner:") for t in custom_only + ), f"custom_tags should NOT include owner tags, got: {custom_tag_names}" + assert not any( + t.name.startswith("type:") for t in custom_only + ), f"custom_tags should NOT include type tags, got: {custom_tag_names}" assert len(custom_only) < len(all_tags), "Should filter out implicit tags" # Verify all tags in custom_tags have type=custom for tag in custom_only: - assert tag.type == TagType.custom, ( - f"Tag {tag.name} has type {tag.type}, expected TagType.custom" - ) + assert ( + tag.type == TagType.custom + ), f"Tag {tag.name} has type {tag.type}, expected TagType.custom" # 3. API: With config=True, API returns ONLY custom tags rv = self.client.get("api/v1/dashboard/") @@ -3495,19 +3495,19 @@ def test_dashboard_custom_tags_relationship_filters_correctly(self): ) assert test_dash is not None # API returns "tags" (get_list override renames custom_tags→tags) - assert "tags" in test_dash, ( - f"Response should have tags, got: {test_dash.keys()}" - ) + assert ( + "tags" in test_dash + ), f"Response should have tags, got: {test_dash.keys()}" # API should return ONLY custom tags api_tag_names = [t["name"] for t in test_dash["tags"]] assert "critical" in api_tag_names, "API should include custom tag" - assert not any(t["name"].startswith("owner:") for t in test_dash["tags"]), ( - f"API should NOT include owner tags, got: {api_tag_names}" - ) - assert not any(t["name"].startswith("type:") for t in test_dash["tags"]), ( - f"API should NOT include type tags, got: {api_tag_names}" - ) + assert not any( + t["name"].startswith("owner:") for t in test_dash["tags"] + ), f"API should NOT include owner tags, got: {api_tag_names}" + assert not any( + t["name"].startswith("type:") for t in test_dash["tags"] + ), f"API should NOT include type tags, got: {api_tag_names}" assert len(test_dash["tags"]) == 1, ( f"API should return only 1 custom tag, " f"got {len(test_dash['tags'])}: {api_tag_names}" diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index 9342e6b7a0f7..a80dca189045 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -3278,266 +3278,274 @@ def test_available(self, get_available_engine_specs): rv = self.client.get(uri) response = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 200 - assert response == { - "databases": [ - { - "available_drivers": ["psycopg2"], - "default_driver": "psycopg2", - "engine": "postgresql", - "name": "PostgreSQL", - "parameters": { - "properties": { - "database": { - "description": "Database name", - "type": "string", - }, - "encryption": { - "description": "Use an encrypted connection to the database", # noqa: E501 - "type": "boolean", - }, - "host": { - "description": "Hostname or IP address", 
- "type": "string", - }, - "password": { - "description": "Password", - "nullable": True, - "type": "string", - }, - "port": { - "description": "Database port", - "maximum": 65536, - "minimum": 0, - "type": "integer", - }, - "query": { - "additionalProperties": {}, - "description": "Additional parameters", - "type": "object", - }, - "ssh": { - "description": "Use an ssh tunnel connection to the database", # noqa: E501 - "type": "boolean", - }, - "username": { - "description": "Username", - "nullable": True, - "type": "string", + assert ( + response + == { + "databases": [ + { + "available_drivers": ["psycopg2"], + "default_driver": "psycopg2", + "engine": "postgresql", + "name": "PostgreSQL", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", # noqa: E501 + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "ssh": { + "description": "Use an ssh tunnel connection to the database", # noqa: E501 + "type": "boolean", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", + }, }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": True, + "sqlalchemy_uri_placeholder": "postgresql://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": True, + "disable_ssh_tunneling": False, + "supports_oauth2": False, }, - "required": ["database", "host", "port", "username"], - "type": "object", - }, - "preferred": True, - "sqlalchemy_uri_placeholder": "postgresql://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": True, - "disable_ssh_tunneling": False, "supports_oauth2": False, }, - "supports_oauth2": False, - }, - { - "available_drivers": ["bigquery"], - "default_driver": "bigquery", - "engine": "bigquery", - "name": "Google BigQuery", - "parameters": { - "properties": { - "credentials_info": { - "description": "Contents of BigQuery JSON credentials.", - "type": "string", - "x-encrypted-extra": True, + { + "available_drivers": ["bigquery"], + "default_driver": "bigquery", + "engine": "bigquery", + "name": "Google BigQuery", + "parameters": { + "properties": { + "credentials_info": { + "description": ( + "Contents of BigQuery JSON " "credentials." 
+ ), + "type": "string", + "x-encrypted-extra": True, + }, + "query": {"type": "object"}, }, - "query": {"type": "object"}, + "type": "object", + }, + "preferred": True, + "sqlalchemy_uri_placeholder": "bigquery://{project_id}", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": True, + "disable_ssh_tunneling": True, + "supports_oauth2": False, }, - "type": "object", - }, - "preferred": True, - "sqlalchemy_uri_placeholder": "bigquery://{project_id}", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": True, - "disable_ssh_tunneling": True, "supports_oauth2": False, }, - "supports_oauth2": False, - }, - { - "available_drivers": ["psycopg2"], - "default_driver": "psycopg2", - "engine": "redshift", - "name": "Amazon Redshift", - "parameters": { - "properties": { - "database": { - "description": "Database name", - "type": "string", - }, - "encryption": { - "description": "Use an encrypted connection to the database", # noqa: E501 - "type": "boolean", - }, - "host": { - "description": "Hostname or IP address", - "type": "string", - }, - "password": { - "description": "Password", - "nullable": True, - "type": "string", - }, - "port": { - "description": "Database port", - "maximum": 65536, - "minimum": 0, - "type": "integer", - }, - "query": { - "additionalProperties": {}, - "description": "Additional parameters", - "type": "object", - }, - "ssh": { - "description": "Use an ssh tunnel connection to the database", # noqa: E501 - "type": "boolean", - }, - "username": { - "description": "Username", - "nullable": True, - "type": "string", + { + "available_drivers": ["psycopg2"], + "default_driver": "psycopg2", + "engine": "redshift", + "name": "Amazon Redshift", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", # noqa: E501 + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "ssh": { + "description": "Use an ssh tunnel connection to the database", # noqa: E501 + "type": "boolean", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", + }, }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "redshift+psycopg2://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": False, + "supports_oauth2": False, }, - "required": ["database", "host", "port", "username"], - "type": "object", - }, - "preferred": False, - "sqlalchemy_uri_placeholder": "redshift+psycopg2://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": False, "supports_oauth2": False, }, - "supports_oauth2": False, - }, - { - "available_drivers": ["apsw"], - "default_driver": "apsw", - "engine": "gsheets", - "name": "Google Sheets", - "parameters": { - "properties": { - "catalog": {"type": "object"}, - 
"oauth2_client_info": { - "default": { - "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth", - "scope": ( - "https://www.googleapis.com/auth/" - "drive.readonly " - "https://www.googleapis.com/auth/spreadsheets " - "https://spreadsheets.google.com/feeds" + { + "available_drivers": ["apsw"], + "default_driver": "apsw", + "engine": "gsheets", + "name": "Google Sheets", + "parameters": { + "properties": { + "catalog": {"type": "object"}, + "oauth2_client_info": { + "default": { + "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth", + "scope": ( + "https://www.googleapis.com/auth/" + "drive.readonly " + "https://www.googleapis.com/auth/" + "spreadsheets " + "https://spreadsheets.google.com/feeds" + ), + "token_request_uri": "https://oauth2.googleapis.com/token", + }, + "description": "OAuth2 client information", + "nullable": True, + "type": "string", + "x-encrypted-extra": True, + }, + "service_account_info": { + "description": ( + "Contents of GSheets JSON " "credentials." ), - "token_request_uri": "https://oauth2.googleapis.com/token", + "type": "string", + "x-encrypted-extra": True, }, - "description": "OAuth2 client information", - "nullable": True, - "type": "string", - "x-encrypted-extra": True, - }, - "service_account_info": { - "description": "Contents of GSheets JSON credentials.", - "type": "string", - "x-encrypted-extra": True, }, + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "gsheets://", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": True, + "supports_oauth2": True, }, - "type": "object", - }, - "preferred": False, - "sqlalchemy_uri_placeholder": "gsheets://", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": True, "supports_oauth2": True, }, - "supports_oauth2": True, - }, - { - "available_drivers": ["mysqlconnector", "mysqldb"], - "default_driver": "mysqldb", - "engine": "mysql", - "name": "MySQL", - "parameters": { - "properties": { - "database": { - "description": "Database name", - "type": "string", - }, - "encryption": { - "description": "Use an encrypted connection to the database", # noqa: E501 - "type": "boolean", - }, - "host": { - "description": "Hostname or IP address", - "type": "string", - }, - "password": { - "description": "Password", - "nullable": True, - "type": "string", - }, - "port": { - "description": "Database port", - "maximum": 65536, - "minimum": 0, - "type": "integer", - }, - "query": { - "additionalProperties": {}, - "description": "Additional parameters", - "type": "object", - }, - "ssh": { - "description": "Use an ssh tunnel connection to the database", # noqa: E501 - "type": "boolean", - }, - "username": { - "description": "Username", - "nullable": True, - "type": "string", + { + "available_drivers": ["mysqlconnector", "mysqldb"], + "default_driver": "mysqldb", + "engine": "mysql", + "name": "MySQL", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", # noqa: E501 + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + 
"additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "ssh": { + "description": "Use an ssh tunnel connection to the database", # noqa: E501 + "type": "boolean", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", + }, }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "mysql://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": False, + "supports_oauth2": False, }, - "required": ["database", "host", "port", "username"], - "type": "object", - }, - "preferred": False, - "sqlalchemy_uri_placeholder": "mysql://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": False, "supports_oauth2": False, }, - "supports_oauth2": False, - }, - { - "available_drivers": [""], - "engine": "hana", - "name": "SAP HANA", - "preferred": False, - "sqlalchemy_uri_placeholder": "engine+driver://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": False, + { + "available_drivers": [""], + "engine": "hana", + "name": "SAP HANA", + "preferred": False, + "sqlalchemy_uri_placeholder": "engine+driver://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": False, + "supports_oauth2": False, + }, "supports_oauth2": False, }, - "supports_oauth2": False, - }, - ] - } + ] + } + ) @with_config({"PREFERRED_DATABASES": ["MySQL"]}) @mock.patch("superset.databases.api.get_available_engine_specs") diff --git a/tests/integration_tests/security/row_level_security_tests.py b/tests/integration_tests/security/row_level_security_tests.py index cab80b49476e..6435bbb7f7f3 100644 --- a/tests/integration_tests/security/row_level_security_tests.py +++ b/tests/integration_tests/security/row_level_security_tests.py @@ -341,12 +341,12 @@ def test_rls_filter_applies_to_virtual_dataset(self): # Gamma user should have the name filters (A%, B%, Q%) and gender filter # Note: SQL uses uppercase LIKE and %% escaping sql_lower = sql.lower() - assert "name like 'a%" in sql_lower or "name like 'q%" in sql_lower, ( - f"RLS name filters not found in virtual dataset query: {sql}" - ) - assert "gender = 'boy'" in sql_lower, ( - f"RLS gender filter not found in virtual dataset query: {sql}" - ) + assert ( + "name like 'a%" in sql_lower or "name like 'q%" in sql_lower + ), f"RLS name filters not found in virtual dataset query: {sql}" + assert ( + "gender = 'boy'" in sql_lower + ), f"RLS gender filter not found in virtual dataset query: {sql}" # Test as admin user who has no RLS filters g.user = self.get_user(username="admin") @@ -395,12 +395,12 @@ def test_rls_filter_applies_to_virtual_dataset_with_join(self): # Verify that RLS filters from both physical tables are applied # birth_names filters sql_lower = sql.lower() - assert "name like 'a%" in sql_lower or "name like 'q%" in sql_lower, ( - f"birth_names RLS filters not found: {sql}" - ) - assert "gender = 'boy'" in sql_lower, ( - f"birth_names gender filter not found: {sql}" - ) + assert ( + "name like 'a%" in sql_lower or "name like 'q%" in 
sql_lower + ), f"birth_names RLS filters not found: {sql}" + assert ( + "gender = 'boy'" in sql_lower + ), f"birth_names gender filter not found: {sql}" # energy_usage filter assert "value > 1" in sql_lower, f"energy_usage RLS filter not found: {sql}" diff --git a/tests/integration_tests/sql_lab/commands_tests.py b/tests/integration_tests/sql_lab/commands_tests.py index e3c09fc50918..23a9e3274e6c 100644 --- a/tests/integration_tests/sql_lab/commands_tests.py +++ b/tests/integration_tests/sql_lab/commands_tests.py @@ -87,10 +87,15 @@ def test_run_timeout(self, is_feature_enabled) -> None: assert ( ex_info.value.error.error_type == SupersetErrorType.SQLLAB_TIMEOUT_ERROR ) - assert ex_info.value.error.message == __( - "The query estimation was killed after %(sqllab_timeout)s seconds. It might " # noqa: E501 - "be too complex, or the database might be under heavy load.", - sqllab_timeout=current_app.config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"], + assert ( + ex_info.value.error.message + == __( + "The query estimation was killed after %(sqllab_timeout)s seconds. It might " # noqa: E501 + "be too complex, or the database might be under heavy load.", + sqllab_timeout=current_app.config[ + "SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT" + ], + ) ) def test_run_success(self) -> None: diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py index f770b7273196..8de5d741781b 100644 --- a/tests/integration_tests/sqla_models_tests.py +++ b/tests/integration_tests/sqla_models_tests.py @@ -1263,9 +1263,9 @@ def mock_get_df(sql, catalog=None, schema=None, mutator=None): result = table.query(query_obj) expected_order = ["metric_y", "col_b", "metric_x", "col_a"] - assert list(result.df.columns) == expected_order, ( - f"Expected {expected_order}, got {list(result.df.columns)}" - ) + assert ( + list(result.df.columns) == expected_order + ), f"Expected {expected_order}, got {list(result.df.columns)}" finally: db.session.delete(table) db.session.commit() diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index 99c347d95f9d..edbd6b8404bb 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -812,30 +812,33 @@ def test_sql_json_soft_timeout(self): handle_cursor.side_effect = SoftTimeLimitExceeded() data = self.run_sql("SELECT * FROM birth_names LIMIT 1", "1") - assert data == { - "errors": [ - { - "message": ( - "The query was killed after 21600 seconds. It might be too complex, " # noqa: E501 - "or the database might be under heavy load." - ), - "error_type": SupersetErrorType.SQLLAB_TIMEOUT_ERROR, - "level": ErrorLevel.ERROR, - "extra": { - "issue_codes": [ - { - "code": 1026, - "message": "Issue 1026 - Query is too complex and takes too long to run.", # noqa: E501 - }, - { - "code": 1027, - "message": "Issue 1027 - The database is currently running too many queries.", # noqa: E501 - }, - ] - }, - } - ] - } + assert ( + data + == { + "errors": [ + { + "message": ( + "The query was killed after 21600 seconds. It might be too complex, " # noqa: E501 + "or the database might be under heavy load." 
+ ), + "error_type": SupersetErrorType.SQLLAB_TIMEOUT_ERROR, + "level": ErrorLevel.ERROR, + "extra": { + "issue_codes": [ + { + "code": 1026, + "message": "Issue 1026 - Query is too complex and takes too long to run.", # noqa: E501 + }, + { + "code": 1027, + "message": "Issue 1027 - The database is currently running too many queries.", # noqa: E501 + }, + ] + }, + } + ] + } + ) @pytest.mark.parametrize("spec", [HiveEngineSpec, PrestoEngineSpec]) diff --git a/tests/integration_tests/tags/api_tests.py b/tests/integration_tests/tags/api_tests.py index ba4ce8aba6c7..33a4b7e6d2ad 100644 --- a/tests/integration_tests/tags/api_tests.py +++ b/tests/integration_tests/tags/api_tests.py @@ -861,9 +861,9 @@ def test_create_tag_mysql_compatibility(self) -> None: # Critical check: ensure the tag name is a plain string, not Markup assert isinstance(created_tag.name, str), "Tag name should be a plain string" - assert not isinstance(created_tag.name, Markup), ( - "Tag name should NOT be a Markup object" - ) + assert not isinstance( + created_tag.name, Markup + ), "Tag name should NOT be a Markup object" assert created_tag.name.__class__ is str, "Tag name should be exactly str type" assert created_tag.name == tag_name, "Tag name should match the input" diff --git a/tests/integration_tests/tags/mysql_compatibility_test.py b/tests/integration_tests/tags/mysql_compatibility_test.py index 6cb457ae368f..1045add6f729 100644 --- a/tests/integration_tests/tags/mysql_compatibility_test.py +++ b/tests/integration_tests/tags/mysql_compatibility_test.py @@ -70,9 +70,9 @@ def test_create_tag_returns_string_not_markup(self) -> None: # Critical checks for MySQL compatibility assert isinstance(tag.name, str), "Tag name should be a plain string" - assert not isinstance(tag.name, Markup), ( - "Tag name should NOT be a Markup object" - ) + assert not isinstance( + tag.name, Markup + ), "Tag name should NOT be a Markup object" assert tag.name.__class__ is str, "Tag name should be exactly str type" assert tag.name == tag_name, f"Tag name should be '{tag_name}'" @@ -82,15 +82,15 @@ def test_create_tag_returns_string_not_markup(self) -> None: # Retrieve the tag from database to ensure it was stored correctly retrieved_tag = db.session.query(Tag).filter_by(name=tag_name).first() assert retrieved_tag is not None, "Tag should be retrievable from database" - assert isinstance(retrieved_tag.name, str), ( - "Retrieved tag name should be a string" - ) - assert not isinstance(retrieved_tag.name, Markup), ( - "Retrieved tag name should NOT be Markup" - ) - assert retrieved_tag.name == tag_name, ( - "Retrieved tag name should match original" - ) + assert isinstance( + retrieved_tag.name, str + ), "Retrieved tag name should be a string" + assert not isinstance( + retrieved_tag.name, Markup + ), "Retrieved tag name should NOT be Markup" + assert ( + retrieved_tag.name == tag_name + ), "Retrieved tag name should match original" def test_create_multiple_tags_no_sql_error(self) -> None: """ @@ -110,12 +110,12 @@ def test_create_multiple_tags_no_sql_error(self) -> None: tag = get_tag(tag_name, db.session, TagType.custom) created_tags.append(tag) - assert isinstance(tag.name, str), ( - f"Tag '{tag_name}' name should be a string" - ) - assert not isinstance(tag.name, Markup), ( - f"Tag '{tag_name}' should NOT be Markup" - ) + assert isinstance( + tag.name, str + ), f"Tag '{tag_name}' name should be a string" + assert not isinstance( + tag.name, Markup + ), f"Tag '{tag_name}' should NOT be Markup" except ProgrammingError as e: pytest.fail( 
f"ProgrammingError should not occur when creating tags: {e}", @@ -129,9 +129,9 @@ def test_create_multiple_tags_no_sql_error(self) -> None: for tag_name in tag_names: tag = db.session.query(Tag).filter_by(name=tag_name).first() assert tag is not None, f"Tag '{tag_name}' should exist in database" - assert isinstance(tag.name, str), ( - f"Tag '{tag_name}' should have string name" - ) + assert isinstance( + tag.name, str + ), f"Tag '{tag_name}' should have string name" def test_create_tag_with_special_characters(self) -> None: """ @@ -152,23 +152,23 @@ def test_create_tag_with_special_characters(self) -> None: tag = get_tag(tag_name, db.session, TagType.custom) assert isinstance(tag.name, str), f"Tag '{tag_name}' should be a string" - assert not isinstance(tag.name, Markup), ( - f"Tag '{tag_name}' should NOT be Markup" - ) - assert tag.name == tag_name, ( - f"Tag name should match input: '{tag_name}'" - ) + assert not isinstance( + tag.name, Markup + ), f"Tag '{tag_name}' should NOT be Markup" + assert ( + tag.name == tag_name + ), f"Tag name should match input: '{tag_name}'" db.session.commit() # Verify database persistence retrieved_tag = db.session.query(Tag).filter_by(name=tag_name).first() - assert retrieved_tag is not None, ( - f"Tag '{tag_name}' should be in database" - ) - assert retrieved_tag.name == tag_name, ( - f"Retrieved tag name should match: '{tag_name}'" - ) + assert ( + retrieved_tag is not None + ), f"Tag '{tag_name}' should be in database" + assert ( + retrieved_tag.name == tag_name + ), f"Retrieved tag name should match: '{tag_name}'" except ProgrammingError as e: pytest.fail( @@ -187,12 +187,12 @@ def test_get_existing_tag_returns_string(self) -> None: retrieved_tag = get_tag(tag_name, db.session, TagType.custom) assert retrieved_tag.id == original_tag.id, "Should retrieve the same tag" - assert isinstance(retrieved_tag.name, str), ( - "Retrieved tag name should be a string" - ) - assert not isinstance(retrieved_tag.name, Markup), ( - "Retrieved tag name should NOT be Markup" - ) + assert isinstance( + retrieved_tag.name, str + ), "Retrieved tag name should be a string" + assert not isinstance( + retrieved_tag.name, Markup + ), "Retrieved tag name should NOT be Markup" def test_tag_with_whitespace_handling(self) -> None: """Test that tags with leading/trailing whitespace are handled correctly.""" @@ -224,12 +224,12 @@ def test_tag_type_variations(self) -> None: try: tag = get_tag(tag_name, db.session, tag_type) - assert isinstance(tag.name, str), ( - f"Tag name for {tag_type} should be a string" - ) - assert not isinstance(tag.name, Markup), ( - f"Tag name for {tag_type} should NOT be Markup" - ) + assert isinstance( + tag.name, str + ), f"Tag name for {tag_type} should be a string" + assert not isinstance( + tag.name, Markup + ), f"Tag name for {tag_type} should NOT be Markup" assert tag.type == tag_type, f"Tag type should be {tag_type}" db.session.commit() @@ -254,9 +254,9 @@ def test_no_sql_syntax_error_on_commit(self) -> None: # Verify the tag exists in the database retrieved_tag = db.session.query(Tag).filter_by(name=tag_name).first() assert retrieved_tag is not None, "Tag should exist after commit" - assert isinstance(retrieved_tag.name, str), ( - "Retrieved tag should have string name" - ) + assert isinstance( + retrieved_tag.name, str + ), "Retrieved tag should have string name" except ProgrammingError as e: pytest.fail(f"ProgrammingError should not occur during commit: {e}") @@ -274,12 +274,12 @@ def test_tag_name_is_pure_string_type(self) -> None: tag = 
get_tag(tag_name, db.session, TagType.custom) # Check that it's exactly a str, not a subclass - assert tag.name.__class__ is str, ( - f"Tag name type should be exactly 'str', got {type(tag.name)}" - ) - assert not isinstance(tag.name, Markup), ( - "Tag name should not be a Markup instance" - ) + assert ( + tag.name.__class__ is str + ), f"Tag name type should be exactly 'str', got {type(tag.name)}" + assert not isinstance( + tag.name, Markup + ), "Tag name should not be a Markup instance" # Markup is a subclass of str, so this additional check is important assert tag.name.__class__.__name__ == "str", "Tag name class should be 'str'" diff --git a/tests/unit_tests/commands/chart/warm_up_cache_test.py b/tests/unit_tests/commands/chart/warm_up_cache_test.py index 7eaefafc08a2..078bb5d81b1b 100644 --- a/tests/unit_tests/commands/chart/warm_up_cache_test.py +++ b/tests/unit_tests/commands/chart/warm_up_cache_test.py @@ -237,12 +237,12 @@ def test_handles_empty_dashboard_filters( ChartWarmUpCacheCommand(chart, 42, None).run() # VALIDATE: No filters added (empty list case) - assert len(mock_query.filter) == 0, ( - "No filters should be added when dashboard has no filters" - ) - assert mock_get_dashboard_filters.called, ( - "Should still call get_dashboard_extra_filters" - ) + assert ( + len(mock_query.filter) == 0 + ), "No filters should be added when dashboard has no filters" + assert ( + mock_get_dashboard_filters.called + ), "Should still call get_dashboard_extra_filters" @patch("superset.commands.chart.warm_up_cache.ChartDataCommand") @@ -305,12 +305,12 @@ def test_none_query_context_raises_chart_invalid_error(mock_chart_data_command): assert result["viz_error"] is not None, "Should return an error" assert result["chart_id"] == 129 error_str = str(result["viz_error"]).lower() - assert "query context" in error_str, ( - f"Error should mention query context: {result['viz_error']}" - ) - assert "not exist" in error_str, ( - f"Error should mention not exist: {result['viz_error']}" - ) + assert ( + "query context" in error_str + ), f"Error should mention query context: {result['viz_error']}" + assert ( + "not exist" in error_str + ), f"Error should mention not exist: {result['viz_error']}" @patch("superset.commands.chart.warm_up_cache.viz_types", ["table"]) @@ -336,12 +336,12 @@ def test_legacy_chart_without_datasource_raises_error(): assert result["viz_error"] is not None, "Should return an error" assert result["chart_id"] == 130 error_str = str(result["viz_error"]).lower() - assert "datasource" in error_str, ( - f"Error should mention datasource: {result['viz_error']}" - ) - assert "not exist" in error_str, ( - f"Error should mention not exist: {result['viz_error']}" - ) + assert ( + "datasource" in error_str + ), f"Error should mention datasource: {result['viz_error']}" + assert ( + "not exist" in error_str + ), f"Error should mention not exist: {result['viz_error']}" @patch("superset.commands.chart.warm_up_cache.get_dashboard_extra_filters") diff --git a/tests/unit_tests/common/test_query_context_processor.py b/tests/unit_tests/common/test_query_context_processor.py index c83d5b728bac..f79e521399b4 100644 --- a/tests/unit_tests/common/test_query_context_processor.py +++ b/tests/unit_tests/common/test_query_context_processor.py @@ -912,9 +912,9 @@ def mock_validate(*args, **kwargs): def mock_cache_key(*args, **kwargs): call_order.append("cache_key") # Verify that extras have been sanitized at this point - assert query_obj.extras["where"] == "(col1 > 0)", ( - f"Expected sanitized clause in 
cache_key, got: {query_obj.extras['where']}" - ) + assert ( + query_obj.extras["where"] == "(col1 > 0)" + ), f"Expected sanitized clause in cache_key, got: {query_obj.extras['where']}" return original_cache_key(*args, **kwargs) with patch.object(query_obj, "validate", side_effect=mock_validate): @@ -1172,9 +1172,9 @@ def test_cache_key_preserves_other_post_processing_options(): ) # Cache keys should differ because rename_columns is different - assert query1.cache_key() != query2.cache_key(), ( - "Cache keys should differ when other post_processing options differ" - ) + assert ( + query1.cache_key() != query2.cache_key() + ), "Cache keys should differ when other post_processing options differ" def test_cache_key_non_contribution_post_processing_unchanged(): @@ -1214,9 +1214,9 @@ def test_cache_key_non_contribution_post_processing_unchanged(): ) # Cache keys should differ because aggregates option is different - assert query1.cache_key() != query2.cache_key(), ( - "Cache keys should differ for different non-contribution post_processing" - ) + assert ( + query1.cache_key() != query2.cache_key() + ), "Cache keys should differ for different non-contribution post_processing" def test_force_cached_normalizes_totals_query_row_limit(): diff --git a/tests/unit_tests/connectors/sqla/models_test.py b/tests/unit_tests/connectors/sqla/models_test.py index 437ddf9151ba..045560bab23c 100644 --- a/tests/unit_tests/connectors/sqla/models_test.py +++ b/tests/unit_tests/connectors/sqla/models_test.py @@ -187,10 +187,13 @@ def test_query_datasources_by_permissions_with_catalog_schema( ["[my_db].[db1].[schema1]", "[my_other_db].[schema]"], # type: ignore ) clause = db.session.query().filter_by().filter.mock_calls[0].args[0] - assert str(clause.compile(engine, compile_kwargs={"literal_binds": True})) == ( - "tables.perm IN ('[my_db].[table1](id:1)') OR " - "tables.schema_perm IN ('[my_db].[db1].[schema1]', '[my_other_db].[schema]') OR " # noqa: E501 - "tables.catalog_perm IN ('[my_db].[db1]')" + assert ( + str(clause.compile(engine, compile_kwargs={"literal_binds": True})) + == ( + "tables.perm IN ('[my_db].[table1](id:1)') OR " + "tables.schema_perm IN ('[my_db].[db1].[schema1]', '[my_other_db].[schema]') OR " # noqa: E501 + "tables.catalog_perm IN ('[my_db].[db1]')" + ) ) @@ -763,9 +766,9 @@ def test_get_sqla_table_quoting_for_cross_catalog( # The compiled SQL should contain each part quoted separately assert expected_in_sql in compiled, f"Expected {expected_in_sql} in SQL: {compiled}" # Should NOT have the entire identifier quoted as one string - assert not_expected_in_sql not in compiled, ( - f"Should not have {not_expected_in_sql} in SQL: {compiled}" - ) + assert ( + not_expected_in_sql not in compiled + ), f"Should not have {not_expected_in_sql} in SQL: {compiled}" def test_get_sqla_table_without_cross_catalog_ignores_catalog( diff --git a/tests/unit_tests/dao/base_dao_test.py b/tests/unit_tests/dao/base_dao_test.py index eb7d20adfd0a..d7835d484067 100644 --- a/tests/unit_tests/dao/base_dao_test.py +++ b/tests/unit_tests/dao/base_dao_test.py @@ -143,9 +143,9 @@ class TestModel(Base_test): # type: ignore } # Ensure we've tested all operators - assert tested_operators == all_operators, ( - f"Missing operators: {all_operators - tested_operators}" - ) + assert ( + tested_operators == all_operators + ), f"Missing operators: {all_operators - tested_operators}" def test_find_by_ids_sqlalchemy_error_with_model_cls(): diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py index 
0785d762e50e..3f7810df41f5 100644 --- a/tests/unit_tests/databases/api_test.py +++ b/tests/unit_tests/databases/api_test.py @@ -255,7 +255,7 @@ def test_database_connection( "service_account_info": { "type": "service_account", "project_id": "black-sanctum-314419", - "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", # noqa: E501 "private_key": "XXXXXXXXXX", "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", # noqa: E501 "client_id": "114567578578109757129", diff --git a/tests/unit_tests/db_engine_specs/test_gsheets.py b/tests/unit_tests/db_engine_specs/test_gsheets.py index 148064d8a6fb..ade17b6bc129 100644 --- a/tests/unit_tests/db_engine_specs/test_gsheets.py +++ b/tests/unit_tests/db_engine_specs/test_gsheets.py @@ -160,58 +160,61 @@ def test_validate_parameters_catalog( } errors = GSheetsEngineSpec.validate_parameters(properties) # ignore: type - assert errors == [ - SupersetError( - message=( - "The URL could not be identified. Please check for typos " - "and make sure that ‘Type of Google Sheets allowed’ " - "selection matches the input." - ), - error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, - level=ErrorLevel.WARNING, - extra={ - "catalog": { - "idx": 0, - "url": True, - }, - "issue_codes": [ - { - "code": 1003, - "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501 - }, - { - "code": 1005, - "message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501 + assert ( + errors + == [ + SupersetError( + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), + error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.WARNING, + extra={ + "catalog": { + "idx": 0, + "url": True, }, - ], - }, - ), - SupersetError( - message=( - "The URL could not be identified. Please check for typos " - "and make sure that ‘Type of Google Sheets allowed’ " - "selection matches the input." - ), - error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, - level=ErrorLevel.WARNING, - extra={ - "catalog": { - "idx": 2, - "url": True, + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501 + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501 + }, + ], }, - "issue_codes": [ - { - "code": 1003, - "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.", # noqa: E501 - }, - { - "code": 1005, - "message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501 + ), + SupersetError( + message=( + "The URL could not be identified. Please check for typos " + "and make sure that ‘Type of Google Sheets allowed’ " + "selection matches the input." + ), + error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, + level=ErrorLevel.WARNING, + extra={ + "catalog": { + "idx": 2, + "url": True, }, - ], - }, - ), - ] + "issue_codes": [ + { + "code": 1003, + "message": "Issue 1003 - There is a syntax error in the SQL query. 
Perhaps there was a misspelling or a typo.", # noqa: E501 + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501 + }, + ], + }, + ), + ] + ) create_engine.assert_called_with( "gsheets://", diff --git a/tests/unit_tests/db_engine_specs/test_mssql.py b/tests/unit_tests/db_engine_specs/test_mssql.py index e0ce5e1180c8..e1c9ab28c060 100644 --- a/tests/unit_tests/db_engine_specs/test_mssql.py +++ b/tests/unit_tests/db_engine_specs/test_mssql.py @@ -394,28 +394,31 @@ def test_extract_errors() -> None: result = MssqlEngineSpec.extract_errors( Exception(msg), context={"username": "testuser", "database": "testdb"} ) - assert result == [ - SupersetError( - message='Either the username "testuser", password, or database name "testdb" is incorrect.', # noqa: E501 - error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, - level=ErrorLevel.ERROR, - extra={ - "engine_name": "Microsoft SQL Server", - "issue_codes": [ - { - "code": 1014, - "message": "Issue 1014 - Either the username or " - "the password is wrong.", - }, - { - "code": 1015, - "message": "Issue 1015 - Either the database is " - "spelled incorrectly or does not exist.", - }, - ], - }, - ) - ] + assert ( + result + == [ + SupersetError( + message='Either the username "testuser", password, or database name "testdb" is incorrect.', # noqa: E501 + error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Microsoft SQL Server", + "issue_codes": [ + { + "code": 1014, + "message": "Issue 1014 - Either the username or " + "the password is wrong.", + }, + { + "code": 1015, + "message": "Issue 1015 - Either the database is " + "spelled incorrectly or does not exist.", + }, + ], + }, + ) + ] + ) @pytest.mark.parametrize( diff --git a/tests/unit_tests/db_engine_specs/test_ocient.py b/tests/unit_tests/db_engine_specs/test_ocient.py index 82c7c206bfcb..445a118b7be5 100644 --- a/tests/unit_tests/db_engine_specs/test_ocient.py +++ b/tests/unit_tests/db_engine_specs/test_ocient.py @@ -223,9 +223,9 @@ def test_connection_errors(msg: str, expected: SupersetError) -> None: assert result == [expected] -def _generate_gis_type_sanitization_test_cases() -> list[ - tuple[str, int, Any, dict[str, Any]] -]: +def _generate_gis_type_sanitization_test_cases() -> ( + list[tuple[str, int, Any, dict[str, Any]]] +): if not ocient_is_installed(): return [] diff --git a/tests/unit_tests/models/helpers_test.py b/tests/unit_tests/models/helpers_test.py index e913526975fb..019bf7d46a3a 100644 --- a/tests/unit_tests/models/helpers_test.py +++ b/tests/unit_tests/models/helpers_test.py @@ -477,9 +477,9 @@ def condition_factory(col_name: str, expr): has_single_quotes = "'Others'" in select_sql and "'Others'" in groupby_sql has_double_quotes = '"Others"' in select_sql and '"Others"' in groupby_sql - assert has_single_quotes or has_double_quotes, ( - "Others literal should be quoted with either single or double quotes" - ) + assert ( + has_single_quotes or has_double_quotes + ), "Others literal should be quoted with either single or double quotes" # Verify the structure of the generated SQL assert "CASE WHEN" in select_sql @@ -1121,13 +1121,13 @@ def test_process_select_expression_end_to_end(database: Database) -> None: # sqlglot may normalize the SQL slightly, so we check the result exists # and doesn't contain the SELECT prefix assert result is not None, f"Failed to process: {expression}" - assert not result.upper().startswith("SELECT"), ( - f"Result still has 
SELECT prefix: {result}" - ) + assert not result.upper().startswith( + "SELECT" + ), f"Result still has SELECT prefix: {result}" # The result should contain the core expression (case-insensitive check) - assert expected.replace(" ", "").lower() in result.replace(" ", "").lower(), ( - f"Expected '{expected}' to be in result '{result}' for input '{expression}'" - ) + assert ( + expected.replace(" ", "").lower() in result.replace(" ", "").lower() + ), f"Expected '{expected}' to be in result '{result}' for input '{expression}'" def test_reapply_query_filters_with_granularity(database: Database) -> None: @@ -1641,9 +1641,9 @@ def test_adhoc_column_with_spaces_generates_quoted_sql(database: Database) -> No ) ) - assert '"Order Total"' in sql_numeric, ( - f"Expected quoted column name in SQL: {sql_numeric}" - ) + assert ( + '"Order Total"' in sql_numeric + ), f"Expected quoted column name in SQL: {sql_numeric}" def test_adhoc_column_with_spaces_in_full_query(database: Database) -> None: diff --git a/tests/unit_tests/sql/dialects/pinot_tests.py b/tests/unit_tests/sql/dialects/pinot_tests.py index 57f08381875e..6a3dab751a02 100644 --- a/tests/unit_tests/sql/dialects/pinot_tests.py +++ b/tests/unit_tests/sql/dialects/pinot_tests.py @@ -876,6 +876,6 @@ def test_pinot_function_names_preserved(function_name: str, sample_args: str) -> generated = parsed.sql(dialect=Pinot) # The function name should be preserved (case-insensitive check) - assert function_name.upper() in generated.upper(), ( - f"Function {function_name} not preserved in output: {generated}" - ) + assert ( + function_name.upper() in generated.upper() + ), f"Function {function_name} not preserved in output: {generated}" diff --git a/tests/unit_tests/tags/models_test.py b/tests/unit_tests/tags/models_test.py index 93054f4dac44..325e3636e6fc 100644 --- a/tests/unit_tests/tags/models_test.py +++ b/tests/unit_tests/tags/models_test.py @@ -74,12 +74,12 @@ def test_get_tag_with_special_characters() -> None: for tag_name in tag_names: result = get_tag(tag_name, mock_session, TagType.custom) - assert isinstance(result.name, str), ( - f"Tag name '{tag_name}' should be a plain string" - ) - assert not isinstance(result.name, Markup), ( - f"Tag name '{tag_name}' should NOT be a Markup object" - ) + assert isinstance( + result.name, str + ), f"Tag name '{tag_name}' should be a plain string" + assert not isinstance( + result.name, Markup + ), f"Tag name '{tag_name}' should NOT be a Markup object" assert result.name == tag_name, f"Tag name should match input: '{tag_name}'" @@ -106,12 +106,12 @@ def test_get_tag_with_html_characters() -> None: for tag_name in tag_names: result = get_tag(tag_name, mock_session, TagType.custom) - assert isinstance(result.name, str), ( - f"Tag name '{tag_name}' should be a plain string" - ) - assert not isinstance(result.name, Markup), ( - f"Tag name '{tag_name}' should NOT be a Markup object" - ) + assert isinstance( + result.name, str + ), f"Tag name '{tag_name}' should be a plain string" + assert not isinstance( + result.name, Markup + ), f"Tag name '{tag_name}' should NOT be a Markup object" # Name should remain unchanged (not HTML-escaped) assert result.name == tag_name, f"Tag name should not be escaped: '{tag_name}'" @@ -135,12 +135,12 @@ def test_get_tag_strips_whitespace() -> None: result = get_tag(input_name, mock_session, TagType.custom) assert isinstance(result.name, str), "Tag name should be a plain string" - assert not isinstance(result.name, Markup), ( - "Tag name should NOT be a Markup object" - ) - assert 
result.name == expected_name, ( - f"Tag name should be stripped: '{expected_name}'" - ) + assert not isinstance( + result.name, Markup + ), "Tag name should NOT be a Markup object" + assert ( + result.name == expected_name + ), f"Tag name should be stripped: '{expected_name}'" def test_get_tag_returns_existing_tag() -> None: @@ -190,9 +190,9 @@ def test_get_tag_creates_new_tag() -> None: added_tag = mock_session.add.call_args[0][0] assert isinstance(added_tag, Tag), "Should add a Tag object" assert isinstance(added_tag.name, str), "Tag name should be a plain string" - assert not isinstance(added_tag.name, Markup), ( - "Tag name should NOT be a Markup object" - ) + assert not isinstance( + added_tag.name, Markup + ), "Tag name should NOT be a Markup object" assert added_tag.name == tag_name, "Tag name should match" assert added_tag.type == tag_type, "Tag type should match" @@ -215,12 +215,12 @@ def test_get_tag_with_different_tag_types() -> None: tag_name = f"tag-{tag_type.name}" result = get_tag(tag_name, mock_session, tag_type) - assert isinstance(result.name, str), ( - f"Tag name for {tag_type} should be a plain string" - ) - assert not isinstance(result.name, Markup), ( - f"Tag name for {tag_type} should NOT be a Markup object" - ) + assert isinstance( + result.name, str + ), f"Tag name for {tag_type} should be a plain string" + assert not isinstance( + result.name, Markup + ), f"Tag name for {tag_type} should NOT be a Markup object" assert result.type == tag_type, f"Tag type should be {tag_type}" @@ -240,24 +240,24 @@ def test_tag_name_type_after_database_operation() -> None: result = get_tag(tag_name, mock_session, TagType.custom) # Verify the tag object before database operations - assert isinstance(result.name, str), ( - "Tag name should be a string before DB operations" - ) - assert not isinstance(result.name, Markup), ( - "Tag name should NOT be Markup before DB operations" - ) + assert isinstance( + result.name, str + ), "Tag name should be a string before DB operations" + assert not isinstance( + result.name, Markup + ), "Tag name should NOT be Markup before DB operations" # Verify that session.add was called with the correct tag mock_session.add.assert_called_once() added_tag = mock_session.add.call_args[0][0] # The critical check: ensure the name passed to the database is a plain string - assert isinstance(added_tag.name, str), ( - "Tag name should be a plain string when added to session" - ) - assert not isinstance(added_tag.name, Markup), ( - "Tag name should NOT be Markup when added to session" - ) - assert added_tag.name.__class__ is str, ( - "Tag name should be exactly str type, not a subclass" - ) + assert isinstance( + added_tag.name, str + ), "Tag name should be a plain string when added to session" + assert not isinstance( + added_tag.name, Markup + ), "Tag name should NOT be Markup when added to session" + assert ( + added_tag.name.__class__ is str + ), "Tag name should be exactly str type, not a subclass" diff --git a/tests/unit_tests/utils/test_core.py b/tests/unit_tests/utils/test_core.py index 37b8ed1877a1..b9478e79c782 100644 --- a/tests/unit_tests/utils/test_core.py +++ b/tests/unit_tests/utils/test_core.py @@ -637,9 +637,9 @@ def test_get_user_agent(mocker: MockerFixture, app_context: None) -> None: database_mock = mocker.MagicMock() database_mock.database_name = "mydb" - assert get_user_agent(database_mock, QuerySource.DASHBOARD) == "Apache Superset", ( - "The default user agent should be returned" - ) + assert ( + get_user_agent(database_mock, 
QuerySource.DASHBOARD) == "Apache Superset"
+    ), "The default user agent should be returned"


 @with_config(
@@ -652,9 +652,9 @@ def test_get_user_agent_custom(mocker: MockerFixture, app_context: None) -> None
     database_mock = mocker.MagicMock()
     database_mock.database_name = "mydb"

-    assert get_user_agent(database_mock, QuerySource.DASHBOARD) == "mydb DASHBOARD", (
-        "the custom user agent function result should have been returned"
-    )
+    assert (
+        get_user_agent(database_mock, QuerySource.DASHBOARD) == "mydb DASHBOARD"
+    ), "the custom user agent function result should have been returned"


 def test_merge_extra_filters():
diff --git a/tests/unit_tests/utils/test_screenshot_cache_fix.py b/tests/unit_tests/utils/test_screenshot_cache_fix.py
index 8d0946abdf9e..ce077480c532 100644
--- a/tests/unit_tests/utils/test_screenshot_cache_fix.py
+++ b/tests/unit_tests/utils/test_screenshot_cache_fix.py
@@ -163,9 +163,9 @@ def check_cache_during_screenshot(*args, **kwargs):
             cache_key = screenshot_obj.get_cache_key()
             cached_value = BaseScreenshot.cache.get(cache_key)
             # Cache should be empty during screenshot generation
-            assert cached_value is None, (
-                "Cache should not be saved during COMPUTING state"
-            )
+            assert (
+                cached_value is None
+            ), "Cache should not be saved during COMPUTING state"
             return b"image_data"

         mocker.patch(

From bd9478745801e8955800700c700521d7afb0591d Mon Sep 17 00:00:00 2001
From: Frank Zhang
Date: Tue, 2 Dec 2025 12:09:21 -0500
Subject: [PATCH 11/18] fix: address issues raised by Copilot review

---
 .../src/DataTable/DataTable.tsx               | 48 ++++++++++++++++---
 .../useExploreAdditionalActionsMenu/index.jsx | 22 +++++++++
 2 files changed, 64 insertions(+), 6 deletions(-)

diff --git a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx
index bf0e380dc99d..0f444140572c 100644
--- a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx
+++ b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx
@@ -466,16 +466,49 @@ export default typedMemo(function DataTable<D extends object>({
   const rafRef = useRef<number | null>(null);
   const lastSigRef = useRef('');

+  // Prefer a stable identifier from original row data; otherwise use a deterministic
+  // concatenation of visible values (keys sorted, so only column set changes matter).
+  function stableRowKey(r: Row<D>): string {
+    const orig = r.original as Record<string, unknown> | undefined;
+    if (orig) {
+      const idLike =
+        (orig as any).id ??
+        (orig as any).ID ??
+        (orig as any).key ??
+        (orig as any).uuid;
+      if (idLike != null) return String(idLike);
+    }
+
+    // Fallback: derive from row.values, but make it stable against column order changes.
+    const v = r.values as Record<string, unknown>;
+    const keys = Object.keys(v).sort(); // order-insensitive, but added/removed columns are detected
+    return keys.map(k => String(v[k] ?? '')).join('|');
+  }
+
+  // Very small, fast hash for strings (no crypto dependency).
+  function hashString(s: string): string {
+    let h = 0;
+    for (let i = 0; i < s.length; i+=1) {
+      h = (h * 31 + s.charCodeAt(i)) | 0;
+    }
+    return String(h);
+  }
+
+  function signatureOfRows(rs: Row<D>[]): string {
+    const keys = rs.map(stableRowKey);
+    const len = keys.length;
+    const first = keys[0] ?? '';
+    const last = keys[len - 1] ??
''; + const digest = hashString(keys.join('\u0001')); // non-printable separator to avoid collisions + return `${len}|${first}|${last}|${digest}`; + } + useEffect(() => { if (serverPagination || typeof onFilteredRowsChange !== 'function') { return; } - const filtered = rows.map(r => r.original as D); - const len = filtered.length; - const first = len ? Object.values(filtered[0] as any)[0] : ''; - const last = len ? Object.values(filtered[len - 1] as any)[0] : ''; - const sig = `${len}|${String(first)}|${String(last)}`; + const sig = signatureOfRows(rows); if (sig !== lastSigRef.current) { lastSigRef.current = sig; @@ -483,7 +516,10 @@ export default typedMemo(function DataTable({ cancelAnimationFrame(rafRef.current); } rafRef.current = requestAnimationFrame(() => { - if (isMountedRef.current) onFilteredRowsChange(filtered); + if (isMountedRef.current) { + // Only emit originals when the signature truly changed + onFilteredRowsChange(rows.map(r => r.original as D)); + } }); } diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx index 8a2c662c9a7d..f176aa9c5ae2 100644 --- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx +++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx @@ -77,6 +77,7 @@ const MENU_KEYS = { EXPORT_ALL_DATA_GROUP: 'export_all_data_group', EXPORT_CURRENT_VIEW_GROUP: 'export_current_view_group', EXPORT_CURRENT_TO_CSV: 'export_current_to_csv', + EXPORT_ALL_SCREENSHOT: 'export_all_screenshot', }; const VIZ_TYPES_PIVOTABLE = [VizType.PivotTable]; @@ -717,6 +718,27 @@ export const useExploreAdditionalActionsMenu = ( ); }, }, + { + key: MENU_KEYS.EXPORT_ALL_SCREENSHOT, + label: t('Export screenshot (jpeg)'), + icon: , + onClick: e => { + // Visual export of the currently visible chart + downloadAsImage( + '.panel-body .chart-container', + slice?.slice_name ?? 
t('New chart'), + true, + theme, + )(e.domEvent); + setIsDropdownVisible(false); + dispatch( + logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_IMAGE, { + chartId: slice?.slice_id, + chartName: slice?.slice_name, + }), + ); + }, + }, { key: 'export_full_xlsx', label: t('Export to Excel'), From c971ebfa35a99e9e7bd8fc046bea4a69ec4faaa1 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Tue, 2 Dec 2025 14:03:16 -0500 Subject: [PATCH 12/18] fix: update style using pre-commit --- .../tests/test_cli_bundle.py | 6 +- .../tests/test_cli_init.py | 24 +- .../tests/test_templates.py | 6 +- superset-extensions-cli/tests/utils.py | 12 +- .../src/DataTable/DataTable.tsx | 2 +- tests/integration_tests/celery_tests.py | 12 +- tests/integration_tests/dao/base_dao_test.py | 6 +- .../integration_tests/dashboards/api_tests.py | 48 +- .../integration_tests/databases/api_tests.py | 485 +++++++++--------- .../security/row_level_security_tests.py | 24 +- .../sql_lab/commands_tests.py | 13 +- tests/integration_tests/sqla_models_tests.py | 6 +- tests/integration_tests/sqllab_tests.py | 51 +- tests/integration_tests/tags/api_tests.py | 6 +- .../tags/mysql_compatibility_test.py | 108 ++-- .../commands/chart/warm_up_cache_test.py | 36 +- .../common/test_query_context_processor.py | 18 +- .../unit_tests/connectors/sqla/models_test.py | 17 +- tests/unit_tests/dao/base_dao_test.py | 6 +- tests/unit_tests/databases/api_test.py | 152 +++--- .../db_engine_specs/test_gsheets.py | 101 ++-- .../unit_tests/db_engine_specs/test_mssql.py | 47 +- .../unit_tests/db_engine_specs/test_ocient.py | 6 +- tests/unit_tests/models/helpers_test.py | 24 +- tests/unit_tests/sql/dialects/pinot_tests.py | 6 +- tests/unit_tests/tags/models_test.py | 84 +-- tests/unit_tests/utils/test_core.py | 12 +- .../utils/test_screenshot_cache_fix.py | 6 +- 28 files changed, 652 insertions(+), 672 deletions(-) diff --git a/superset-extensions-cli/tests/test_cli_bundle.py b/superset-extensions-cli/tests/test_cli_bundle.py index eaa006c19bfc..a9c4ffd62555 100644 --- a/superset-extensions-cli/tests/test_cli_bundle.py +++ b/superset-extensions-cli/tests/test_cli_bundle.py @@ -208,9 +208,9 @@ def test_bundle_includes_all_files_recursively( "backend/src/complex_extension/utils/helpers.py", } - assert expected_files.issubset( - file_list - ), f"Missing files: {expected_files - file_list}" + assert expected_files.issubset(file_list), ( + f"Missing files: {expected_files - file_list}" + ) @pytest.mark.cli diff --git a/superset-extensions-cli/tests/test_cli_init.py b/superset-extensions-cli/tests/test_cli_init.py index de58ed76fe34..063f9e229239 100644 --- a/superset-extensions-cli/tests/test_cli_init.py +++ b/superset-extensions-cli/tests/test_cli_init.py @@ -82,9 +82,9 @@ def test_init_creates_extension_with_frontend_only( # Should NOT have backend directory backend_path = extension_path / "backend" - assert ( - not backend_path.exists() - ), "Backend directory should not exist for frontend-only extension" + assert not backend_path.exists(), ( + "Backend directory should not exist for frontend-only extension" + ) @pytest.mark.cli @@ -105,9 +105,9 @@ def test_init_creates_extension_with_backend_only( # Should NOT have frontend directory frontend_path = extension_path / "frontend" - assert ( - not frontend_path.exists() - ), "Frontend directory should not exist for backend-only extension" + assert not frontend_path.exists(), ( + "Frontend directory should not exist for backend-only extension" + ) @pytest.mark.cli @@ -148,9 +148,9 @@ def test_init_validates_extension_name( 
cli_input = f"{invalid_name}\n0.1.0\nApache-2.0\ny\ny\n" result = cli_runner.invoke(app, ["init"], input=cli_input) - assert ( - result.exit_code == 1 - ), f"Expected command to fail for invalid name '{invalid_name}'" + assert result.exit_code == 1, ( + f"Expected command to fail for invalid name '{invalid_name}'" + ) assert expected_error in result.output @@ -174,9 +174,9 @@ def test_init_with_valid_alphanumeric_names(cli_runner, valid_id): cli_input = f"{valid_id}\nTest Extension\n0.1.0\nApache-2.0\ny\ny\n" result = cli_runner.invoke(app, ["init"], input=cli_input) - assert ( - result.exit_code == 0 - ), f"Valid name '{valid_id}' was rejected: {result.output}" + assert result.exit_code == 0, ( + f"Valid name '{valid_id}' was rejected: {result.output}" + ) assert Path(valid_id).exists(), f"Directory for '{valid_id}' was not created" diff --git a/superset-extensions-cli/tests/test_templates.py b/superset-extensions-cli/tests/test_templates.py index decc10bf7bdd..b8cabc9ddd92 100644 --- a/superset-extensions-cli/tests/test_templates.py +++ b/superset-extensions-cli/tests/test_templates.py @@ -297,9 +297,9 @@ def test_template_whitespace_handling(jinja_env, template_context): empty_line_count = sum(1 for line in lines if line.strip() == "") # Some empty lines are OK for formatting, but not excessive - assert ( - empty_line_count < len(lines) / 2 - ), "Too many empty lines in rendered template" + assert empty_line_count < len(lines) / 2, ( + "Too many empty lines in rendered template" + ) # Should be properly formatted JSON parsed = json.loads(rendered) diff --git a/superset-extensions-cli/tests/utils.py b/superset-extensions-cli/tests/utils.py index 1bd1ec23a268..77513bcb8559 100644 --- a/superset-extensions-cli/tests/utils.py +++ b/superset-extensions-cli/tests/utils.py @@ -44,9 +44,9 @@ def assert_directory_exists(path: Path, description: str = "") -> None: description: Optional description for better error messages """ desc_msg = f" ({description})" if description else "" - assert ( - path.exists() - ), f"Expected directory {path}{desc_msg} to exist, but it doesn't" + assert path.exists(), ( + f"Expected directory {path}{desc_msg} to exist, but it doesn't" + ) assert path.is_dir(), f"Expected {path}{desc_msg} to be a directory, but it's not" @@ -136,9 +136,9 @@ def assert_json_content(path: Path, expected_values: dict[str, Any]) -> None: for key, expected_value in expected_values.items(): assert key in content, f"Expected key '{key}' not found in {path}" actual_value = content[key] - assert ( - actual_value == expected_value - ), f"Expected {key}='{expected_value}' but got '{actual_value}' in {path}" + assert actual_value == expected_value, ( + f"Expected {key}='{expected_value}' but got '{actual_value}' in {path}" + ) def assert_file_contains(path: Path, text: str) -> None: diff --git a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx index 0f444140572c..553a26085993 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx @@ -488,7 +488,7 @@ export default typedMemo(function DataTable({ // Very small, fast hash for strings (no crypto dependency). 
function hashString(s: string): string { let h = 0; - for (let i = 0; i < s.length; i+=1) { + for (let i = 0; i < s.length; i += 1) { h = (h * 31 + s.charCodeAt(i)) | 0; } return String(h); diff --git a/tests/integration_tests/celery_tests.py b/tests/integration_tests/celery_tests.py index 8a7ee09032a4..8fc5fe293e02 100644 --- a/tests/integration_tests/celery_tests.py +++ b/tests/integration_tests/celery_tests.py @@ -564,15 +564,15 @@ def my_task(self): # Expect True within an app context with app.app_context(): result = my_task.apply().get() - assert ( - result is True - ), "Task should have access to current_app within app context" + assert result is True, ( + "Task should have access to current_app within app context" + ) # Expect True outside of an app context result = my_task.apply().get() - assert ( - result is True - ), "Task should have access to current_app outside of app context" + assert result is True, ( + "Task should have access to current_app outside of app context" + ) def delete_tmp_view_or_table(name: str, ctas_method: CTASMethod): diff --git a/tests/integration_tests/dao/base_dao_test.py b/tests/integration_tests/dao/base_dao_test.py index 717edde37a6d..b014c3bdaee8 100644 --- a/tests/integration_tests/dao/base_dao_test.py +++ b/tests/integration_tests/dao/base_dao_test.py @@ -1425,9 +1425,9 @@ def test_base_dao_list_count_accuracy_with_filters_and_relationships( assert count == 6, f"Expected 6 dashboards, but count was {count}" # Should return only 3 due to page_size - assert ( - len(results) == 3 - ), f"Expected 3 results due to pagination, got {len(results)}" + assert len(results) == 3, ( + f"Expected 3 results due to pagination, got {len(results)}" + ) # Each should have 3 owners as we set up for dashboard in results: diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py index 4011880ef6c1..0e563bfc8b65 100644 --- a/tests/integration_tests/dashboards/api_tests.py +++ b/tests/integration_tests/dashboards/api_tests.py @@ -3460,30 +3460,30 @@ def test_dashboard_custom_tags_relationship_filters_correctly(self): all_tags = dashboard.tags all_tag_names = [t.name for t in all_tags] assert "critical" in all_tag_names, "Should include custom tag" - assert any( - t.name.startswith("owner:") for t in all_tags - ), "Should include owner tags" - assert any( - t.name.startswith("type:") for t in all_tags - ), "Should include type tags" + assert any(t.name.startswith("owner:") for t in all_tags), ( + "Should include owner tags" + ) + assert any(t.name.startswith("type:") for t in all_tags), ( + "Should include type tags" + ) # 2. 
MODEL: dashboard.custom_tags returns ONLY custom tags custom_only = dashboard.custom_tags custom_tag_names = [t.name for t in custom_only] assert "critical" in custom_tag_names, "Should include custom tag" - assert not any( - t.name.startswith("owner:") for t in custom_only - ), f"custom_tags should NOT include owner tags, got: {custom_tag_names}" - assert not any( - t.name.startswith("type:") for t in custom_only - ), f"custom_tags should NOT include type tags, got: {custom_tag_names}" + assert not any(t.name.startswith("owner:") for t in custom_only), ( + f"custom_tags should NOT include owner tags, got: {custom_tag_names}" + ) + assert not any(t.name.startswith("type:") for t in custom_only), ( + f"custom_tags should NOT include type tags, got: {custom_tag_names}" + ) assert len(custom_only) < len(all_tags), "Should filter out implicit tags" # Verify all tags in custom_tags have type=custom for tag in custom_only: - assert ( - tag.type == TagType.custom - ), f"Tag {tag.name} has type {tag.type}, expected TagType.custom" + assert tag.type == TagType.custom, ( + f"Tag {tag.name} has type {tag.type}, expected TagType.custom" + ) # 3. API: With config=True, API returns ONLY custom tags rv = self.client.get("api/v1/dashboard/") @@ -3495,19 +3495,19 @@ def test_dashboard_custom_tags_relationship_filters_correctly(self): ) assert test_dash is not None # API returns "tags" (get_list override renames custom_tags→tags) - assert ( - "tags" in test_dash - ), f"Response should have tags, got: {test_dash.keys()}" + assert "tags" in test_dash, ( + f"Response should have tags, got: {test_dash.keys()}" + ) # API should return ONLY custom tags api_tag_names = [t["name"] for t in test_dash["tags"]] assert "critical" in api_tag_names, "API should include custom tag" - assert not any( - t["name"].startswith("owner:") for t in test_dash["tags"] - ), f"API should NOT include owner tags, got: {api_tag_names}" - assert not any( - t["name"].startswith("type:") for t in test_dash["tags"] - ), f"API should NOT include type tags, got: {api_tag_names}" + assert not any(t["name"].startswith("owner:") for t in test_dash["tags"]), ( + f"API should NOT include owner tags, got: {api_tag_names}" + ) + assert not any(t["name"].startswith("type:") for t in test_dash["tags"]), ( + f"API should NOT include type tags, got: {api_tag_names}" + ) assert len(test_dash["tags"]) == 1, ( f"API should return only 1 custom tag, " f"got {len(test_dash['tags'])}: {api_tag_names}" diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index a80dca189045..cfad96158936 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -3278,274 +3278,271 @@ def test_available(self, get_available_engine_specs): rv = self.client.get(uri) response = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 200 - assert ( - response - == { - "databases": [ - { - "available_drivers": ["psycopg2"], - "default_driver": "psycopg2", - "engine": "postgresql", - "name": "PostgreSQL", - "parameters": { - "properties": { - "database": { - "description": "Database name", - "type": "string", - }, - "encryption": { - "description": "Use an encrypted connection to the database", # noqa: E501 - "type": "boolean", - }, - "host": { - "description": "Hostname or IP address", - "type": "string", - }, - "password": { - "description": "Password", - "nullable": True, - "type": "string", - }, - "port": { - "description": "Database port", - "maximum": 65536, - 
"minimum": 0, - "type": "integer", - }, - "query": { - "additionalProperties": {}, - "description": "Additional parameters", - "type": "object", - }, - "ssh": { - "description": "Use an ssh tunnel connection to the database", # noqa: E501 - "type": "boolean", - }, - "username": { - "description": "Username", - "nullable": True, - "type": "string", - }, + assert response == { + "databases": [ + { + "available_drivers": ["psycopg2"], + "default_driver": "psycopg2", + "engine": "postgresql", + "name": "PostgreSQL", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", # noqa: E501 + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "ssh": { + "description": "Use an ssh tunnel connection to the database", # noqa: E501 + "type": "boolean", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", }, - "required": ["database", "host", "port", "username"], - "type": "object", - }, - "preferred": True, - "sqlalchemy_uri_placeholder": "postgresql://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": True, - "disable_ssh_tunneling": False, - "supports_oauth2": False, }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": True, + "sqlalchemy_uri_placeholder": "postgresql://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": True, + "disable_ssh_tunneling": False, "supports_oauth2": False, }, - { - "available_drivers": ["bigquery"], - "default_driver": "bigquery", - "engine": "bigquery", - "name": "Google BigQuery", - "parameters": { - "properties": { - "credentials_info": { - "description": ( - "Contents of BigQuery JSON " "credentials." - ), - "type": "string", - "x-encrypted-extra": True, - }, - "query": {"type": "object"}, + "supports_oauth2": False, + }, + { + "available_drivers": ["bigquery"], + "default_driver": "bigquery", + "engine": "bigquery", + "name": "Google BigQuery", + "parameters": { + "properties": { + "credentials_info": { + "description": ( + "Contents of BigQuery JSON credentials." 
+ ), + "type": "string", + "x-encrypted-extra": True, }, - "type": "object", - }, - "preferred": True, - "sqlalchemy_uri_placeholder": "bigquery://{project_id}", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": True, - "disable_ssh_tunneling": True, - "supports_oauth2": False, + "query": {"type": "object"}, }, + "type": "object", + }, + "preferred": True, + "sqlalchemy_uri_placeholder": "bigquery://{project_id}", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": True, + "disable_ssh_tunneling": True, "supports_oauth2": False, }, - { - "available_drivers": ["psycopg2"], - "default_driver": "psycopg2", - "engine": "redshift", - "name": "Amazon Redshift", - "parameters": { - "properties": { - "database": { - "description": "Database name", - "type": "string", - }, - "encryption": { - "description": "Use an encrypted connection to the database", # noqa: E501 - "type": "boolean", - }, - "host": { - "description": "Hostname or IP address", - "type": "string", - }, - "password": { - "description": "Password", - "nullable": True, - "type": "string", - }, - "port": { - "description": "Database port", - "maximum": 65536, - "minimum": 0, - "type": "integer", - }, - "query": { - "additionalProperties": {}, - "description": "Additional parameters", - "type": "object", - }, - "ssh": { - "description": "Use an ssh tunnel connection to the database", # noqa: E501 - "type": "boolean", - }, - "username": { - "description": "Username", - "nullable": True, - "type": "string", - }, + "supports_oauth2": False, + }, + { + "available_drivers": ["psycopg2"], + "default_driver": "psycopg2", + "engine": "redshift", + "name": "Amazon Redshift", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", # noqa: E501 + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, + "description": "Additional parameters", + "type": "object", + }, + "ssh": { + "description": "Use an ssh tunnel connection to the database", # noqa: E501 + "type": "boolean", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", }, - "required": ["database", "host", "port", "username"], - "type": "object", - }, - "preferred": False, - "sqlalchemy_uri_placeholder": "redshift+psycopg2://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": False, - "supports_oauth2": False, }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "redshift+psycopg2://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": False, "supports_oauth2": False, }, - { - "available_drivers": ["apsw"], - "default_driver": "apsw", - "engine": "gsheets", - "name": "Google Sheets", - "parameters": { - "properties": { - "catalog": {"type": "object"}, - "oauth2_client_info": { - "default": { - "authorization_request_uri": 
"https://accounts.google.com/o/oauth2/v2/auth", - "scope": ( - "https://www.googleapis.com/auth/" - "drive.readonly " - "https://www.googleapis.com/auth/" - "spreadsheets " - "https://spreadsheets.google.com/feeds" - ), - "token_request_uri": "https://oauth2.googleapis.com/token", - }, - "description": "OAuth2 client information", - "nullable": True, - "type": "string", - "x-encrypted-extra": True, - }, - "service_account_info": { - "description": ( - "Contents of GSheets JSON " "credentials." + "supports_oauth2": False, + }, + { + "available_drivers": ["apsw"], + "default_driver": "apsw", + "engine": "gsheets", + "name": "Google Sheets", + "parameters": { + "properties": { + "catalog": {"type": "object"}, + "oauth2_client_info": { + "default": { + "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth", + "scope": ( + "https://www.googleapis.com/auth/" + "drive.readonly " + "https://www.googleapis.com/auth/" + "spreadsheets " + "https://spreadsheets.google.com/feeds" ), - "type": "string", - "x-encrypted-extra": True, + "token_request_uri": "https://oauth2.googleapis.com/token", }, + "description": "OAuth2 client information", + "nullable": True, + "type": "string", + "x-encrypted-extra": True, + }, + "service_account_info": { + "description": ( + "Contents of GSheets JSON credentials." + ), + "type": "string", + "x-encrypted-extra": True, }, - "type": "object", - }, - "preferred": False, - "sqlalchemy_uri_placeholder": "gsheets://", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": True, - "supports_oauth2": True, }, + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "gsheets://", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": True, "supports_oauth2": True, }, - { - "available_drivers": ["mysqlconnector", "mysqldb"], - "default_driver": "mysqldb", - "engine": "mysql", - "name": "MySQL", - "parameters": { - "properties": { - "database": { - "description": "Database name", - "type": "string", - }, - "encryption": { - "description": "Use an encrypted connection to the database", # noqa: E501 - "type": "boolean", - }, - "host": { - "description": "Hostname or IP address", - "type": "string", - }, - "password": { - "description": "Password", - "nullable": True, - "type": "string", - }, - "port": { - "description": "Database port", - "maximum": 65536, - "minimum": 0, - "type": "integer", - }, - "query": { - "additionalProperties": {}, - "description": "Additional parameters", - "type": "object", - }, - "ssh": { - "description": "Use an ssh tunnel connection to the database", # noqa: E501 - "type": "boolean", - }, - "username": { - "description": "Username", - "nullable": True, - "type": "string", - }, + "supports_oauth2": True, + }, + { + "available_drivers": ["mysqlconnector", "mysqldb"], + "default_driver": "mysqldb", + "engine": "mysql", + "name": "MySQL", + "parameters": { + "properties": { + "database": { + "description": "Database name", + "type": "string", + }, + "encryption": { + "description": "Use an encrypted connection to the database", # noqa: E501 + "type": "boolean", + }, + "host": { + "description": "Hostname or IP address", + "type": "string", + }, + "password": { + "description": "Password", + "nullable": True, + "type": "string", + }, + "port": { + "description": "Database port", + "maximum": 65536, + "minimum": 0, + "type": "integer", + }, + "query": { + "additionalProperties": {}, 
+ "description": "Additional parameters", + "type": "object", + }, + "ssh": { + "description": "Use an ssh tunnel connection to the database", # noqa: E501 + "type": "boolean", + }, + "username": { + "description": "Username", + "nullable": True, + "type": "string", }, - "required": ["database", "host", "port", "username"], - "type": "object", - }, - "preferred": False, - "sqlalchemy_uri_placeholder": "mysql://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": False, - "supports_oauth2": False, }, + "required": ["database", "host", "port", "username"], + "type": "object", + }, + "preferred": False, + "sqlalchemy_uri_placeholder": "mysql://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": False, "supports_oauth2": False, }, - { - "available_drivers": [""], - "engine": "hana", - "name": "SAP HANA", - "preferred": False, - "sqlalchemy_uri_placeholder": "engine+driver://user:password@host:port/dbname[?key=value&key=value...]", - "engine_information": { - "supports_file_upload": True, - "supports_dynamic_catalog": False, - "disable_ssh_tunneling": False, - "supports_oauth2": False, - }, + "supports_oauth2": False, + }, + { + "available_drivers": [""], + "engine": "hana", + "name": "SAP HANA", + "preferred": False, + "sqlalchemy_uri_placeholder": "engine+driver://user:password@host:port/dbname[?key=value&key=value...]", + "engine_information": { + "supports_file_upload": True, + "supports_dynamic_catalog": False, + "disable_ssh_tunneling": False, "supports_oauth2": False, }, - ] - } - ) + "supports_oauth2": False, + }, + ] + } @with_config({"PREFERRED_DATABASES": ["MySQL"]}) @mock.patch("superset.databases.api.get_available_engine_specs") diff --git a/tests/integration_tests/security/row_level_security_tests.py b/tests/integration_tests/security/row_level_security_tests.py index 6435bbb7f7f3..cab80b49476e 100644 --- a/tests/integration_tests/security/row_level_security_tests.py +++ b/tests/integration_tests/security/row_level_security_tests.py @@ -341,12 +341,12 @@ def test_rls_filter_applies_to_virtual_dataset(self): # Gamma user should have the name filters (A%, B%, Q%) and gender filter # Note: SQL uses uppercase LIKE and %% escaping sql_lower = sql.lower() - assert ( - "name like 'a%" in sql_lower or "name like 'q%" in sql_lower - ), f"RLS name filters not found in virtual dataset query: {sql}" - assert ( - "gender = 'boy'" in sql_lower - ), f"RLS gender filter not found in virtual dataset query: {sql}" + assert "name like 'a%" in sql_lower or "name like 'q%" in sql_lower, ( + f"RLS name filters not found in virtual dataset query: {sql}" + ) + assert "gender = 'boy'" in sql_lower, ( + f"RLS gender filter not found in virtual dataset query: {sql}" + ) # Test as admin user who has no RLS filters g.user = self.get_user(username="admin") @@ -395,12 +395,12 @@ def test_rls_filter_applies_to_virtual_dataset_with_join(self): # Verify that RLS filters from both physical tables are applied # birth_names filters sql_lower = sql.lower() - assert ( - "name like 'a%" in sql_lower or "name like 'q%" in sql_lower - ), f"birth_names RLS filters not found: {sql}" - assert ( - "gender = 'boy'" in sql_lower - ), f"birth_names gender filter not found: {sql}" + assert "name like 'a%" in sql_lower or "name like 'q%" in sql_lower, ( + f"birth_names RLS filters 
not found: {sql}" + ) + assert "gender = 'boy'" in sql_lower, ( + f"birth_names gender filter not found: {sql}" + ) # energy_usage filter assert "value > 1" in sql_lower, f"energy_usage RLS filter not found: {sql}" diff --git a/tests/integration_tests/sql_lab/commands_tests.py b/tests/integration_tests/sql_lab/commands_tests.py index 23a9e3274e6c..e3c09fc50918 100644 --- a/tests/integration_tests/sql_lab/commands_tests.py +++ b/tests/integration_tests/sql_lab/commands_tests.py @@ -87,15 +87,10 @@ def test_run_timeout(self, is_feature_enabled) -> None: assert ( ex_info.value.error.error_type == SupersetErrorType.SQLLAB_TIMEOUT_ERROR ) - assert ( - ex_info.value.error.message - == __( - "The query estimation was killed after %(sqllab_timeout)s seconds. It might " # noqa: E501 - "be too complex, or the database might be under heavy load.", - sqllab_timeout=current_app.config[ - "SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT" - ], - ) + assert ex_info.value.error.message == __( + "The query estimation was killed after %(sqllab_timeout)s seconds. It might " # noqa: E501 + "be too complex, or the database might be under heavy load.", + sqllab_timeout=current_app.config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"], ) def test_run_success(self) -> None: diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py index 8de5d741781b..f770b7273196 100644 --- a/tests/integration_tests/sqla_models_tests.py +++ b/tests/integration_tests/sqla_models_tests.py @@ -1263,9 +1263,9 @@ def mock_get_df(sql, catalog=None, schema=None, mutator=None): result = table.query(query_obj) expected_order = ["metric_y", "col_b", "metric_x", "col_a"] - assert ( - list(result.df.columns) == expected_order - ), f"Expected {expected_order}, got {list(result.df.columns)}" + assert list(result.df.columns) == expected_order, ( + f"Expected {expected_order}, got {list(result.df.columns)}" + ) finally: db.session.delete(table) db.session.commit() diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index edbd6b8404bb..99c347d95f9d 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -812,33 +812,30 @@ def test_sql_json_soft_timeout(self): handle_cursor.side_effect = SoftTimeLimitExceeded() data = self.run_sql("SELECT * FROM birth_names LIMIT 1", "1") - assert ( - data - == { - "errors": [ - { - "message": ( - "The query was killed after 21600 seconds. It might be too complex, " # noqa: E501 - "or the database might be under heavy load." - ), - "error_type": SupersetErrorType.SQLLAB_TIMEOUT_ERROR, - "level": ErrorLevel.ERROR, - "extra": { - "issue_codes": [ - { - "code": 1026, - "message": "Issue 1026 - Query is too complex and takes too long to run.", # noqa: E501 - }, - { - "code": 1027, - "message": "Issue 1027 - The database is currently running too many queries.", # noqa: E501 - }, - ] - }, - } - ] - } - ) + assert data == { + "errors": [ + { + "message": ( + "The query was killed after 21600 seconds. It might be too complex, " # noqa: E501 + "or the database might be under heavy load." 
+ ), + "error_type": SupersetErrorType.SQLLAB_TIMEOUT_ERROR, + "level": ErrorLevel.ERROR, + "extra": { + "issue_codes": [ + { + "code": 1026, + "message": "Issue 1026 - Query is too complex and takes too long to run.", # noqa: E501 + }, + { + "code": 1027, + "message": "Issue 1027 - The database is currently running too many queries.", # noqa: E501 + }, + ] + }, + } + ] + } @pytest.mark.parametrize("spec", [HiveEngineSpec, PrestoEngineSpec]) diff --git a/tests/integration_tests/tags/api_tests.py b/tests/integration_tests/tags/api_tests.py index 33a4b7e6d2ad..ba4ce8aba6c7 100644 --- a/tests/integration_tests/tags/api_tests.py +++ b/tests/integration_tests/tags/api_tests.py @@ -861,9 +861,9 @@ def test_create_tag_mysql_compatibility(self) -> None: # Critical check: ensure the tag name is a plain string, not Markup assert isinstance(created_tag.name, str), "Tag name should be a plain string" - assert not isinstance( - created_tag.name, Markup - ), "Tag name should NOT be a Markup object" + assert not isinstance(created_tag.name, Markup), ( + "Tag name should NOT be a Markup object" + ) assert created_tag.name.__class__ is str, "Tag name should be exactly str type" assert created_tag.name == tag_name, "Tag name should match the input" diff --git a/tests/integration_tests/tags/mysql_compatibility_test.py b/tests/integration_tests/tags/mysql_compatibility_test.py index 1045add6f729..6cb457ae368f 100644 --- a/tests/integration_tests/tags/mysql_compatibility_test.py +++ b/tests/integration_tests/tags/mysql_compatibility_test.py @@ -70,9 +70,9 @@ def test_create_tag_returns_string_not_markup(self) -> None: # Critical checks for MySQL compatibility assert isinstance(tag.name, str), "Tag name should be a plain string" - assert not isinstance( - tag.name, Markup - ), "Tag name should NOT be a Markup object" + assert not isinstance(tag.name, Markup), ( + "Tag name should NOT be a Markup object" + ) assert tag.name.__class__ is str, "Tag name should be exactly str type" assert tag.name == tag_name, f"Tag name should be '{tag_name}'" @@ -82,15 +82,15 @@ def test_create_tag_returns_string_not_markup(self) -> None: # Retrieve the tag from database to ensure it was stored correctly retrieved_tag = db.session.query(Tag).filter_by(name=tag_name).first() assert retrieved_tag is not None, "Tag should be retrievable from database" - assert isinstance( - retrieved_tag.name, str - ), "Retrieved tag name should be a string" - assert not isinstance( - retrieved_tag.name, Markup - ), "Retrieved tag name should NOT be Markup" - assert ( - retrieved_tag.name == tag_name - ), "Retrieved tag name should match original" + assert isinstance(retrieved_tag.name, str), ( + "Retrieved tag name should be a string" + ) + assert not isinstance(retrieved_tag.name, Markup), ( + "Retrieved tag name should NOT be Markup" + ) + assert retrieved_tag.name == tag_name, ( + "Retrieved tag name should match original" + ) def test_create_multiple_tags_no_sql_error(self) -> None: """ @@ -110,12 +110,12 @@ def test_create_multiple_tags_no_sql_error(self) -> None: tag = get_tag(tag_name, db.session, TagType.custom) created_tags.append(tag) - assert isinstance( - tag.name, str - ), f"Tag '{tag_name}' name should be a string" - assert not isinstance( - tag.name, Markup - ), f"Tag '{tag_name}' should NOT be Markup" + assert isinstance(tag.name, str), ( + f"Tag '{tag_name}' name should be a string" + ) + assert not isinstance(tag.name, Markup), ( + f"Tag '{tag_name}' should NOT be Markup" + ) except ProgrammingError as e: pytest.fail( 
f"ProgrammingError should not occur when creating tags: {e}", @@ -129,9 +129,9 @@ def test_create_multiple_tags_no_sql_error(self) -> None: for tag_name in tag_names: tag = db.session.query(Tag).filter_by(name=tag_name).first() assert tag is not None, f"Tag '{tag_name}' should exist in database" - assert isinstance( - tag.name, str - ), f"Tag '{tag_name}' should have string name" + assert isinstance(tag.name, str), ( + f"Tag '{tag_name}' should have string name" + ) def test_create_tag_with_special_characters(self) -> None: """ @@ -152,23 +152,23 @@ def test_create_tag_with_special_characters(self) -> None: tag = get_tag(tag_name, db.session, TagType.custom) assert isinstance(tag.name, str), f"Tag '{tag_name}' should be a string" - assert not isinstance( - tag.name, Markup - ), f"Tag '{tag_name}' should NOT be Markup" - assert ( - tag.name == tag_name - ), f"Tag name should match input: '{tag_name}'" + assert not isinstance(tag.name, Markup), ( + f"Tag '{tag_name}' should NOT be Markup" + ) + assert tag.name == tag_name, ( + f"Tag name should match input: '{tag_name}'" + ) db.session.commit() # Verify database persistence retrieved_tag = db.session.query(Tag).filter_by(name=tag_name).first() - assert ( - retrieved_tag is not None - ), f"Tag '{tag_name}' should be in database" - assert ( - retrieved_tag.name == tag_name - ), f"Retrieved tag name should match: '{tag_name}'" + assert retrieved_tag is not None, ( + f"Tag '{tag_name}' should be in database" + ) + assert retrieved_tag.name == tag_name, ( + f"Retrieved tag name should match: '{tag_name}'" + ) except ProgrammingError as e: pytest.fail( @@ -187,12 +187,12 @@ def test_get_existing_tag_returns_string(self) -> None: retrieved_tag = get_tag(tag_name, db.session, TagType.custom) assert retrieved_tag.id == original_tag.id, "Should retrieve the same tag" - assert isinstance( - retrieved_tag.name, str - ), "Retrieved tag name should be a string" - assert not isinstance( - retrieved_tag.name, Markup - ), "Retrieved tag name should NOT be Markup" + assert isinstance(retrieved_tag.name, str), ( + "Retrieved tag name should be a string" + ) + assert not isinstance(retrieved_tag.name, Markup), ( + "Retrieved tag name should NOT be Markup" + ) def test_tag_with_whitespace_handling(self) -> None: """Test that tags with leading/trailing whitespace are handled correctly.""" @@ -224,12 +224,12 @@ def test_tag_type_variations(self) -> None: try: tag = get_tag(tag_name, db.session, tag_type) - assert isinstance( - tag.name, str - ), f"Tag name for {tag_type} should be a string" - assert not isinstance( - tag.name, Markup - ), f"Tag name for {tag_type} should NOT be Markup" + assert isinstance(tag.name, str), ( + f"Tag name for {tag_type} should be a string" + ) + assert not isinstance(tag.name, Markup), ( + f"Tag name for {tag_type} should NOT be Markup" + ) assert tag.type == tag_type, f"Tag type should be {tag_type}" db.session.commit() @@ -254,9 +254,9 @@ def test_no_sql_syntax_error_on_commit(self) -> None: # Verify the tag exists in the database retrieved_tag = db.session.query(Tag).filter_by(name=tag_name).first() assert retrieved_tag is not None, "Tag should exist after commit" - assert isinstance( - retrieved_tag.name, str - ), "Retrieved tag should have string name" + assert isinstance(retrieved_tag.name, str), ( + "Retrieved tag should have string name" + ) except ProgrammingError as e: pytest.fail(f"ProgrammingError should not occur during commit: {e}") @@ -274,12 +274,12 @@ def test_tag_name_is_pure_string_type(self) -> None: tag = 
get_tag(tag_name, db.session, TagType.custom) # Check that it's exactly a str, not a subclass - assert ( - tag.name.__class__ is str - ), f"Tag name type should be exactly 'str', got {type(tag.name)}" - assert not isinstance( - tag.name, Markup - ), "Tag name should not be a Markup instance" + assert tag.name.__class__ is str, ( + f"Tag name type should be exactly 'str', got {type(tag.name)}" + ) + assert not isinstance(tag.name, Markup), ( + "Tag name should not be a Markup instance" + ) # Markup is a subclass of str, so this additional check is important assert tag.name.__class__.__name__ == "str", "Tag name class should be 'str'" diff --git a/tests/unit_tests/commands/chart/warm_up_cache_test.py b/tests/unit_tests/commands/chart/warm_up_cache_test.py index 078bb5d81b1b..7eaefafc08a2 100644 --- a/tests/unit_tests/commands/chart/warm_up_cache_test.py +++ b/tests/unit_tests/commands/chart/warm_up_cache_test.py @@ -237,12 +237,12 @@ def test_handles_empty_dashboard_filters( ChartWarmUpCacheCommand(chart, 42, None).run() # VALIDATE: No filters added (empty list case) - assert ( - len(mock_query.filter) == 0 - ), "No filters should be added when dashboard has no filters" - assert ( - mock_get_dashboard_filters.called - ), "Should still call get_dashboard_extra_filters" + assert len(mock_query.filter) == 0, ( + "No filters should be added when dashboard has no filters" + ) + assert mock_get_dashboard_filters.called, ( + "Should still call get_dashboard_extra_filters" + ) @patch("superset.commands.chart.warm_up_cache.ChartDataCommand") @@ -305,12 +305,12 @@ def test_none_query_context_raises_chart_invalid_error(mock_chart_data_command): assert result["viz_error"] is not None, "Should return an error" assert result["chart_id"] == 129 error_str = str(result["viz_error"]).lower() - assert ( - "query context" in error_str - ), f"Error should mention query context: {result['viz_error']}" - assert ( - "not exist" in error_str - ), f"Error should mention not exist: {result['viz_error']}" + assert "query context" in error_str, ( + f"Error should mention query context: {result['viz_error']}" + ) + assert "not exist" in error_str, ( + f"Error should mention not exist: {result['viz_error']}" + ) @patch("superset.commands.chart.warm_up_cache.viz_types", ["table"]) @@ -336,12 +336,12 @@ def test_legacy_chart_without_datasource_raises_error(): assert result["viz_error"] is not None, "Should return an error" assert result["chart_id"] == 130 error_str = str(result["viz_error"]).lower() - assert ( - "datasource" in error_str - ), f"Error should mention datasource: {result['viz_error']}" - assert ( - "not exist" in error_str - ), f"Error should mention not exist: {result['viz_error']}" + assert "datasource" in error_str, ( + f"Error should mention datasource: {result['viz_error']}" + ) + assert "not exist" in error_str, ( + f"Error should mention not exist: {result['viz_error']}" + ) @patch("superset.commands.chart.warm_up_cache.get_dashboard_extra_filters") diff --git a/tests/unit_tests/common/test_query_context_processor.py b/tests/unit_tests/common/test_query_context_processor.py index f79e521399b4..c83d5b728bac 100644 --- a/tests/unit_tests/common/test_query_context_processor.py +++ b/tests/unit_tests/common/test_query_context_processor.py @@ -912,9 +912,9 @@ def mock_validate(*args, **kwargs): def mock_cache_key(*args, **kwargs): call_order.append("cache_key") # Verify that extras have been sanitized at this point - assert ( - query_obj.extras["where"] == "(col1 > 0)" - ), f"Expected sanitized clause in 
cache_key, got: {query_obj.extras['where']}" + assert query_obj.extras["where"] == "(col1 > 0)", ( + f"Expected sanitized clause in cache_key, got: {query_obj.extras['where']}" + ) return original_cache_key(*args, **kwargs) with patch.object(query_obj, "validate", side_effect=mock_validate): @@ -1172,9 +1172,9 @@ def test_cache_key_preserves_other_post_processing_options(): ) # Cache keys should differ because rename_columns is different - assert ( - query1.cache_key() != query2.cache_key() - ), "Cache keys should differ when other post_processing options differ" + assert query1.cache_key() != query2.cache_key(), ( + "Cache keys should differ when other post_processing options differ" + ) def test_cache_key_non_contribution_post_processing_unchanged(): @@ -1214,9 +1214,9 @@ def test_cache_key_non_contribution_post_processing_unchanged(): ) # Cache keys should differ because aggregates option is different - assert ( - query1.cache_key() != query2.cache_key() - ), "Cache keys should differ for different non-contribution post_processing" + assert query1.cache_key() != query2.cache_key(), ( + "Cache keys should differ for different non-contribution post_processing" + ) def test_force_cached_normalizes_totals_query_row_limit(): diff --git a/tests/unit_tests/connectors/sqla/models_test.py b/tests/unit_tests/connectors/sqla/models_test.py index 045560bab23c..437ddf9151ba 100644 --- a/tests/unit_tests/connectors/sqla/models_test.py +++ b/tests/unit_tests/connectors/sqla/models_test.py @@ -187,13 +187,10 @@ def test_query_datasources_by_permissions_with_catalog_schema( ["[my_db].[db1].[schema1]", "[my_other_db].[schema]"], # type: ignore ) clause = db.session.query().filter_by().filter.mock_calls[0].args[0] - assert ( - str(clause.compile(engine, compile_kwargs={"literal_binds": True})) - == ( - "tables.perm IN ('[my_db].[table1](id:1)') OR " - "tables.schema_perm IN ('[my_db].[db1].[schema1]', '[my_other_db].[schema]') OR " # noqa: E501 - "tables.catalog_perm IN ('[my_db].[db1]')" - ) + assert str(clause.compile(engine, compile_kwargs={"literal_binds": True})) == ( + "tables.perm IN ('[my_db].[table1](id:1)') OR " + "tables.schema_perm IN ('[my_db].[db1].[schema1]', '[my_other_db].[schema]') OR " # noqa: E501 + "tables.catalog_perm IN ('[my_db].[db1]')" ) @@ -766,9 +763,9 @@ def test_get_sqla_table_quoting_for_cross_catalog( # The compiled SQL should contain each part quoted separately assert expected_in_sql in compiled, f"Expected {expected_in_sql} in SQL: {compiled}" # Should NOT have the entire identifier quoted as one string - assert ( - not_expected_in_sql not in compiled - ), f"Should not have {not_expected_in_sql} in SQL: {compiled}" + assert not_expected_in_sql not in compiled, ( + f"Should not have {not_expected_in_sql} in SQL: {compiled}" + ) def test_get_sqla_table_without_cross_catalog_ignores_catalog( diff --git a/tests/unit_tests/dao/base_dao_test.py b/tests/unit_tests/dao/base_dao_test.py index d7835d484067..eb7d20adfd0a 100644 --- a/tests/unit_tests/dao/base_dao_test.py +++ b/tests/unit_tests/dao/base_dao_test.py @@ -143,9 +143,9 @@ class TestModel(Base_test): # type: ignore } # Ensure we've tested all operators - assert ( - tested_operators == all_operators - ), f"Missing operators: {all_operators - tested_operators}" + assert tested_operators == all_operators, ( + f"Missing operators: {all_operators - tested_operators}" + ) def test_find_by_ids_sqlalchemy_error_with_model_cls(): diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py index 
3f7810df41f5..499f06689394 100644 --- a/tests/unit_tests/databases/api_test.py +++ b/tests/unit_tests/databases/api_test.py @@ -226,92 +226,92 @@ def test_database_connection( response = client.get("/api/v1/database/1/connection") assert response.json == { - "id": 1, - "result": { - "allow_ctas": False, - "allow_cvas": False, - "allow_dml": False, - "allow_file_upload": False, - "allow_run_async": False, - "backend": "gsheets", - "cache_timeout": None, - "configuration_method": "sqlalchemy_form", - "database_name": "my_database", - "driver": "gsheets", - "engine_information": { - "disable_ssh_tunneling": True, - "supports_dynamic_catalog": False, - "supports_file_upload": True, - "supports_oauth2": True, - }, - "expose_in_sqllab": True, - "extra": '{\n "metadata_params": {},\n "engine_params": {},\n "metadata_cache_timeout": {},\n "schemas_allowed_for_file_upload": []\n}\n', # noqa: E501 - "force_ctas_schema": None, "id": 1, - "impersonate_user": False, - "is_managed_externally": False, - "masked_encrypted_extra": json.dumps( - { + "result": { + "allow_ctas": False, + "allow_cvas": False, + "allow_dml": False, + "allow_file_upload": False, + "allow_run_async": False, + "backend": "gsheets", + "cache_timeout": None, + "configuration_method": "sqlalchemy_form", + "database_name": "my_database", + "driver": "gsheets", + "engine_information": { + "disable_ssh_tunneling": True, + "supports_dynamic_catalog": False, + "supports_file_upload": True, + "supports_oauth2": True, + }, + "expose_in_sqllab": True, + "extra": '{\n "metadata_params": {},\n "engine_params": {},\n "metadata_cache_timeout": {},\n "schemas_allowed_for_file_upload": []\n}\n', # noqa: E501 + "force_ctas_schema": None, + "id": 1, + "impersonate_user": False, + "is_managed_externally": False, + "masked_encrypted_extra": json.dumps( + { + "service_account_info": { + "type": "service_account", + "project_id": "black-sanctum-314419", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", # noqa: E501 + "private_key": "XXXXXXXXXX", + "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", # noqa: E501 + "client_id": "114567578578109757129", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", + } + } + ), + "parameters": { "service_account_info": { - "type": "service_account", - "project_id": "black-sanctum-314419", - "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", # noqa: E501 - "private_key": "XXXXXXXXXX", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", # noqa: E501 "client_id": "114567578578109757129", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://oauth2.googleapis.com/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", + "private_key": "XXXXXXXXXX", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", + "project_id": "black-sanctum-314419", + "token_uri": 
"https://oauth2.googleapis.com/token", + "type": "service_account", } - } - ), - "parameters": { - "service_account_info": { - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", # noqa: E501 - "client_id": "114567578578109757129", - "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", - "private_key": "XXXXXXXXXX", - "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", - "project_id": "black-sanctum-314419", - "token_uri": "https://oauth2.googleapis.com/token", - "type": "service_account", - } - }, - "parameters_schema": { - "properties": { - "catalog": {"type": "object"}, - "oauth2_client_info": { - "default": { - "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth", - "scope": ( - "https://www.googleapis.com/auth/drive.readonly " - "https://www.googleapis.com/auth/spreadsheets " - "https://spreadsheets.google.com/feeds" - ), - "token_request_uri": "https://oauth2.googleapis.com/token", + }, + "parameters_schema": { + "properties": { + "catalog": {"type": "object"}, + "oauth2_client_info": { + "default": { + "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth", + "scope": ( + "https://www.googleapis.com/auth/drive.readonly " + "https://www.googleapis.com/auth/spreadsheets " + "https://spreadsheets.google.com/feeds" + ), + "token_request_uri": "https://oauth2.googleapis.com/token", + }, + "description": "OAuth2 client information", + "nullable": True, + "type": "string", + "x-encrypted-extra": True, + }, + "service_account_info": { + "description": "Contents of GSheets JSON credentials.", + "type": "string", + "x-encrypted-extra": True, }, - "description": "OAuth2 client information", - "nullable": True, - "type": "string", - "x-encrypted-extra": True, - }, - "service_account_info": { - "description": "Contents of GSheets JSON credentials.", - "type": "string", - "x-encrypted-extra": True, }, + "type": "object", }, - "type": "object", + "server_cert": None, + "sqlalchemy_uri": "gsheets://", + "uuid": "02feae18-2dd6-4bb4-a9c0-49e9d4f29d58", }, - "server_cert": None, - "sqlalchemy_uri": "gsheets://", - "ssh_tunnel": None, - "uuid": "02feae18-2dd6-4bb4-a9c0-49e9d4f29d58", - }, - } + } + ) response = client.get("/api/v1/database/1") assert response.json == { diff --git a/tests/unit_tests/db_engine_specs/test_gsheets.py b/tests/unit_tests/db_engine_specs/test_gsheets.py index ade17b6bc129..148064d8a6fb 100644 --- a/tests/unit_tests/db_engine_specs/test_gsheets.py +++ b/tests/unit_tests/db_engine_specs/test_gsheets.py @@ -160,61 +160,58 @@ def test_validate_parameters_catalog( } errors = GSheetsEngineSpec.validate_parameters(properties) # ignore: type - assert ( - errors - == [ - SupersetError( - message=( - "The URL could not be identified. Please check for typos " - "and make sure that ‘Type of Google Sheets allowed’ " - "selection matches the input." - ), - error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR, - level=ErrorLevel.WARNING, - extra={ - "catalog": { - "idx": 0, - "url": True, - }, - "issue_codes": [ - { - "code": 1003, - "message": "Issue 1003 - There is a syntax error in the SQL query. 
Perhaps there was a misspelling or a typo.",  # noqa: E501
-                    },
-                    {
-                        "code": 1005,
-                        "message": "Issue 1005 - The table was deleted or renamed in the database.",  # noqa: E501
-                    },
-                ],
-            },
+    assert errors == [
+        SupersetError(
+            message=(
+                "The URL could not be identified. Please check for typos "
+                "and make sure that ‘Type of Google Sheets allowed’ "
+                "selection matches the input."
            ),
-        SupersetError(
-            message=(
-                "The URL could not be identified. Please check for typos "
-                "and make sure that ‘Type of Google Sheets allowed’ "
-                "selection matches the input."
-            ),
-            error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
-            level=ErrorLevel.WARNING,
-            extra={
-                "catalog": {
-                    "idx": 2,
-                    "url": True,
-                },
-                "issue_codes": [
-                    {
-                        "code": 1003,
-                        "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",  # noqa: E501
-                    },
-                    {
-                        "code": 1005,
-                        "message": "Issue 1005 - The table was deleted or renamed in the database.",  # noqa: E501
-                    },
-                ],
            },
+            error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
+            level=ErrorLevel.WARNING,
+            extra={
+                "catalog": {
+                    "idx": 0,
+                    "url": True,
                },
+                "issue_codes": [
+                    {
+                        "code": 1003,
+                        "message": "Issue 1003 - There is a syntax error in the SQL query. Perhaps there was a misspelling or a typo.",  # noqa: E501
+                    },
+                    {
+                        "code": 1005,
+                        "message": "Issue 1005 - The table was deleted or renamed in the database.",  # noqa: E501
+                    },
+                ],
+            },
+        ),
+        SupersetError(
+            message=(
+                "The URL could not be identified. Please check for typos "
+                "and make sure that ‘Type of Google Sheets allowed’ "
+                "selection matches the input."
            ),
+            error_type=SupersetErrorType.TABLE_DOES_NOT_EXIST_ERROR,
+            level=ErrorLevel.WARNING,
+            extra={
+                "catalog": {
+                    "idx": 2,
+                    "url": True,
                },
+                "issue_codes": [
+                    {
+                        "code": 1003,
+                        "message": "Issue 1003 - There is a syntax error in the SQL query. 
Perhaps there was a misspelling or a typo.", # noqa: E501 + }, + { + "code": 1005, + "message": "Issue 1005 - The table was deleted or renamed in the database.", # noqa: E501 + }, + ], + }, + ), + ] create_engine.assert_called_with( "gsheets://", diff --git a/tests/unit_tests/db_engine_specs/test_mssql.py b/tests/unit_tests/db_engine_specs/test_mssql.py index e1c9ab28c060..e0ce5e1180c8 100644 --- a/tests/unit_tests/db_engine_specs/test_mssql.py +++ b/tests/unit_tests/db_engine_specs/test_mssql.py @@ -394,31 +394,28 @@ def test_extract_errors() -> None: result = MssqlEngineSpec.extract_errors( Exception(msg), context={"username": "testuser", "database": "testdb"} ) - assert ( - result - == [ - SupersetError( - message='Either the username "testuser", password, or database name "testdb" is incorrect.', # noqa: E501 - error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, - level=ErrorLevel.ERROR, - extra={ - "engine_name": "Microsoft SQL Server", - "issue_codes": [ - { - "code": 1014, - "message": "Issue 1014 - Either the username or " - "the password is wrong.", - }, - { - "code": 1015, - "message": "Issue 1015 - Either the database is " - "spelled incorrectly or does not exist.", - }, - ], - }, - ) - ] - ) + assert result == [ + SupersetError( + message='Either the username "testuser", password, or database name "testdb" is incorrect.', # noqa: E501 + error_type=SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR, + level=ErrorLevel.ERROR, + extra={ + "engine_name": "Microsoft SQL Server", + "issue_codes": [ + { + "code": 1014, + "message": "Issue 1014 - Either the username or " + "the password is wrong.", + }, + { + "code": 1015, + "message": "Issue 1015 - Either the database is " + "spelled incorrectly or does not exist.", + }, + ], + }, + ) + ] @pytest.mark.parametrize( diff --git a/tests/unit_tests/db_engine_specs/test_ocient.py b/tests/unit_tests/db_engine_specs/test_ocient.py index 445a118b7be5..82c7c206bfcb 100644 --- a/tests/unit_tests/db_engine_specs/test_ocient.py +++ b/tests/unit_tests/db_engine_specs/test_ocient.py @@ -223,9 +223,9 @@ def test_connection_errors(msg: str, expected: SupersetError) -> None: assert result == [expected] -def _generate_gis_type_sanitization_test_cases() -> ( - list[tuple[str, int, Any, dict[str, Any]]] -): +def _generate_gis_type_sanitization_test_cases() -> list[ + tuple[str, int, Any, dict[str, Any]] +]: if not ocient_is_installed(): return [] diff --git a/tests/unit_tests/models/helpers_test.py b/tests/unit_tests/models/helpers_test.py index 019bf7d46a3a..e913526975fb 100644 --- a/tests/unit_tests/models/helpers_test.py +++ b/tests/unit_tests/models/helpers_test.py @@ -477,9 +477,9 @@ def condition_factory(col_name: str, expr): has_single_quotes = "'Others'" in select_sql and "'Others'" in groupby_sql has_double_quotes = '"Others"' in select_sql and '"Others"' in groupby_sql - assert ( - has_single_quotes or has_double_quotes - ), "Others literal should be quoted with either single or double quotes" + assert has_single_quotes or has_double_quotes, ( + "Others literal should be quoted with either single or double quotes" + ) # Verify the structure of the generated SQL assert "CASE WHEN" in select_sql @@ -1121,13 +1121,13 @@ def test_process_select_expression_end_to_end(database: Database) -> None: # sqlglot may normalize the SQL slightly, so we check the result exists # and doesn't contain the SELECT prefix assert result is not None, f"Failed to process: {expression}" - assert not result.upper().startswith( - "SELECT" - ), f"Result still has 
SELECT prefix: {result}" + assert not result.upper().startswith("SELECT"), ( + f"Result still has SELECT prefix: {result}" + ) # The result should contain the core expression (case-insensitive check) - assert ( - expected.replace(" ", "").lower() in result.replace(" ", "").lower() - ), f"Expected '{expected}' to be in result '{result}' for input '{expression}'" + assert expected.replace(" ", "").lower() in result.replace(" ", "").lower(), ( + f"Expected '{expected}' to be in result '{result}' for input '{expression}'" + ) def test_reapply_query_filters_with_granularity(database: Database) -> None: @@ -1641,9 +1641,9 @@ def test_adhoc_column_with_spaces_generates_quoted_sql(database: Database) -> No ) ) - assert ( - '"Order Total"' in sql_numeric - ), f"Expected quoted column name in SQL: {sql_numeric}" + assert '"Order Total"' in sql_numeric, ( + f"Expected quoted column name in SQL: {sql_numeric}" + ) def test_adhoc_column_with_spaces_in_full_query(database: Database) -> None: diff --git a/tests/unit_tests/sql/dialects/pinot_tests.py b/tests/unit_tests/sql/dialects/pinot_tests.py index 6a3dab751a02..57f08381875e 100644 --- a/tests/unit_tests/sql/dialects/pinot_tests.py +++ b/tests/unit_tests/sql/dialects/pinot_tests.py @@ -876,6 +876,6 @@ def test_pinot_function_names_preserved(function_name: str, sample_args: str) -> generated = parsed.sql(dialect=Pinot) # The function name should be preserved (case-insensitive check) - assert ( - function_name.upper() in generated.upper() - ), f"Function {function_name} not preserved in output: {generated}" + assert function_name.upper() in generated.upper(), ( + f"Function {function_name} not preserved in output: {generated}" + ) diff --git a/tests/unit_tests/tags/models_test.py b/tests/unit_tests/tags/models_test.py index 325e3636e6fc..93054f4dac44 100644 --- a/tests/unit_tests/tags/models_test.py +++ b/tests/unit_tests/tags/models_test.py @@ -74,12 +74,12 @@ def test_get_tag_with_special_characters() -> None: for tag_name in tag_names: result = get_tag(tag_name, mock_session, TagType.custom) - assert isinstance( - result.name, str - ), f"Tag name '{tag_name}' should be a plain string" - assert not isinstance( - result.name, Markup - ), f"Tag name '{tag_name}' should NOT be a Markup object" + assert isinstance(result.name, str), ( + f"Tag name '{tag_name}' should be a plain string" + ) + assert not isinstance(result.name, Markup), ( + f"Tag name '{tag_name}' should NOT be a Markup object" + ) assert result.name == tag_name, f"Tag name should match input: '{tag_name}'" @@ -106,12 +106,12 @@ def test_get_tag_with_html_characters() -> None: for tag_name in tag_names: result = get_tag(tag_name, mock_session, TagType.custom) - assert isinstance( - result.name, str - ), f"Tag name '{tag_name}' should be a plain string" - assert not isinstance( - result.name, Markup - ), f"Tag name '{tag_name}' should NOT be a Markup object" + assert isinstance(result.name, str), ( + f"Tag name '{tag_name}' should be a plain string" + ) + assert not isinstance(result.name, Markup), ( + f"Tag name '{tag_name}' should NOT be a Markup object" + ) # Name should remain unchanged (not HTML-escaped) assert result.name == tag_name, f"Tag name should not be escaped: '{tag_name}'" @@ -135,12 +135,12 @@ def test_get_tag_strips_whitespace() -> None: result = get_tag(input_name, mock_session, TagType.custom) assert isinstance(result.name, str), "Tag name should be a plain string" - assert not isinstance( - result.name, Markup - ), "Tag name should NOT be a Markup object" - assert ( - 
result.name == expected_name - ), f"Tag name should be stripped: '{expected_name}'" + assert not isinstance(result.name, Markup), ( + "Tag name should NOT be a Markup object" + ) + assert result.name == expected_name, ( + f"Tag name should be stripped: '{expected_name}'" + ) def test_get_tag_returns_existing_tag() -> None: @@ -190,9 +190,9 @@ def test_get_tag_creates_new_tag() -> None: added_tag = mock_session.add.call_args[0][0] assert isinstance(added_tag, Tag), "Should add a Tag object" assert isinstance(added_tag.name, str), "Tag name should be a plain string" - assert not isinstance( - added_tag.name, Markup - ), "Tag name should NOT be a Markup object" + assert not isinstance(added_tag.name, Markup), ( + "Tag name should NOT be a Markup object" + ) assert added_tag.name == tag_name, "Tag name should match" assert added_tag.type == tag_type, "Tag type should match" @@ -215,12 +215,12 @@ def test_get_tag_with_different_tag_types() -> None: tag_name = f"tag-{tag_type.name}" result = get_tag(tag_name, mock_session, tag_type) - assert isinstance( - result.name, str - ), f"Tag name for {tag_type} should be a plain string" - assert not isinstance( - result.name, Markup - ), f"Tag name for {tag_type} should NOT be a Markup object" + assert isinstance(result.name, str), ( + f"Tag name for {tag_type} should be a plain string" + ) + assert not isinstance(result.name, Markup), ( + f"Tag name for {tag_type} should NOT be a Markup object" + ) assert result.type == tag_type, f"Tag type should be {tag_type}" @@ -240,24 +240,24 @@ def test_tag_name_type_after_database_operation() -> None: result = get_tag(tag_name, mock_session, TagType.custom) # Verify the tag object before database operations - assert isinstance( - result.name, str - ), "Tag name should be a string before DB operations" - assert not isinstance( - result.name, Markup - ), "Tag name should NOT be Markup before DB operations" + assert isinstance(result.name, str), ( + "Tag name should be a string before DB operations" + ) + assert not isinstance(result.name, Markup), ( + "Tag name should NOT be Markup before DB operations" + ) # Verify that session.add was called with the correct tag mock_session.add.assert_called_once() added_tag = mock_session.add.call_args[0][0] # The critical check: ensure the name passed to the database is a plain string - assert isinstance( - added_tag.name, str - ), "Tag name should be a plain string when added to session" - assert not isinstance( - added_tag.name, Markup - ), "Tag name should NOT be Markup when added to session" - assert ( - added_tag.name.__class__ is str - ), "Tag name should be exactly str type, not a subclass" + assert isinstance(added_tag.name, str), ( + "Tag name should be a plain string when added to session" + ) + assert not isinstance(added_tag.name, Markup), ( + "Tag name should NOT be Markup when added to session" + ) + assert added_tag.name.__class__ is str, ( + "Tag name should be exactly str type, not a subclass" + ) diff --git a/tests/unit_tests/utils/test_core.py b/tests/unit_tests/utils/test_core.py index b9478e79c782..37b8ed1877a1 100644 --- a/tests/unit_tests/utils/test_core.py +++ b/tests/unit_tests/utils/test_core.py @@ -637,9 +637,9 @@ def test_get_user_agent(mocker: MockerFixture, app_context: None) -> None: database_mock = mocker.MagicMock() database_mock.database_name = "mydb" - assert ( - get_user_agent(database_mock, QuerySource.DASHBOARD) == "Apache Superset" - ), "The default user agent should be returned" + assert get_user_agent(database_mock, 
QuerySource.DASHBOARD) == "Apache Superset", ( + "The default user agent should be returned" + ) @with_config( @@ -652,9 +652,9 @@ def test_get_user_agent_custom(mocker: MockerFixture, app_context: None) -> None database_mock = mocker.MagicMock() database_mock.database_name = "mydb" - assert ( - get_user_agent(database_mock, QuerySource.DASHBOARD) == "mydb DASHBOARD" - ), "the custom user agent function result should have been returned" + assert get_user_agent(database_mock, QuerySource.DASHBOARD) == "mydb DASHBOARD", ( + "the custom user agent function result should have been returned" + ) def test_merge_extra_filters(): diff --git a/tests/unit_tests/utils/test_screenshot_cache_fix.py b/tests/unit_tests/utils/test_screenshot_cache_fix.py index ce077480c532..8d0946abdf9e 100644 --- a/tests/unit_tests/utils/test_screenshot_cache_fix.py +++ b/tests/unit_tests/utils/test_screenshot_cache_fix.py @@ -163,9 +163,9 @@ def check_cache_during_screenshot(*args, **kwargs): cache_key = screenshot_obj.get_cache_key() cached_value = BaseScreenshot.cache.get(cache_key) # Cache should be empty during screenshot generation - assert ( - cached_value is None - ), "Cache should not be saved during COMPUTING state" + assert cached_value is None, ( + "Cache should not be saved during COMPUTING state" + ) return b"image_data" mocker.patch( From a2004e0d33405854cf284778cd4222dcedceb262 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Tue, 2 Dec 2025 16:49:20 -0500 Subject: [PATCH 13/18] fix: update logic of displaying submenu and update style using pre-commit --- .../ExploreChartHeader.test.tsx | 354 ++++++++++++++++-- .../useExploreAdditionalActionsMenu/index.jsx | 157 ++++---- 2 files changed, 404 insertions(+), 107 deletions(-) diff --git a/superset-frontend/src/explore/components/ExploreChartHeader/ExploreChartHeader.test.tsx b/superset-frontend/src/explore/components/ExploreChartHeader/ExploreChartHeader.test.tsx index f5512cdba463..d3c899efe30d 100644 --- a/superset-frontend/src/explore/components/ExploreChartHeader/ExploreChartHeader.test.tsx +++ b/superset-frontend/src/explore/components/ExploreChartHeader/ExploreChartHeader.test.tsx @@ -33,6 +33,9 @@ import * as exploreUtils from 'src/explore/exploreUtils'; import { FeatureFlag, VizType } from '@superset-ui/core'; import { useUnsavedChangesPrompt } from 'src/hooks/useUnsavedChangesPrompt'; import ExploreHeader from '.'; +import { getChartMetadataRegistry } from '@superset-ui/core'; +import fs from 'fs'; +import path from 'path'; const chartEndpoint = 'glob:*api/v1/chart/*'; @@ -46,6 +49,13 @@ jest.mock('src/hooks/useUnsavedChangesPrompt', () => ({ useUnsavedChangesPrompt: jest.fn(), })); +const mockExportCurrentViewBehavior = () => { + const registry = getChartMetadataRegistry(); + return jest.spyOn(registry, 'get').mockReturnValue({ + behaviors: ['EXPORT_CURRENT_VIEW'], + } as any); +}; + const createProps = (additionalProps = {}) => ({ chart: { id: 1, @@ -65,6 +75,7 @@ const createProps = (additionalProps = {}) => ({ link_length: '25', x_axis_label: 'age', y_axis_label: 'count', + server_pagination: false as any, }, chartStatus: 'rendered', }, @@ -407,7 +418,7 @@ describe('Additional actions tests', () => { expect( await screen.findByText('Edit chart properties'), ).toBeInTheDocument(); - expect(screen.getByText('Download')).toBeInTheDocument(); + expect(screen.getByText('Data Export Options')).toBeInTheDocument(); expect(screen.getByText('Share')).toBeInTheDocument(); expect(screen.getByText('View query')).toBeInTheDocument(); 
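    // The flat 'Download' entry was replaced by a nested 'Data Export Options'
    // submenu ('Export All Data' / 'Export Current View'); the hover-based
    // assertions below walk that hierarchy.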
expect(screen.getByText('Run in SQL Lab')).toBeInTheDocument(); @@ -418,7 +429,7 @@ describe('Additional actions tests', () => { expect(screen.queryByText('Manage email report')).not.toBeInTheDocument(); }); - test('Should open download submenu', async () => { + test('Should open all data download submenu', async () => { const props = createProps(); render(, { useRedux: true, @@ -426,15 +437,45 @@ describe('Additional actions tests', () => { userEvent.click(screen.getByLabelText('Menu actions trigger')); - expect(screen.queryByText('Export to .CSV')).not.toBeInTheDocument(); - expect(screen.queryByText('Export to .JSON')).not.toBeInTheDocument(); - expect(screen.queryByText('Download as image')).not.toBeInTheDocument(); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); - expect(screen.getByText('Download')).toBeInTheDocument(); - userEvent.hover(screen.getByText('Download')); expect(await screen.findByText('Export to .CSV')).toBeInTheDocument(); expect(await screen.findByText('Export to .JSON')).toBeInTheDocument(); - expect(await screen.findByText('Download as image')).toBeInTheDocument(); + expect(await screen.findByText('Export to Excel')).toBeInTheDocument(); + expect( + await screen.findByText('Export screenshot (jpeg)'), + ).toBeInTheDocument(); + }); + + test('Should open current view data download submenu', async () => { + const props = createProps(); + props.chart.latestQueryFormData.viz_type = VizType.Table; + + // Force-enable EXPORT_CURRENT_VIEW for this viz in this test + const registry = getChartMetadataRegistry(); + const getSpy = jest.spyOn(registry, 'get').mockReturnValue({ + behaviors: ['EXPORT_CURRENT_VIEW'], + } as any); + + render(, { useRedux: true }); + + userEvent.click(screen.getByLabelText('Menu actions trigger')); + userEvent.hover(screen.getByText('Data Export Options')); + + // Now the submenu should exist + userEvent.hover(await screen.findByText('Export Current View')); + + expect(await screen.findByText('Export to .CSV')).toBeInTheDocument(); + expect(await screen.findByText('Export to .JSON')).toBeInTheDocument(); + expect( + await screen.findByText(/Export to (Excel|\.XLSX)/i), + ).toBeInTheDocument(); + expect( + await screen.findByText('Export screenshot (jpeg)'), + ).toBeInTheDocument(); + + getSpy.mockRestore(); }); test('Should open share submenu', async () => { @@ -508,7 +549,7 @@ describe('Additional actions tests', () => { }); // eslint-disable-next-line no-restricted-globals -- TODO: Migrate from describe blocks - describe('Download', () => { + describe('Export All Data', () => { let spyDownloadAsImage = sinon.spy(); let spyExportChart = sinon.spy(); @@ -532,7 +573,7 @@ describe('Additional actions tests', () => { await new Promise(resolve => setTimeout(resolve, 0)); }); - test('Should call downloadAsImage when click on "Download as image"', async () => { + test('Should call downloadAsImage when click on "Export screenshot (jpeg)"', async () => { const props = createProps(); const spy = jest.spyOn(downloadAsImage, 'default'); render(, { @@ -546,10 +587,12 @@ describe('Additional actions tests', () => { }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); - const downloadAsImageElement = - await screen.findByText('Download as image'); + const downloadAsImageElement = await screen.findByText( + 
'Export screenshot (jpeg)', + ); userEvent.click(downloadAsImageElement); await waitFor(() => { @@ -563,7 +606,8 @@ describe('Additional actions tests', () => { useRedux: true, }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportCSVElement = await screen.findByText('Export to .CSV'); userEvent.click(exportCSVElement); expect(spyExportChart.callCount).toBe(0); @@ -578,7 +622,8 @@ describe('Additional actions tests', () => { }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportCSVElement = await screen.findByText('Export to .CSV'); userEvent.click(exportCSVElement); expect(spyExportChart.callCount).toBe(1); @@ -591,7 +636,8 @@ describe('Additional actions tests', () => { useRedux: true, }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportJsonElement = await screen.findByText('Export to .JSON'); userEvent.click(exportJsonElement); expect(spyExportChart.callCount).toBe(0); @@ -606,7 +652,8 @@ describe('Additional actions tests', () => { }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportJsonElement = await screen.findByText('Export to .JSON'); userEvent.click(exportJsonElement); expect(spyExportChart.callCount).toBe(1); @@ -620,7 +667,8 @@ describe('Additional actions tests', () => { }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportCSVElement = await screen.findByText( 'Export to pivoted .CSV', ); @@ -637,7 +685,8 @@ describe('Additional actions tests', () => { }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportCSVElement = await screen.findByText( 'Export to pivoted .CSV', ); @@ -651,7 +700,8 @@ describe('Additional actions tests', () => { useRedux: true, }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportExcelElement = await screen.findByText('Export to Excel'); userEvent.click(exportExcelElement); expect(spyExportChart.callCount).toBe(0); @@ -665,10 +715,272 @@ describe('Additional actions tests', () => { useRedux: true, }); userEvent.click(screen.getByLabelText('Menu actions trigger')); - userEvent.hover(screen.getByText('Download')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export All Data')); const exportExcelElement = await 
screen.findByText('Export to Excel'); userEvent.click(exportExcelElement); expect(spyExportChart.callCount).toBe(1); }); }); + + describe('Current View', () => { + let spyDownloadAsImage = sinon.spy(); + let spyExportChart = sinon.spy(); + + let originalURL: typeof URL; + let anchorClickSpy: jest.SpyInstance; + + beforeAll(() => { + originalURL = global.URL; + + // Replace global.URL with a version that has the blob helpers + const mockedURL = { + ...originalURL, + createObjectURL: jest.fn(() => 'blob:mock-url'), + revokeObjectURL: jest.fn(), + } as unknown as typeof URL; + + Object.defineProperty(global, 'URL', { + writable: true, + value: mockedURL, + }); + + // Avoid jsdom navigation side-effects on .click() + anchorClickSpy = jest + .spyOn(HTMLAnchorElement.prototype, 'click') + .mockImplementation(() => {}); + }); + + afterAll(() => { + // restore URL + Object.defineProperty(global, 'URL', { + writable: true, + value: originalURL, + }); + anchorClickSpy.mockRestore(); + }); + + beforeEach(() => { + spyDownloadAsImage = sinon.spy(downloadAsImage, 'default'); + spyExportChart = sinon.spy(exploreUtils, 'exportChart'); + + (useUnsavedChangesPrompt as jest.Mock).mockReturnValue({ + showModal: false, + setShowModal: jest.fn(), + handleConfirmNavigation: jest.fn(), + handleSaveAndCloseModal: jest.fn(), + triggerManualSave: jest.fn(), + }); + }); + + afterEach(async () => { + spyDownloadAsImage.restore(); + spyExportChart.restore(); + await new Promise(r => setTimeout(r, 0)); + }); + + test('Screenshot (Current View) calls downloadAsImage', async () => { + const props = createProps(); + props.chart.latestQueryFormData.viz_type = VizType.Table; + + const getSpy = mockExportCurrentViewBehavior(); + + render(, { useRedux: true }); + + userEvent.click(screen.getByLabelText('Menu actions trigger')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export Current View')); + + // clear previous calls on the sinon spy you created in beforeEach + spyDownloadAsImage.resetHistory(); + + const item = await screen.findByText('Export screenshot (jpeg)'); + userEvent.click(item); + + await waitFor(() => { + expect(spyDownloadAsImage.called).toBe(true); + }); + + getSpy.mockRestore(); + }); + + test('CSV (Current View) uses client-side export when pagination disabled & clientView present', async () => { + const props = createProps({ + ownState: { + clientView: { + columns: [ + { key: 'a', label: 'A' }, + { key: 'b', label: 'B' }, + ], + rows: [ + { a: 1, b: 'x' }, + { a: 2, b: 'y' }, + ], + }, + }, + }); + props.canDownload = true; + props.chart.latestQueryFormData.viz_type = VizType.Table; + props.chart.latestQueryFormData.server_pagination = false; + + const getSpy = mockExportCurrentViewBehavior(); + + render(, { useRedux: true }); + + userEvent.click(screen.getByLabelText('Menu actions trigger')); + userEvent.hover(screen.getByText('Data Export Options')); + userEvent.hover(await screen.findByText('Export Current View')); + + spyExportChart.resetHistory(); + + userEvent.click(await screen.findByText('Export to .CSV')); + + expect(spyExportChart.called).toBe(false); // or: expect(spyExportChart.callCount).toBe(0) + + getSpy.mockRestore(); + }); + + test('JSON (Current View) uses client-side export when pagination disabled & clientView present', async () => { + const props = createProps({ + ownState: { + clientView: { + columns: [{ key: 'a', label: 'A' }], + rows: [{ a: 123 }], + }, + }, + }); + props.canDownload = true; + 
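      // Same setup as the CSV case above: the client-side export path exercised
      // here assumes a Table viz with server_pagination off and rows present in
      // ownState.clientView; the fallback tests flip server_pagination to assert
      // that exportChart is used instead.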
+      props.chart.latestQueryFormData.viz_type = VizType.Table;
+      props.chart.latestQueryFormData.server_pagination = false;
+
+      const getSpy = mockExportCurrentViewBehavior();
+
+      render(, { useRedux: true });
+
+      userEvent.click(screen.getByLabelText('Menu actions trigger'));
+      userEvent.hover(screen.getByText('Data Export Options'));
+      userEvent.hover(await screen.findByText('Export Current View'));
+
+      spyExportChart.resetHistory();
+      userEvent.click(await screen.findByText('Export to .JSON'));
+
+      expect(spyExportChart.called).toBe(false);
+
+      getSpy.mockRestore();
+    });
+
+    test('CSV (Current View) falls back to server export when server_pagination is true', async () => {
+      const props = createProps();
+      props.canDownload = true;
+      props.chart.latestQueryFormData.viz_type = VizType.Table;
+      props.chart.latestQueryFormData.server_pagination = true;
+
+      const getSpy = mockExportCurrentViewBehavior();
+
+      render(, { useRedux: true });
+
+      userEvent.click(screen.getByLabelText('Menu actions trigger'));
+      userEvent.hover(screen.getByText('Data Export Options'));
+      userEvent.hover(await screen.findByText('Export Current View'));
+
+      spyExportChart.resetHistory();
+      userEvent.click(await screen.findByText('Export to .CSV'));
+
+      expect(spyExportChart.callCount).toBe(1);
+      const args = spyExportChart.getCall(0).args[0];
+      expect(args.resultType).toBe('results');
+      expect(args.resultFormat).toBe('csv');
+
+      getSpy.mockRestore();
+    });
+
+    test('Excel (Current View) uses client-side export when pagination disabled & clientView present', async () => {
+      const props = createProps({
+        ownState: {
+          clientView: {
+            columns: [{ key: 'c', label: 'C' }],
+            rows: [{ c: 'foo' }],
+          },
+        },
+      });
+      props.canDownload = true;
+      props.chart.latestQueryFormData.viz_type = VizType.Table;
+      props.chart.latestQueryFormData.server_pagination = false;
+
+      const getSpy = mockExportCurrentViewBehavior();
+      render(, { useRedux: true });
+
+      userEvent.click(await screen.findByLabelText('Menu actions trigger'));
+      userEvent.hover(screen.getByText('Data Export Options'));
+      userEvent.hover(await screen.findByText('Export Current View'));
+
+      spyExportChart.resetHistory();
+      userEvent.click(await screen.findByText(/Export to (Excel|\.XLSX)/i));
+
+      expect(spyExportChart.called).toBe(false);
+      getSpy.mockRestore();
+    });
+
+    test('Excel (Current View) falls back to server export when server_pagination is true', async () => {
+      const props = createProps();
+      props.canDownload = true;
+      props.chart.latestQueryFormData.viz_type = VizType.Table;
+      props.chart.latestQueryFormData.server_pagination = true;
+
+      const getSpy = mockExportCurrentViewBehavior();
+      render(, { useRedux: true });
+
+      userEvent.click(await screen.findByLabelText('Menu actions trigger'));
+      userEvent.hover(screen.getByText('Data Export Options'));
+      userEvent.hover(await screen.findByText('Export Current View'));
+
+      spyExportChart.resetHistory();
+      userEvent.click(await screen.findByText(/Export to (Excel|\.XLSX)/i));
+
+      expect(spyExportChart.callCount).toBe(1);
+      const args = spyExportChart.getCall(0).args[0];
+      expect(args.resultType).toBe('results');
+      expect(args.resultFormat).toBe('xlsx');
+      getSpy.mockRestore();
+
+      // Clean up any .xlsx files the export wrote to the working directory
+      const cwd = process.cwd();
+      for (const file of fs.readdirSync(cwd)) {
+        if (file.endsWith('.xlsx')) {
+          fs.unlinkSync(path.join(cwd, file));
+        }
+      }
+    });
+
+    test('JSON (Current View) falls back to server export when server_pagination is true', async () => {
+      const props = createProps();
+      props.canDownload = true;
+      props.chart.latestQueryFormData.viz_type = VizType.Table;
+      props.chart.latestQueryFormData.server_pagination = true;
+
+      const getSpy = mockExportCurrentViewBehavior();
+
+      render(, { useRedux: true });
+
+      userEvent.click(screen.getByLabelText('Menu actions trigger'));
+      userEvent.hover(screen.getByText('Data Export Options'));
+      userEvent.hover(await screen.findByText('Export Current View'));
+
+      // Server path expected: use the sinon spy and inspect the call args
+      spyExportChart.resetHistory();
+
+      const jsonItem = await screen.findByText('Export to .JSON');
+      userEvent.click(jsonItem);
+
+      await waitFor(() => {
+        expect(spyExportChart.callCount).toBe(1);
+      });
+
+      const args = spyExportChart.getCall(0).args[0];
+      expect(args.resultType).toBe('results');
+      expect(args.resultFormat).toBe('json');
+
+      getSpy.mockRestore();
+    });
+  });
 });
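
Note: the suites above call a mockExportCurrentViewBehavior() helper that is
defined elsewhere in this test module and does not appear in these hunks. The
only contract the tests rely on is that it returns a jest spy, so that each
test can call getSpy.mockRestore() afterwards. A minimal sketch of that
contract, in which every identifier is illustrative and none of it is code
from this patch:

    // Sketch only: stands in for the real helper, which may stub a
    // registry lookup or a chart data transform instead. The one hard
    // requirement the tests impose is that a jest spy comes back.
    const mockExportCurrentViewBehavior = (): jest.SpyInstance =>
      jest
        .spyOn(clientViewUtils, 'resolveClientView') // hypothetical module/method
        .mockImplementation(() => null);
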
diff --git a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
index f176aa9c5ae2..48f5b27a727c 100644
--- a/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
+++ b/superset-frontend/src/explore/components/useExploreAdditionalActionsMenu/index.jsx
@@ -56,11 +56,19 @@ const MENU_KEYS = {
   EDIT_PROPERTIES: 'edit_properties',
   DASHBOARDS_ADDED_TO: 'dashboards_added_to',
   DOWNLOAD_SUBMENU: 'download_submenu',
-  EXPORT_TO_CSV: 'export_to_csv',
+  DATA_EXPORT_OPTIONS: 'data_export_options',
+  EXPORT_ALL_DATA_GROUP: 'export_all_data_group',
+  EXPORT_CURRENT_VIEW_GROUP: 'export_current_view_group',
   EXPORT_TO_CSV_PIVOTED: 'export_to_csv_pivoted',
+  EXPORT_TO_PIVOT_XLSX: 'export_to_pivot_xlsx',
+  EXPORT_TO_CSV: 'export_to_csv',
   EXPORT_TO_JSON: 'export_to_json',
   EXPORT_TO_XLSX: 'export_to_xlsx',
-  DOWNLOAD_AS_IMAGE: 'download_as_image',
+  EXPORT_ALL_SCREENSHOT: 'export_all_screenshot',
+  EXPORT_CURRENT_TO_CSV: 'export_current_to_csv',
+  EXPORT_CURRENT_TO_JSON: 'export_current_to_json',
+  EXPORT_CURRENT_SCREENSHOT: 'export_current_screenshot',
+  EXPORT_CURRENT_XLSX: 'export_current_xlsx',
   SHARE_SUBMENU: 'share_submenu',
   COPY_PERMALINK: 'copy_permalink',
   EMBED_CODE: 'embed_code',
@@ -72,12 +80,6 @@ const MENU_KEYS = {
   DELETE_REPORT: 'delete_report',
   VIEW_QUERY: 'view_query',
   RUN_IN_SQL_LAB: 'run_in_sql_lab',
-  EXPORT_TO_PIVOT_XLSX: 'export_to_pivot_xlsx',
-  DATA_EXPORT_OPTIONS: 'data_export_options',
-  EXPORT_ALL_DATA_GROUP: 'export_all_data_group',
-  EXPORT_CURRENT_VIEW_GROUP: 'export_current_view_group',
-  EXPORT_CURRENT_TO_CSV: 'export_current_to_csv',
-  EXPORT_ALL_SCREENSHOT: 'export_all_screenshot',
 };

 const VIZ_TYPES_PIVOTABLE = [VizType.PivotTable];
@@ -490,10 +492,10 @@ export const useExploreAdditionalActionsMenu = (
   menuItems.push({ type: 'divider' });

   // Download submenu
-  const downloadChildren = [];
+  const allDataChildren = [];

   if (VIZ_TYPES_PIVOTABLE.includes(latestQueryFormData.viz_type)) {
-    downloadChildren.push(
+    allDataChildren.push(
       {
         key: MENU_KEYS.EXPORT_TO_CSV,
         label: t('Export to original .CSV'),
@@ -548,7 +550,7 @@ export const useExploreAdditionalActionsMenu = (
       },
     );
   } else {
-    downloadChildren.push({
+    allDataChildren.push({
       key: MENU_KEYS.EXPORT_TO_CSV,
       label: t('Export to .CSV'),
       icon: ,
@@ -566,6 +568,61 @@ export const useExploreAdditionalActionsMenu = (
     });
   }

+  allDataChildren.push(
+    {
+      key: MENU_KEYS.EXPORT_TO_JSON,
+      label: t('Export to .JSON'),
+      icon: ,
+      disabled: !canDownloadCSV,
+      onClick: () => {
+        exportJson();
+        setIsDropdownVisible(false);
+        dispatch(
+          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_JSON, {
+            chartId: slice?.slice_id,
+            chartName: slice?.slice_name,
+          }),
+        );
+      },
+    },
+    {
+      key: MENU_KEYS.EXPORT_ALL_SCREENSHOT,
+      label: t('Export screenshot (jpeg)'),
+      icon: ,
+      onClick: e => {
+        downloadAsImage(
+          '.panel-body .chart-container',
+          slice?.slice_name ?? t('New chart'),
+          true,
+          theme,
+        )(e.domEvent);
+        setIsDropdownVisible(false);
+        dispatch(
+          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_IMAGE, {
+            chartId: slice?.slice_id,
+            chartName: slice?.slice_name,
+          }),
+        );
+      },
+    },
+    {
+      key: MENU_KEYS.EXPORT_TO_XLSX,
+      label: t('Export to Excel'),
+      icon: ,
+      disabled: !canDownloadCSV,
+      onClick: () => {
+        exportExcel();
+        setIsDropdownVisible(false);
+        dispatch(
+          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_XLS, {
+            chartId: slice?.slice_id,
+            chartName: slice?.slice_name,
+          }),
+        );
+      },
+    },
+  );

   const currentViewChildren = [
     {
       key: MENU_KEYS.EXPORT_CURRENT_TO_CSV,
@@ -604,7 +661,7 @@ export const useExploreAdditionalActionsMenu = (
       },
     },
     {
-      key: MENU_KEYS.EXPORT_TO_JSON,
+      key: MENU_KEYS.EXPORT_CURRENT_TO_JSON,
       label: t('Export to .JSON'),
       icon: ,
       disabled: !canDownloadCSV,
@@ -633,7 +690,7 @@ export const useExploreAdditionalActionsMenu = (
       },
     },
     {
-      key: MENU_KEYS.DOWNLOAD_AS_IMAGE,
+      key: MENU_KEYS.EXPORT_CURRENT_SCREENSHOT,
       label: t('Export screenshot (jpeg)'),
       icon: ,
       onClick: e => {
@@ -653,7 +710,7 @@ export const useExploreAdditionalActionsMenu = (
       },
     },
     {
-      key: MENU_KEYS.EXPORT_TO_XLSX,
+      key: MENU_KEYS.EXPORT_CURRENT_XLSX,
       label: t('Export to Excel'),
       icon: ,
       disabled: !canDownloadCSV,
       onClick: () => {
@@ -685,78 +742,6 @@ export const useExploreAdditionalActionsMenu = (
     },
   ];

-  const allDataChildren = [
-    {
-      key: 'export_full_csv',
-      label: t('Export to .CSV'),
-      icon: ,
-      disabled: !canDownloadCSV,
-      onClick: () => {
-        exportCSV();
-        setIsDropdownVisible(false);
-        dispatch(
-          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_CSV, {
-            chartId: slice?.slice_id,
-            chartName: slice?.slice_name,
-          }),
-        );
-      },
-    },
-    {
-      key: 'export_full_json',
-      label: t('Export to .JSON'),
-      icon: ,
-      disabled: !canDownloadCSV,
-      onClick: () => {
-        exportJson();
-        setIsDropdownVisible(false);
-        dispatch(
-          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_JSON, {
-            chartId: slice?.slice_id,
-            chartName: slice?.slice_name,
-          }),
-        );
-      },
-    },
-    {
-      key: MENU_KEYS.EXPORT_ALL_SCREENSHOT,
-      label: t('Export screenshot (jpeg)'),
-      icon: ,
-      onClick: e => {
-        // Visual export of the currently visible chart
-        downloadAsImage(
-          '.panel-body .chart-container',
-          slice?.slice_name ?? t('New chart'),
-          true,
-          theme,
-        )(e.domEvent);
-        setIsDropdownVisible(false);
-        dispatch(
-          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_IMAGE, {
-            chartId: slice?.slice_id,
-            chartName: slice?.slice_name,
-          }),
-        );
-      },
-    },
-    {
-      key: 'export_full_xlsx',
-      label: t('Export to Excel'),
-      icon: ,
-      disabled: !canDownloadCSV,
-      onClick: () => {
-        exportExcel();
-        setIsDropdownVisible(false);
-        dispatch(
-          logEvent(LOG_ACTIONS_CHART_DOWNLOAD_AS_XLS, {
-            chartId: slice?.slice_id,
-            chartName: slice?.slice_name,
-          }),
-        );
-      },
-    },
-  ];
-
   menuItems.push({
     key: MENU_KEYS.DATA_EXPORT_OPTIONS,
     type: 'submenu',

From 33cde887db72ccf0730c2eff45255b559780b9f4 Mon Sep 17 00:00:00 2001
From: Frank Zhang
Date: Tue, 2 Dec 2025 21:30:04 -0500
Subject: [PATCH 14/18] fix: hover through the new export submenus in the
 download chart e2e test
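
antd mounts each dropdown submenu flyout in a portal under <body>, and it
typically only renders a flyout once its parent title has been hovered, so
the old nth-child click chain cannot reach the items in the new three-level
menu. The steps below therefore hover each submenu title before clicking the
target entry.

A small helper would capture the same interaction. This is a sketch only;
the helper name is illustrative and not part of this patch:

    // Open one level of an antd dropdown submenu by its visible label.
    // Flyouts live in portals under <body>, so the query must not be
    // scoped to the chart header.
    const openSubmenuByLabel = label => {
      cy.get('body .ant-dropdown-menu-title-content')
        .contains(label)
        .parents('.ant-dropdown-menu-submenu-title')
        .trigger('mouseover', { force: true });
    };

    // usage: openSubmenuByLabel('Data Export Options');
    //        openSubmenuByLabel('Export All Data');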
---
 .../visualizations/download_chart.test.js     | 22 +++++++++++++++++------
 1 file changed, 17 insertions(+), 5 deletions(-)

diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
index fdede555bded..aa71a05669af 100644
--- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
+++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
@@ -36,11 +36,23 @@ describe('Download Chart > Bar chart', () => {
     };

     cy.visitChartByParams(formData);
-    cy.get('.header-with-actions .ant-dropdown-trigger').click();
-    cy.get(':nth-child(3) > .ant-dropdown-menu-submenu-title').click();
-    cy.get(
-      '.ant-dropdown-menu-submenu > .ant-dropdown-menu li:nth-child(3)',
-    ).click();
+    // 1) Open "Data Export Options"
+    cy.get('body .ant-dropdown-menu-title-content')
+      .contains(/^Data Export Options$/)
+      .parents('.ant-dropdown-menu-submenu-title')
+      .trigger('mouseover', { force: true })
+      .click({ force: true });
+    // 2) Open "Export All Data"
+    cy.get('body .ant-dropdown-menu-title-content')
+      .contains(/^Export All Data$/)
+      .parents('.ant-dropdown-menu-submenu-title')
+      .trigger('mouseover', { force: true })
+      .click({ force: true });
+    // 3) Click the specific format (avoid partial "Export" matches)
+    cy.get('body li.ant-dropdown-menu-item')
+      .contains(/^Export to \.CSV$/)
+      .should('be.visible')
+      .click({ force: true });
     cy.verifyDownload('.jpg', {
       contains: true,
       timeout: 25000,

From 209e46b3b755c7b516ace4992ed888851035fae8 Mon Sep 17 00:00:00 2001
From: Frank Zhang
Date: Wed, 3 Dec 2025 14:18:48 -0500
Subject: [PATCH 15/18] fix: drive the export submenu navigation with
 realHover
---
 .../visualizations/download_chart.test.js     | 21 ++++-----------------
 1 file changed, 4 insertions(+), 17 deletions(-)

diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
index aa71a05669af..63cdaec71c10 100644
--- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
+++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
@@ -36,23 +36,10 @@ describe('Download Chart > Bar chart', () => {
     };

     cy.visitChartByParams(formData);
-    // 1) Open "Data Export Options"
-    cy.get('body .ant-dropdown-menu-title-content')
-      .contains(/^Data Export Options$/)
-      .parents('.ant-dropdown-menu-submenu-title')
-      .trigger('mouseover', { force: true })
-      .click({ force: true });
-    // 2) Open "Export All Data"
-    cy.get('body .ant-dropdown-menu-title-content')
-      .contains(/^Export All Data$/)
-      .parents('.ant-dropdown-menu-submenu-title')
-      .trigger('mouseover', { force: true })
-      .click({ force: true });
-    // 3) Click the specific format (avoid partial "Export" matches)
-    cy.get('body li.ant-dropdown-menu-item')
-      .contains(/^Export to \.CSV$/)
-      .should('be.visible')
-      .click({ force: true });
+    cy.get('.header-with-actions .ant-dropdown-trigger').click();
+    cy.contains('.ant-dropdown-menu-submenu-title', 'Data Export Options').realHover();
+    cy.contains('.ant-dropdown-menu-submenu-title', 'Export All Data').realHover();
+    cy.contains('.ant-dropdown-menu-item', 'Export screenshot (jpeg)').click();
     cy.verifyDownload('.jpg', {
       contains: true,
       timeout: 25000,

From 19de663618ec6cc21eac1fc0085b3ae32c46ae02 Mon Sep 17 00:00:00 2001
From: Frank Zhang
Date: Wed, 3 Dec 2025 15:14:17 -0500
Subject: [PATCH 16/18] fix: open the export submenus with mouseover before
 clicking
---
 .../visualizations/download_chart.test.js     | 18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
index 63cdaec71c10..116d7771a113 100644
--- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
+++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js
@@ -37,9 +37,21 @@ describe('Download Chart > Bar chart', () => {

     cy.visitChartByParams(formData);
     cy.get('.header-with-actions .ant-dropdown-trigger').click();
-    cy.contains('.ant-dropdown-menu-submenu-title', 'Data Export Options').realHover();
-    cy.contains('.ant-dropdown-menu-submenu-title', 'Export All Data').realHover();
-    cy.contains('.ant-dropdown-menu-item', 'Export screenshot (jpeg)').click();
+    // Open first-level submenu: "Data Export Options"
+    cy.get('.ant-dropdown-menu-submenu-title')
+      .contains('Data Export Options') // <- simple text-only contains
+      .trigger('mouseover', { force: true });
+
+    // Open second-level submenu: "Export All Data"
+    cy.get('.ant-dropdown-menu-submenu-title')
+      .contains('Export All Data')
+      .trigger('mouseover', { force: true });
+
+    // Wait for the nested submenu to appear, then click the export item
+    cy.get('.ant-dropdown-menu-submenu .ant-dropdown-menu')
+      .find('.ant-dropdown-menu-item')
+      .contains('Export screenshot (jpeg)')
+      .click({ force: true });
     cy.verifyDownload('.jpg', {
       contains: true,
       timeout: 25000,

From db04cc0b404181751ec12d9594aabe1a50e21abb Mon Sep 17 00:00:00 2001
From: Frank Zhang
Date: Wed, 3 Dec 2025 16:03:21 -0500
Subject: [PATCH 17/18] fix: add the missing ssh_tunnel field to the expected
 connection payload
---
 tests/unit_tests/databases/api_test.py | 152 ++++++++++++--------------
 1 file changed, 76 insertions(+), 76 deletions(-)

diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py
index 499f06689394..0785d762e50e 100644
--- a/tests/unit_tests/databases/api_test.py
+++ b/tests/unit_tests/databases/api_test.py
@@ -226,92 +226,92 @@ def test_database_connection(

     response = client.get("/api/v1/database/1/connection")
     assert response.json == {
+        "id": 1,
+        "result": {
+            "allow_ctas": False,
+            "allow_cvas": False,
+            "allow_dml": False,
+            "allow_file_upload": False,
+            "allow_run_async": False,
+            "backend": "gsheets",
+            "cache_timeout": None,
+            "configuration_method": "sqlalchemy_form",
+            "database_name": "my_database",
+            "driver": "gsheets",
+            "engine_information": {
+                "disable_ssh_tunneling": True,
+                "supports_dynamic_catalog": False,
+                "supports_file_upload": True,
+
"supports_oauth2": True, + }, + "expose_in_sqllab": True, + "extra": '{\n "metadata_params": {},\n "engine_params": {},\n "metadata_cache_timeout": {},\n "schemas_allowed_for_file_upload": []\n}\n', # noqa: E501 + "force_ctas_schema": None, "id": 1, - "result": { - "allow_ctas": False, - "allow_cvas": False, - "allow_dml": False, - "allow_file_upload": False, - "allow_run_async": False, - "backend": "gsheets", - "cache_timeout": None, - "configuration_method": "sqlalchemy_form", - "database_name": "my_database", - "driver": "gsheets", - "engine_information": { - "disable_ssh_tunneling": True, - "supports_dynamic_catalog": False, - "supports_file_upload": True, - "supports_oauth2": True, - }, - "expose_in_sqllab": True, - "extra": '{\n "metadata_params": {},\n "engine_params": {},\n "metadata_cache_timeout": {},\n "schemas_allowed_for_file_upload": []\n}\n', # noqa: E501 - "force_ctas_schema": None, - "id": 1, - "impersonate_user": False, - "is_managed_externally": False, - "masked_encrypted_extra": json.dumps( - { - "service_account_info": { - "type": "service_account", - "project_id": "black-sanctum-314419", - "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", # noqa: E501 - "private_key": "XXXXXXXXXX", - "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", # noqa: E501 - "client_id": "114567578578109757129", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://oauth2.googleapis.com/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", - } - } - ), - "parameters": { + "impersonate_user": False, + "is_managed_externally": False, + "masked_encrypted_extra": json.dumps( + { "service_account_info": { - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "type": "service_account", + "project_id": "black-sanctum-314419", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", + "private_key": "XXXXXXXXXX", "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", # noqa: E501 "client_id": "114567578578109757129", - "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", - "private_key": "XXXXXXXXXX", - "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", - "project_id": "black-sanctum-314419", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", "token_uri": "https://oauth2.googleapis.com/token", - "type": "service_account", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", } - }, - "parameters_schema": { - "properties": { - "catalog": {"type": "object"}, - "oauth2_client_info": { - "default": { - "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth", - "scope": ( - "https://www.googleapis.com/auth/drive.readonly " - "https://www.googleapis.com/auth/spreadsheets " - "https://spreadsheets.google.com/feeds" - ), - "token_request_uri": "https://oauth2.googleapis.com/token", - }, - "description": "OAuth2 client information", - "nullable": True, - "type": "string", - "x-encrypted-extra": 
True, - }, - "service_account_info": { - "description": "Contents of GSheets JSON credentials.", - "type": "string", - "x-encrypted-extra": True, + } + ), + "parameters": { + "service_account_info": { + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com", # noqa: E501 + "client_id": "114567578578109757129", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com", + "private_key": "XXXXXXXXXX", + "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173", + "project_id": "black-sanctum-314419", + "token_uri": "https://oauth2.googleapis.com/token", + "type": "service_account", + } + }, + "parameters_schema": { + "properties": { + "catalog": {"type": "object"}, + "oauth2_client_info": { + "default": { + "authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth", + "scope": ( + "https://www.googleapis.com/auth/drive.readonly " + "https://www.googleapis.com/auth/spreadsheets " + "https://spreadsheets.google.com/feeds" + ), + "token_request_uri": "https://oauth2.googleapis.com/token", }, + "description": "OAuth2 client information", + "nullable": True, + "type": "string", + "x-encrypted-extra": True, + }, + "service_account_info": { + "description": "Contents of GSheets JSON credentials.", + "type": "string", + "x-encrypted-extra": True, }, - "type": "object", }, - "server_cert": None, - "sqlalchemy_uri": "gsheets://", - "uuid": "02feae18-2dd6-4bb4-a9c0-49e9d4f29d58", + "type": "object", }, - } - ) + "server_cert": None, + "sqlalchemy_uri": "gsheets://", + "ssh_tunnel": None, + "uuid": "02feae18-2dd6-4bb4-a9c0-49e9d4f29d58", + }, + } response = client.get("/api/v1/database/1") assert response.json == { From 0ce21f202597ae7bc4cebfc303f22a04bc314463 Mon Sep 17 00:00:00 2001 From: Frank Zhang Date: Thu, 4 Dec 2025 00:39:25 -0500 Subject: [PATCH 18/18] fix: update e2e download chart test case --- .../visualizations/download_chart.test.js | 21 +++++++------------ 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js index 116d7771a113..029827b5e773 100644 --- a/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js +++ b/superset-frontend/cypress-base/cypress/e2e/explore/visualizations/download_chart.test.js @@ -37,21 +37,14 @@ describe('Download Chart > Bar chart', () => { cy.visitChartByParams(formData); cy.get('.header-with-actions .ant-dropdown-trigger').click(); - // Open first-level submenu: "Data Export Options" - cy.get('.ant-dropdown-menu-submenu-title') - .contains('Data Export Options') // <- simple text-only contains - .trigger('mouseover', { force: true }); + cy.get(':nth-child(3) > .ant-dropdown-menu-submenu-title').click(); + cy.get( + '.ant-dropdown-menu-submenu > .ant-dropdown-menu li:nth-child(1) > .ant-dropdown-menu-submenu-title', + ).click(); + cy.get( + '.ant-dropdown-menu-submenu > .ant-dropdown-menu li:nth-child(3)', + ).click(); - // Open second-level submenu: "Export All Data" - cy.get('.ant-dropdown-menu-submenu-title') - .contains('Export All Data') - .trigger('mouseover', { force: true }); - - // Wait for nested submenu to appear, then click the export item - 
-    cy.get('.ant-dropdown-menu-submenu .ant-dropdown-menu')
-      .find('.ant-dropdown-menu-item')
-      .contains('Export screenshot (jpeg)')
-      .click({ force: true });
     cy.verifyDownload('.jpg', {
       contains: true,
       timeout: 25000,