-
Notifications
You must be signed in to change notification settings - Fork 50.6k
feat(editor): CSV download support for data tables #22048
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 9 commits
d49388a
5d3d9c1
76c03e4
1ad9df0
a6561ed
9d105cc
bbeab3d
e5a4f9c
74760e0
72e9e90
29eb2dd
fe90466
3215956
3b86f72
2a53fa2
8ef0bcf
2e169bd
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -261,6 +261,36 @@ export class DataTableController { | |
| } | ||
| } | ||
|
|
||
| @Get('/:dataTableId/download-csv') | ||
| @ProjectScope('dataTable:read') | ||
| async downloadDataTableCsv( | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Can we add a integration test for this endpoint? that will automatically add coverage to all the backend code (or most of it) |
||
| req: AuthenticatedRequest<{ projectId: string; dataTableId: string }>, | ||
| _res: Response, | ||
| ) { | ||
| try { | ||
| const { projectId, dataTableId } = req.params; | ||
|
|
||
| // Generate CSV content - this will validate that the table exists | ||
| const { csvContent, dataTableName } = await this.dataTableService.generateDataTableCsv( | ||
| dataTableId, | ||
| projectId, | ||
| ); | ||
|
|
||
| return { | ||
| csvContent, | ||
| dataTableName, | ||
| }; | ||
| } catch (e: unknown) { | ||
| if (e instanceof DataTableNotFoundError) { | ||
| throw new NotFoundError(e.message); | ||
| } else if (e instanceof Error) { | ||
| throw new InternalServerError(e.message, e); | ||
| } else { | ||
| throw e; | ||
| } | ||
| } | ||
| } | ||
|
|
||
| /** | ||
| * @returns the IDs of the inserted rows | ||
| */ | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -688,4 +688,115 @@ export class DataTableService { | |
| dataTables: accessibleDataTables, | ||
| }; | ||
| } | ||
|
|
||
| async generateDataTableCsv( | ||
| dataTableId: string, | ||
| projectId: string, | ||
| ): Promise<{ csvContent: string; dataTableName: string }> { | ||
| const dataTable = await this.validateDataTableExists(dataTableId, projectId); | ||
|
|
||
| // Fetch columns (ordered by index) | ||
|
||
| const columns = await this.dataTableColumnRepository.getColumns(dataTableId); | ||
|
|
||
| // Fetch ALL rows (no pagination for export) | ||
| const { data: rows } = await this.dataTableRowsRepository.getManyAndCount( | ||
| dataTableId, | ||
| { | ||
| skip: 0, | ||
| take: -1, // Get all rows | ||
| }, | ||
| columns, | ||
| ); | ||
|
|
||
| // Build CSV | ||
| const csvContent = this.buildCsvContent(rows, columns); | ||
|
|
||
| return { | ||
| csvContent, | ||
| dataTableName: dataTable.name, | ||
| }; | ||
| } | ||
|
|
||
| private buildCsvContent(rows: DataTableRowReturn[], columns: DataTableColumn[]): string { | ||
| // Sort columns once to avoid repeated sorting in the row loop | ||
| const sortedColumns = [...columns].sort((a, b) => a.index - b.index); | ||
|
|
||
| // Build header row: id + user columns + createdAt/updatedAt at the end | ||
| const userHeaders = sortedColumns.map((col) => col.name); | ||
| const headers = ['id', ...userHeaders, 'createdAt', 'updatedAt']; | ||
|
|
||
| // Escape and join headers | ||
| const csvRows: string[] = [headers.map((h) => this.escapeCsvValue(h)).join(',')]; | ||
|
|
||
| // Build data rows | ||
| for (const row of rows) { | ||
| const values: string[] = []; | ||
|
|
||
| // Add id first | ||
| values.push(this.escapeCsvValue(row.id)); | ||
|
|
||
| // Add user column values (in correct order) | ||
| for (const column of sortedColumns) { | ||
| const value = row[column.name]; | ||
| values.push(this.escapeCsvValue(this.formatValueForCsv(value, column.type))); | ||
| } | ||
|
|
||
| // Add createdAt and updatedAt at the end | ||
| values.push(this.escapeCsvValue(this.formatDateForCsv(row.createdAt))); | ||
| values.push(this.escapeCsvValue(this.formatDateForCsv(row.updatedAt))); | ||
|
|
||
| csvRows.push(values.join(',')); | ||
| } | ||
|
|
||
| return csvRows.join('\n'); | ||
| } | ||
|
|
||
| private formatValueForCsv(value: unknown, columnType: DataTableColumnType): string { | ||
| // Handle NULL/undefined | ||
| if (value === null || value === undefined) { | ||
| return ''; | ||
| } | ||
|
|
||
| // Handle dates - always use ISO format for CSV | ||
| if (columnType === 'date') { | ||
| if (value instanceof Date || typeof value === 'string') { | ||
| return this.formatDateForCsv(value); | ||
| } | ||
| } | ||
|
|
||
| // Handle booleans - already normalized to true/false by normalizeRows | ||
| if (columnType === 'boolean') { | ||
| return String(value); | ||
| } | ||
|
|
||
| // Handle numbers | ||
| if (columnType === 'number') { | ||
| return String(value); | ||
| } | ||
|
|
||
| // Handle strings and everything else | ||
| return String(value); | ||
| } | ||
|
|
||
| private formatDateForCsv(date: Date | string): string { | ||
| if (date instanceof Date) { | ||
| return date.toISOString(); | ||
| } | ||
| // If it's already a string, try to parse and format | ||
| const parsed = new Date(date); | ||
| return !isNaN(parsed.getTime()) ? parsed.toISOString() : String(date); | ||
| } | ||
|
|
||
| private escapeCsvValue(value: unknown): string { | ||
| const str = String(value); | ||
|
|
||
| // RFC 4180 compliant escaping: | ||
| // - If value contains comma, quote, or newline, wrap in quotes | ||
| // - Escape quotes by doubling them | ||
| if (str.includes(',') || str.includes('"') || str.includes('\n') || str.includes('\r')) { | ||
| return `"${str.replace(/"/g, '""')}"`; | ||
| } | ||
|
|
||
| return str; | ||
| } | ||
|
Comment on lines
775
to
795
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Looks like leading/trailing whitespace won't be preserved. Was this intentional? I guess we should keep the original data as it comes.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Good point. Made the change so that if there is leading or trailing whitespace, the string will be in quotes. This helps import into Google Sheets. |
||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -40,6 +40,11 @@ const telemetry = useTelemetry(); | |
|
|
||
| const actions = computed<Array<UserAction<IUser>>>(() => { | ||
| const availableActions = [ | ||
| { | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I'm surprised that we added this here and not next to the search input |
||
| label: i18n.baseText('dataTable.download.csv'), | ||
| value: DATA_TABLE_CARD_ACTIONS.DOWNLOAD_CSV, | ||
| disabled: false, | ||
| }, | ||
| { | ||
| label: i18n.baseText('generic.delete'), | ||
| value: DATA_TABLE_CARD_ACTIONS.DELETE, | ||
|
|
@@ -67,6 +72,10 @@ const onAction = async (action: string) => { | |
| }); | ||
| break; | ||
| } | ||
| case DATA_TABLE_CARD_ACTIONS.DOWNLOAD_CSV: { | ||
| await downloadDataTableCsv(); | ||
| break; | ||
| } | ||
| case DATA_TABLE_CARD_ACTIONS.DELETE: { | ||
| const promptResponse = await message.confirm( | ||
| i18n.baseText('dataTable.delete.confirm.message', { | ||
|
|
@@ -86,6 +95,19 @@ const onAction = async (action: string) => { | |
| } | ||
| }; | ||
|
|
||
| const downloadDataTableCsv = async () => { | ||
| try { | ||
| await dataTableStore.downloadDataTableCsv(props.dataTable.id, props.dataTable.projectId); | ||
|
|
||
| telemetry.track('User downloaded data table CSV', { | ||
| data_table_id: props.dataTable.id, | ||
| data_table_project_id: props.dataTable.projectId, | ||
| }); | ||
| } catch (error) { | ||
| toast.showError(error, i18n.baseText('dataTable.download.error')); | ||
| } | ||
| }; | ||
|
|
||
| const deleteDataTable = async () => { | ||
| try { | ||
| const deleted = await dataTableStore.deleteDataTable( | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -15,6 +15,7 @@ import { | |
| updateDataTableRowsApi, | ||
| deleteDataTableRowsApi, | ||
| fetchDataTableGlobalLimitInBytes, | ||
| downloadDataTableCsvApi, | ||
| uploadCsvFileApi, | ||
| } from '@/features/core/dataTable/dataTable.api'; | ||
| import type { | ||
|
|
@@ -273,6 +274,36 @@ export const useDataTableStore = defineStore(DATA_TABLE_STORE, () => { | |
| return result; | ||
| }; | ||
|
|
||
| const downloadDataTableCsv = async (dataTableId: string, projectId: string) => { | ||
|
||
| // Fetch CSV content with authentication | ||
| const { csvContent, filename } = await downloadDataTableCsvApi( | ||
| rootStore.restApiContext, | ||
| dataTableId, | ||
| projectId, | ||
| ); | ||
|
|
||
| // Create blob with UTF-8 BOM for Excel compatibility | ||
| const bom = '\uFEFF'; | ||
| const blob = new Blob([bom + csvContent], { type: 'text/csv;charset=utf-8;' }); | ||
| const url = URL.createObjectURL(blob); | ||
|
|
||
| const tempElement = document.createElement('a'); | ||
| tempElement.setAttribute('href', url); | ||
| tempElement.setAttribute('download', filename); | ||
| tempElement.style.display = 'none'; | ||
| document.body.appendChild(tempElement); | ||
|
|
||
| try { | ||
| tempElement.click(); | ||
| } finally { | ||
| // Ensure cleanup happens even if click fails | ||
| if (document.body.contains(tempElement)) { | ||
| document.body.removeChild(tempElement); | ||
| } | ||
| URL.revokeObjectURL(url); | ||
| } | ||
| }; | ||
|
|
||
| return { | ||
| dataTables, | ||
| totalCount, | ||
|
|
@@ -295,6 +326,7 @@ export const useDataTableStore = defineStore(DATA_TABLE_STORE, () => { | |
| insertEmptyRow, | ||
| updateRow, | ||
| deleteRows, | ||
| downloadDataTableCsv, | ||
| projectPermissions, | ||
| }; | ||
| }); | ||
Uh oh!
There was an error while loading. Please reload this page.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This endpoint returns full table data but is guarded only by
`dataTable:read`, letting callers who can see metadata download entire datasets without the `dataTable:readRow` permission that other row-returning routes require. Align the scope with existing row access to prevent unauthorized data export. Prompt for AI agents