diff --git a/src/database/csv.js b/src/database/csv.js
new file mode 100644
index 0000000..5c40b44
--- /dev/null
+++ b/src/database/csv.js
@@ -0,0 +1,54 @@
+/**
+ * Convert a value to a CSV cell-friendly format.
+ * Primitives pass through; objects/arrays are stringified; dates are formatted.
+ */
+function toCsvValue(val) {
+  if (val === null || val === undefined) return '';
+  if (val instanceof Date) return val.toISOString();
+  if (typeof val === 'number' || typeof val === 'boolean') return String(val);
+  if (typeof val === 'string') return val;
+  if (typeof val === 'object') {
+    if (Array.isArray(val)) return val.map(toCsvValue).join(', ');
+    return JSON.stringify(val);
+  }
+  return String(val);
+}
+
+/**
+ * Escape a CSV field per RFC 4180: wrap in double quotes, escape internal quotes by doubling.
+ */
+function escapeCsvField(str) {
+  if (str == null) return '""';
+  const s = String(str);
+  if (s.includes('"') || s.includes(',') || s.includes('\n') || s.includes('\r')) {
+    return '"' + s.replace(/"/g, '""') + '"';
+  }
+  return s;
+}
+
+/**
+ * Generate a CSV buffer from tabular data.
+ * @param {Array} data - Array of row objects (keys = column headers)
+ * @param {Object} options - Options
+ * @param {string[]} [options.columnOrder] - Optional column order (uses Object.keys of first row if not provided)
+ * @returns {Buffer} CSV file as buffer
+ */
+export function generateCsvTable(data, options = {}) {
+  const { columnOrder } = options;
+
+  if (!data || data.length === 0) {
+    return Buffer.from('', 'utf8');
+  }
+
+  const keys = columnOrder || Object.keys(data[0]).filter((k) => !k.startsWith('@'));
+  const headerRow = keys.map((k) => escapeCsvField(k)).join(',');
+  const lines = [headerRow];
+
+  for (const row of data) {
+    const values = keys.map((key) => escapeCsvField(toCsvValue(row[key])));
+    lines.push(values.join(','));
+  }
+
+  const csv = lines.join('\n');
+  return Buffer.from(csv, 'utf8');
+}
diff --git a/src/index.js b/src/index.js
index edcf35a..0ccc684 100644
--- a/src/index.js
+++ b/src/index.js
@@ -46,6 +46,7 @@ import {
   notificationRoutes,
   odataRoutes,
   excelRoutes,
+  csvRoutes,
 } from './routes/index.js';
 import path from 'path';
 import * as fs from 'fs';
@@ -159,6 +160,7 @@ app.use('/usernotifiers', userNotifierRoutes);
 app.use('/notifications', notificationRoutes);
 app.use('/odata', odataRoutes);
 app.use('/excel', excelRoutes);
+app.use('/csv', csvRoutes);
 
 // Start the application
 if (process.env.NODE_ENV !== 'test') {
diff --git a/src/routes/index.js b/src/routes/index.js
index 113afa4..65091b4 100644
--- a/src/routes/index.js
+++ b/src/routes/index.js
@@ -39,6 +39,7 @@ import userNotifierRoutes from './misc/usernotifiers.js';
 import notificationRoutes from './misc/notifications.js';
 import odataRoutes from './misc/odata.js';
 import excelRoutes from './misc/excel.js';
+import csvRoutes from './misc/csv.js';
 
 export {
   userRoutes,
@@ -82,4 +83,5 @@
   notificationRoutes,
   odataRoutes,
   excelRoutes,
+  csvRoutes,
 };
diff --git a/src/routes/misc/csv.js b/src/routes/misc/csv.js
new file mode 100644
index 0000000..81cbfd2
--- /dev/null
+++ b/src/routes/misc/csv.js
@@ -0,0 +1,9 @@
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { csvExportRouteHandler } from '../../services/misc/csv.js';
+
+const router = express.Router();
+
+router.get('/:objectType', isAuthenticated, csvExportRouteHandler);
+
+export default router;
diff --git a/src/services/misc/csv.js b/src/services/misc/csv.js
new file mode 100644
index 0000000..6ee1186
--- /dev/null
+++ b/src/services/misc/csv.js
@@ -0,0 +1,178 @@
+import config from '../../config.js';
+import log4js from 'log4js';
+import { getModelByName } from './model.js';
+import { listObjectsOData } from '../../database/odata.js';
+import { getFilter } from '../../utils.js';
+import { generateCsvTable } from '../../database/csv.js';
+
+const logger = log4js.getLogger('CSV');
+logger.level = config.server.logLevel;
+
+/**
+ * Flatten nested objects for CSV display.
+ * Objects become "key.subkey: value" or JSON string; arrays become comma-separated.
+ */
+function flattenForCsv(obj, prefix = '') {
+  if (obj === null || obj === undefined) return {};
+  if (typeof obj !== 'object') return { [prefix]: obj };
+  if (obj instanceof Date) return { [prefix]: obj };
+  if (Array.isArray(obj)) {
+    const str = obj
+      .map((v) => (v && typeof v === 'object' && !(v instanceof Date) ? JSON.stringify(v) : v))
+      .join(', ');
+    return { [prefix]: str };
+  }
+  const result = {};
+  for (const [k, v] of Object.entries(obj)) {
+    const key = prefix ? `${prefix}.${k}` : k;
+    if (v !== null && typeof v === 'object' && !(v instanceof Date) && !Array.isArray(v)) {
+      Object.assign(result, flattenForCsv(v, key));
+    } else {
+      result[key] = v;
+    }
+  }
+  return result;
+}
+
+/**
+ * Convert a row to flat key-value for CSV. Nested objects are flattened.
+ */
+function rowToFlat(row) {
+  const flat = {};
+  for (const [key, val] of Object.entries(row)) {
+    if (key.startsWith('@')) continue;
+    if (val !== null && typeof val === 'object' && !(val instanceof Date) && !Array.isArray(val)) {
+      Object.assign(flat, flattenForCsv(val, key));
+    } else {
+      flat[key] = val;
+    }
+  }
+  return flat;
+}
+
+/**
+ * Get allowed filter fields for CSV export (reuse OData logic).
+ */
+function getModelFilterFields(objectType) {
+  const base = ['_id'];
+  const byType = {
+    note: ['parent._id', 'noteType', 'user'],
+    notification: ['user'],
+    userNotifier: ['user', 'object', 'objectType'],
+    printer: ['host'],
+    job: ['printer', 'gcodeFile'],
+    subJob: ['job'],
+    filamentStock: ['filament'],
+    partStock: ['part'],
+    purchaseOrder: ['vendor'],
+    orderItem: ['order._id', 'orderType', 'item._id', 'itemType', 'shipment._id'],
+    shipment: ['order._id', 'orderType', 'courierService._id'],
+    stockEvent: ['parent._id', 'parentType', 'owner._id', 'ownerType'],
+    stockAudit: ['filamentStock._id', 'partStock._id'],
+    documentJob: ['documentTemplate', 'documentPrinter', 'object._id', 'objectType'],
+    documentTemplate: ['parent._id', 'documentSize._id'],
+    salesOrder: ['client'],
+    invoice: ['to._id', 'from._id', 'order._id', 'orderType'],
+    auditLog: ['parent._id', 'parentType', 'owner._id', 'ownerType'],
+    appPassword: ['name', 'user', 'active'],
+  };
+  const extra = byType[objectType] || [];
+  return [...base, ...extra];
+}
+
+function parseOrderBy(orderby) {
+  if (!orderby || typeof orderby !== 'string') {
+    return { sort: 'createdAt', order: 'ascend' };
+  }
+  const trimmed = orderby.trim();
+  const parts = trimmed.split(/\s+/);
+  const sort = parts[0] || 'createdAt';
+  const dir = (parts[1] || 'asc').toLowerCase();
+  const order = dir === 'desc' ? 'descend' : 'ascend';
+  return { sort, order };
+}
+
+/**
+ * Generate CSV file for the given object type.
+ * @param {Object} options
+ * @param {string} options.objectType - Model type (e.g. 'appPassword', 'user')
+ * @param {Object} [options.filter] - Filter object
+ * @param {string} [options.sort] - Sort field
+ * @param {string} [options.order] - 'ascend' | 'descend'
+ * @param {number} [options.limit=10000] - Max rows to export
+ * @returns {Promise<{ buffer: Buffer, error?: Object }>}
+ */
+export async function exportToCsv({ objectType, filter = {}, sort, order, limit = 10000 }) {
+  logger.info('[CSV Export] Starting', { objectType, filter, sort, order });
+  const entry = getModelByName(objectType);
+  if (!entry?.model) {
+    logger.warn('[CSV Export] Unknown object type:', objectType);
+    return { error: { message: `Unknown object type: ${objectType}` }, code: 404 };
+  }
+
+  const orderbyStr = sort ? `${sort} ${order === 'descend' ? 'desc' : 'asc'}` : undefined;
+  const { sort: sortField, order: orderDir } = parseOrderBy(orderbyStr);
+
+  const result = await listObjectsOData({
+    model: entry.model,
+    populate: [],
+    page: 1,
+    limit,
+    filter,
+    sort: sortField,
+    order: orderDir,
+    pagination: true,
+    project: undefined,
+    count: false,
+  });
+
+  if (result?.error) {
+    logger.error('[CSV Export] listObjectsOData error:', result.error);
+    return { error: result.error, code: result.code || 500 };
+  }
+
+  const rows = result?.value || [];
+  logger.info('[CSV Export] Rows fetched', { rowCount: rows.length });
+  const flatRows = rows.map(rowToFlat);
+
+  const allKeys = new Set();
+  flatRows.forEach((r) => Object.keys(r).forEach((k) => allKeys.add(k)));
+  const columnOrder = Array.from(allKeys).sort();
+
+  let buffer;
+  try {
+    buffer = generateCsvTable(flatRows, { columnOrder });
+    logger.info('[CSV Export] Buffer generated', { bufferLength: buffer?.length });
+  } catch (err) {
+    logger.error('[CSV Export] generateCsvTable threw:', err?.message, err?.stack);
+    return { error: { message: err.message || 'Failed to generate CSV' }, code: 500 };
+  }
+
+  return { buffer };
+}
+
+/**
+ * Route handler for GET /csv/:objectType
+ */
+export const csvExportRouteHandler = async (req, res) => {
+  const objectType = req.params.objectType;
+  const allowedFilters = getModelFilterFields(objectType);
+  const filter = getFilter(req.query, allowedFilters);
+  const { sort, order } = parseOrderBy(req.query.$orderby);
+
+  const result = await exportToCsv({
+    objectType,
+    filter,
+    sort,
+    order,
+  });
+
+  if (result.error) {
+    return res.status(result.code || 500).json(result.error);
+  }
+
+  const filename = `${objectType}-export-${new Date().toISOString().slice(0, 10)}.csv`;
+  res.set('Content-Type', 'text/csv; charset=utf-8');
+  res.set('Content-Disposition', `attachment; filename="${filename}"`);
+  res.send(result.buffer);
+};