import dotenv from 'dotenv';
import { fileModel } from './schemas/management/file.schema.js';
import _ from 'lodash';
import {
  deleteAuditLog,
  distributeDelete,
  expandObjectIds,
  modelHasRef,
  getFieldsByRef,
  getQueryToCacheKey,
} from '../utils.js';
import log4js from 'log4js';
import {
  editAuditLog,
  distributeUpdate,
  newAuditLog,
  distributeNew,
  distributeChildUpdate,
  distributeChildDelete,
  distributeChildNew,
  distributeStats,
} from '../utils.js';
import { getAllModels } from '../services/misc/model.js';
import { redisServer } from './redis.js';
import { auditLogModel } from './schemas/management/auditlog.schema.js';

dotenv.config();

const logger = log4js.getLogger('Database');
logger.level = process.env.LOG_LEVEL;

const cacheLogger = log4js.getLogger('DatabaseCache');
cacheLogger.level = process.env.LOG_LEVEL;

const CACHE_TTL_SECONDS = parseInt(process.env.REDIS_CACHE_TTL || '30', 10);

export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
  if (!model || !id) return undefined;

  const cacheKey = getQueryToCacheKey({ model: model.modelName, id, populate });

  cacheLogger.trace('Retrieving object from cache:', { model: model.modelName, id, populate });

  try {
    const cachedObject = await redisServer.getKey(cacheKey);
    if (cachedObject == null) {
      cacheLogger.trace('Cache miss:', { model: model.modelName, id });
      return undefined;
    }

    cacheLogger.trace('Cache hit:', {
      model: model.modelName,
      id: id.toString(),
    });

    return cachedObject;
  } catch (err) {
    cacheLogger.error('Error retrieving object from Redis cache:', err);
    return undefined;
  }
};

export const updateObjectCache = async ({ model, id, object, populate = [] }) => {
  if (!model || !id || !object) return object;

  const cacheKeyFilter = `${model.modelName}:${id?.toString()}*`;
  const cacheKey = getQueryToCacheKey({ model: model.modelName, id, populate });

  cacheLogger.trace('Updating object cache:', cacheKeyFilter);

  try {
    // Get all keys matching the filter pattern
    const matchingKeys = await redisServer.getKeysByPattern(cacheKeyFilter);
    // Merge the object with each cached object and update
    const mergedObjects = [];
    for (const key of matchingKeys) {
      cacheLogger.trace('Updating object cache:', key);
      const cachedObject = (await redisServer.getKey(key)) || {};
      const mergedObject = _.merge(cachedObject, object);
      await redisServer.setKey(key, mergedObject, CACHE_TTL_SECONDS);
      mergedObjects.push(mergedObject);
    }

    const cacheObject = (await redisServer.getKey(cacheKey)) || {};
    const mergedObject = _.merge(cacheObject, object);
    await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);

    cacheLogger.trace('Updated object cache:', {
      filter: cacheKeyFilter,
      keysUpdated: matchingKeys.length,
    });

    // Return the merged object
    return mergedObject;
  } catch (err) {
    cacheLogger.error('Error updating object in Redis cache:', err);
    // Fall back to returning the provided object if the cache update fails
    return object;
  }
};

export const deleteObjectCache = async ({ model, id }) => {
  if (!model || !id) return;

  const cacheKeyFilter = `${model.modelName}:${id?.toString()}*`;

  cacheLogger.trace('Deleting object cache:', cacheKeyFilter);

  try {
    // Get all keys matching the filter pattern and delete them
    const matchingKeys = await redisServer.getKeysByPattern(cacheKeyFilter);

    for (const cacheKey of matchingKeys) {
      await redisServer.deleteKey(cacheKey);
    }

    cacheLogger.trace('Deleted object cache:', {
      filter: cacheKeyFilter,
      keysDeleted: matchingKeys.length,
    });
  } catch (err) {
    cacheLogger.error('Error deleting object from Redis cache:', err);
  }
};
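
// Illustrative usage of the cache helpers above (not executed here; the
// `deviceModel` name and the ID value are hypothetical). All three helpers
// degrade gracefully when the model, id, or Redis connection is missing:
//
//   const cached = await retrieveObjectCache({ model: deviceModel, id, populate: ['site'] });
//   if (!cached) {
//     const fresh = await deviceModel.findById(id).lean();
//     await updateObjectCache({ model: deviceModel, id, object: fresh, populate: ['site'] });
//   }
//   // ...and on removal:
//   await deleteObjectCache({ model: deviceModel, id });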

// Utility to run one or many rollup aggregations in a single query via $facet.
export const aggregateRollups = async ({ model, baseFilter = {}, rollupConfigs = [] }) => {
  if (!rollupConfigs.length) {
    return {};
  }

  const facetStage = rollupConfigs.reduce((facets, definition, index) => {
    const key = definition.name || `rollup${index}`;
    const matchStage = { $match: { ...baseFilter, ...(definition.filter || {}) } };
    const groupStage = { $group: { _id: null } };

    (definition.rollups || []).forEach((rollup) => {
      switch (rollup.operation) {
        case 'sum':
          groupStage.$group[rollup.name] = { $sum: `$${rollup.property}` };
          break;
        case 'count':
          groupStage.$group[rollup.name] = { $sum: 1 };
          break;
        case 'avg':
          groupStage.$group[rollup.name] = { $avg: `$${rollup.property}` };
          break;
        default:
          throw new Error(`Unsupported rollup operation: ${rollup.operation}`);
      }
    });

    facets[key] = [matchStage, groupStage];
    return facets;
  }, {});

  const [results] = await model.aggregate([{ $facet: facetStage }]);

  return rollupConfigs.reduce((acc, definition, index) => {
    const key = definition.name || `rollup${index}`;
    const rawResult = results?.[key]?.[0] || {};

    // Transform the result to nest rollup values under operation type
    const transformedResult = {};
    (definition.rollups || []).forEach((rollup) => {
      const value = rawResult[rollup.name] || 0;
      // If there's only one rollup and its name matches the key, flatten the structure
      if (definition.rollups.length === 1 && rollup.name === key) {
        transformedResult[rollup.operation] = value;
      } else {
        transformedResult[rollup.name] = { [rollup.operation]: value };
      }
    });

    acc[key] = transformedResult;
    return acc;
  }, {});
};
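
// Illustrative call (names such as `deviceModel`, `enabled`, and `bandwidth`
// are hypothetical; the rollupConfigs shape follows the switch above):
//
//   const stats = await aggregateRollups({
//     model: deviceModel,
//     baseFilter: { enabled: true },
//     rollupConfigs: [
//       {
//         name: 'traffic',
//         filter: { type: 'router' },
//         rollups: [
//           { name: 'devices', operation: 'count' },
//           { name: 'bandwidth', operation: 'sum', property: 'bandwidth' },
//         ],
//       },
//     ],
//   });
//   // => { traffic: { devices: { count: 12 }, bandwidth: { sum: 420 } } }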

// Reusable function to aggregate rollups over history using state reconstruction
export const aggregateRollupsHistory = async ({
  model,
  baseFilter = {},
  rollupConfigs = [],
  startDate,
  endDate,
}) => {
  if (!rollupConfigs.length) {
    return [];
  }

  // Set default dates if not provided
  const end = endDate ? new Date(endDate) : new Date();
  const start = startDate ? new Date(startDate) : new Date(end.getTime() - 24 * 60 * 60 * 1000);

  // Get model name for filtering audit logs
  const parentType = model.modelName ? model.modelName : 'unknown';

  // 1. Fetch all audit logs for this model type from the start date onwards.
  // Filter by parentType instead of fetching object IDs first.
  const auditLogs = await auditLogModel
    .find({
      parentType,
      createdAt: { $gte: start },
    })
    .sort({ createdAt: -1 }) // Newest first
    .lean();

  // 2. Extract unique parent IDs from the audit logs
  const parentIds = [...new Set(auditLogs.map((log) => log.parent.toString()))];

  if (parentIds.length === 0) {
    return [];
  }

  // 3. Fetch the current state of relevant objects that match baseFilter.
  // Note: this only includes objects that CURRENTLY match the baseFilter;
  // objects that matched in the past but don't match now are excluded.
  const currentObjects = await model
    .find({
      _id: { $in: parentIds },
      ...baseFilter,
    })
    .lean();
  const objectMap = new Map();
  currentObjects.forEach((obj) => {
    // Ensure _id is a string for map keys
    objectMap.set(obj._id.toString(), expandObjectIds(obj));
  });

  if (objectMap.size === 0) {
    return [];
  }

  // Helper to check whether an object matches a filter
  const matchesFilter = (obj, filter) => {
    if (!filter || Object.keys(filter).length === 0) return true;

    for (const [key, expectedValue] of Object.entries(filter)) {
      const actualValue = _.get(obj, key);

      // Handle simple equality
      if (actualValue != expectedValue) {
        return false;
      }
    }
    return true;
  };

  // 4. Generate time buckets (1 minute intervals)
  const buckets = [];
  let currentTime = new Date(end);
  // Round down to nearest minute
  currentTime.setSeconds(0, 0);

  while (currentTime >= start) {
    buckets.push(new Date(currentTime));
    currentTime = new Date(currentTime.getTime() - 60000); // -1 minute
  }

  // 5. Rewind state and snapshot
  const results = [];
  let logIndex = 0;

  // Create a working copy of objects to mutate during rewind
  // (deep clone to avoid issues if we need the originals later, though expandObjectIds creates new objects)
  const workingObjects = new Map();
  objectMap.forEach((val, key) => workingObjects.set(key, _.cloneDeep(val)));

  // Iterate backwards through time
  for (const bucketDate of buckets) {
    // Apply all logs that happened AFTER this bucket time (between the last bucket and this one).
    // Since we iterate backwards, these are logs with createdAt > bucketDate.
    while (logIndex < auditLogs.length) {
      const log = auditLogs[logIndex];
      const logDate = new Date(log.createdAt);

      if (logDate <= bucketDate) {
        // This log happened at or before the current bucket time, so its effects
        // will be handled in an earlier bucket. Stop processing logs for this step.
        break;
      }

      // Revert this change
      const objectId = log.parent.toString();
      const object = workingObjects.get(objectId);

      if (object) {
        if (log.operation === 'new') {
          // Object didn't exist before this creation event
          workingObjects.delete(objectId);
        } else if (log.changes && log.changes.old) {
          // Apply old values to revert state
          _.merge(object, log.changes.old);
        }
      }

      logIndex++;
    }

    // Snapshot: calculate rollups for the current state of all objects
    const bucketResult = {
      date: bucketDate.toISOString(),
    };

    const activeObjects = Array.from(workingObjects.values());

    rollupConfigs.forEach((config) => {
      const configName = config.name;

      // Filter objects for this config.
      // Note: we also check baseFilter here in case the object state reverted to something
      // that no longer matches baseFilter (e.g. active: false).
      const matchingObjects = activeObjects.filter(
        (obj) => matchesFilter(obj, baseFilter) && matchesFilter(obj, config.filter)
      );

      // Calculate rollups
      (config.rollups || []).forEach((rollup) => {
        const rollupName = rollup.name;

        let value = 0;
        if (rollup.operation === 'count') {
          value = matchingObjects.length;
        } else if (rollup.operation === 'sum') {
          value = _.sumBy(matchingObjects, (obj) => _.get(obj, rollup.property) || 0);
        } else if (rollup.operation === 'avg') {
          const sum = _.sumBy(matchingObjects, (obj) => _.get(obj, rollup.property) || 0);
          value = matchingObjects.length ? sum / matchingObjects.length : 0;
        }

        // Nest the value under the operation type
        bucketResult[rollupName] = { [rollup.operation]: value };
      });
    });

    results.push(bucketResult);
  }

  // Reverse results to be chronological
  return results.reverse();
};
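
// Illustrative call (model and field names are hypothetical). Returns one
// snapshot per minute, oldest first:
//
//   const series = await aggregateRollupsHistory({
//     model: deviceModel,
//     baseFilter: { enabled: true },
//     rollupConfigs: [
//       { name: 'online', filter: { status: 'online' }, rollups: [{ name: 'online', operation: 'count' }] },
//     ],
//     startDate: '2024-01-01T00:00:00Z',
//     endDate: '2024-01-01T06:00:00Z',
//   });
//   // => [{ date: '2024-01-01T00:00:00.000Z', online: { count: 3 } }, ...]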

// Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
export const listObjects = async ({
  model,
  populate = [],
  page = 1,
  limit = 25,
  filter = {},
  sort = '',
  order = 'ascend',
  project, // optional: override default projection
}) => {
  try {
    logger.trace('Listing objects:', {
      model,
      populate,
      page,
      limit,
      filter,
      sort,
      order,
      project,
    });
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;
    // 'descend' maps to -1, anything else to 1
    const sortOrder = order === 'descend' ? -1 : 1;

    if (!sort || sort === '') {
      sort = 'createdAt';
    }

    if (filter && Object.keys(filter).length > 0) {
      logger.trace('Filter:', filter);
    }

    // Translate any key ending with ._id to remove the ._id suffix for Mongoose
    Object.keys(filter).forEach((key) => {
      if (key.endsWith('._id')) {
        const baseKey = key.slice(0, -4); // Remove '._id' suffix
        filter[baseKey] = filter[key];
        delete filter[key];
      }
    });

    // Use find with population and filter
    let query = model
      .find(filter)
      .sort({ [sort]: sortOrder })
      .skip(skip)
      .limit(Number(limit));

    // Handle populate (array or single value)
    if (populate) {
      if (Array.isArray(populate)) {
        for (const pop of populate) {
          query = query.populate(pop);
        }
      } else if (typeof populate === 'string' || typeof populate === 'object') {
        query = query.populate(populate);
      }
    }

    // Handle select (projection)
    if (project) {
      query = query.select(project);
    }

    query = query.lean();

    const queryResult = await query;
    return expandObjectIds(queryResult);
  } catch (error) {
    logger.error('Object list error:', error);
    return { error: error.message, code: 500 };
  }
};
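
// Illustrative call (model, filter fields, and populate paths are hypothetical):
//
//   const devices = await listObjects({
//     model: deviceModel,
//     filter: { 'site._id': siteId }, // '._id' suffixes are stripped before querying
//     populate: ['site'],
//     page: 2,
//     limit: 50,
//     sort: 'name',
//     order: 'descend',
//   });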

// List the unique values of a property, optionally constrained by a text search and filter
export const listPropertyValues = async ({ model, property, filter = {}, search = '' }) => {
  const aggregateCommand = [];

  if (search) {
    aggregateCommand.push({
      $match: {
        $text: { $search: search },
      },
    });
  }

  if (filter && Object.keys(filter).length > 0) {
    aggregateCommand.push({ $match: filter });
  }

  aggregateCommand.push({ $group: { _id: `$${property}` } });
  aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });

  return await model.aggregate(aggregateCommand);
};
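
// Illustrative call (the 'manufacturer' property is hypothetical):
//
//   const values = await listPropertyValues({ model: deviceModel, property: 'manufacturer' });
//   // => [{ manufacturer: 'Acme' }, { manufacturer: 'Globex' }, ...]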

// Helper to build the nested structure for listObjectsByProperties
function nestGroups(groups, props, filter, idx = 0) {
  if (idx >= props.length) return groups;
  const prop = props[idx];
  const filterPresent = Object.prototype.hasOwnProperty.call(filter, prop);

  // Helper to extract a display key and possible filter values from a value
  function getKeyAndFilterVals(value) {
    if (value && typeof value === 'object') {
      if (value.name) return { key: value.name, filterVals: [value._id?.toString?.(), value.name] };
      if (value._id) return { key: value._id.toString(), filterVals: [value._id.toString()] };
    }
    return { key: value, filterVals: [value] };
  }

  // Build a map of key -> groups for this property
  const keyToGroups = {};
  for (const group of groups) {
    const val = group._id[prop];
    const { key } = getKeyAndFilterVals(val);
    if (!keyToGroups[key]) {
      keyToGroups[key] = {
        groups: [],
        value: val,
      };
    }
    keyToGroups[key].groups.push(group);
  }

  let result = [];

  if (filterPresent) {
    const filterValue = filter[prop]?.toString?.() ?? filter[prop];
    for (const [key, data] of Object.entries(keyToGroups)) {
      const { groups: groupList, value } = data;
      // Check if any group in this key matches the filter (by _id or name)
      const matches = groupList.filter((group) => {
        const { filterVals } = getKeyAndFilterVals(group._id[prop]);
        logger.trace('Matching filter value:', { filterVals, filterValue });
        return filterVals.some((val) => val?.toString() === filterValue);
      });

      let children = [];
      if (matches.length > 0) {
        if (idx === props.length - 1) {
          // Last property in the filter, return items
          for (const group of matches) {
            children = children.concat(group.objects.map(expandObjectIds));
          }
        } else {
          children = nestGroups(matches, props, filter, idx + 1);
        }
      }

      result.push({
        property: prop,
        value: expandObjectIds(value),
        children: children,
      });
    }
  } else {
    // No filter for this property, just show all keys at this level with empty children
    for (const [key, data] of Object.entries(keyToGroups)) {
      result.push({
        property: prop,
        value: expandObjectIds(data.value),
        children: [],
      });
    }
  }
  return result;
}

// Group objects by multiple properties and return nested groupings
export const listObjectsByProperties = async ({
  model,
  properties = [],
  filter = {},
  masterFilter = {},
  populate,
}) => {
  try {
    logger.trace('listObjectsByProperties properties:', properties);
    const propertiesPresent = !(
      !Array.isArray(properties) ||
      properties.length === 0 ||
      properties[0] == ''
    );

    // Build aggregation pipeline
    const pipeline = [];

    // Handle populate (array or single value)
    if (populate) {
      const populates = Array.isArray(populate) ? populate : [populate];
      for (const pop of populates) {
        // Support both string and object syntax for populate
        if (typeof pop === 'string') {
          pipeline.push({
            $lookup: {
              from: pop.toLowerCase() + 's', // crude pluralization, adjust if needed
              localField: pop,
              foreignField: '_id',
              as: pop,
            },
          });
          // Unwind if it's a single reference
          pipeline.push({
            $unwind: {
              path: `$${pop}`,
              preserveNullAndEmptyArrays: true,
            },
          });
        } else if (typeof pop === 'object' && pop.path) {
          pipeline.push({
            $lookup: {
              from: pop.from ? pop.from : pop.path.toLowerCase(),
              localField: pop.path,
              foreignField: '_id',
              as: pop.path,
            },
          });
          if (pop?.multiple == false || pop?.multiple == undefined) {
            // Default to unwinding unless `multiple` is explicitly true
            pipeline.push({
              $unwind: {
                path: `$${pop.path}`,
                preserveNullAndEmptyArrays: true,
              },
            });
          }
        }
      }
    }

    if (masterFilter && Object.keys(masterFilter).length > 0) {
      pipeline.push({ $match: { ...masterFilter } });
    }

    if (propertiesPresent) {
      // Build the $group _id object for all properties
      const groupId = {};
      for (const prop of properties) {
        groupId[prop] = `$${prop}`;
      }
      pipeline.push({
        $group: {
          _id: groupId,
          objects: { $push: '$$ROOT' },
        },
      });

      // Run aggregation
      const results = await model.aggregate(pipeline);
      logger.trace('listObjectsByProperties results:', results);
      return nestGroups(results, properties, filter);
    } else {
      // If no properties were specified, just return all objects without grouping.
      // Ensure the pipeline is not empty by adding a $match stage if needed.
      if (pipeline.length === 0) {
        pipeline.push({ $match: {} });
      }
      const results = await model.aggregate(pipeline);
      return results;
    }
  } catch (error) {
    logger.error('listObjectsByProperties error:', error);
    return { error: error.message, code: 500 };
  }
};
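
// Illustrative call (property, filter, and collection names are hypothetical).
// Groups by two properties and expands only one site's children:
//
//   const tree = await listObjectsByProperties({
//     model: deviceModel,
//     properties: ['site', 'status'],
//     filter: { site: siteId },
//     masterFilter: { enabled: true },
//     populate: [{ path: 'site', from: 'sites' }],
//   });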

// Reusable function to get a single object by ID
export const getObject = async ({ model, id, populate }) => {
  try {
    logger.trace('Getting object:', {
      model,
      id,
      populate,
    });

    // Try the cache first
    const cachedObject = await retrieveObjectCache({ model, id, populate });

    let query = model.findById(id).lean();

    // Auto-populate file references if the model has them
    if (modelHasRef(model, 'file')) {
      const fileFields = getFieldsByRef(model, 'file');

      // Populate all file reference fields
      for (const field of fileFields) {
        query = query.populate(field);
      }
    }

    // Handle populate (array or single value)
    if (populate) {
      if (Array.isArray(populate)) {
        for (const pop of populate) {
          query = query.populate(pop);
        }
      } else if (typeof populate === 'string' || typeof populate === 'object') {
        query = query.populate(populate);
      }
    }
    const result = await query;

    if (!result) {
      return { error: 'Object not found.', code: 404 };
    }

    const expanded = _.merge(cachedObject || {}, expandObjectIds(result));

    // Update the cache with the expanded object
    await updateObjectCache({
      model,
      id: expanded._id,
      object: expanded,
      populate,
    });

    return expanded;
  } catch (error) {
    logger.error('getObject error:', error);
    return { error: error.message, code: 500 };
  }
};
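
// Illustrative call (model and populate path are hypothetical). Cached fields
// are merged with the fresh database result before being returned and re-cached:
//
//   const device = await getObject({ model: deviceModel, id: deviceId, populate: ['site'] });
//   if (device.error) {
//     // handle 404 / 500
//   }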

export const getModelStats = async ({ model }) => {
  if (!model.stats) {
    logger.warn(`Model ${model.modelName} does not have a stats method.`);
    return { error: 'Model does not have a stats method.', code: 500 };
  }
  return await model.stats();
};

export const getModelHistory = async ({ model, from, to }) => {
  if (!model.history) {
    logger.warn(`Model ${model.modelName} does not have a history method.`);
    return { error: 'Model does not have a history method.', code: 500 };
  }
  return await model.history(from, to);
};

export const listObjectDependencies = async ({ model, id }) => {
  try {
    const dependencies = [];
    const parentModelName = model?.modelName;
    if (!parentModelName || !id) {
      return [];
    }

    const allModelEntries = getAllModels();

    for (const entry of allModelEntries) {
      const targetModel = entry?.model;
      if (!targetModel || !targetModel.schema) continue;

      const referencingPaths = [];

      targetModel.schema.eachPath((pathName, schemaType) => {
        const directRef = schemaType?.options?.ref;
        const arrayRef = schemaType?.caster?.options?.ref;
        const refName = directRef || arrayRef;
        if (refName === parentModelName) {
          referencingPaths.push(pathName);
        }
      });

      if (referencingPaths.length === 0) continue;

      for (const pathName of referencingPaths) {
        const filter = { [pathName]: id };
        const results = await targetModel.find(filter).lean();
        for (const doc of results) {
          const object = expandObjectIds(doc);
          dependencies.push({
            objectType: targetModel.modelName,
            _id: object._id,
            name: object?.name,
          });
        }
      }
    }

    return dependencies;
  } catch (error) {
    logger.error('listObjectDependencies error:', error);
    return { error: error.message, code: 500 };
  }
};
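
// Illustrative call (the 'device' objectType is hypothetical). Returns a flat
// list of documents, across all registered models, that reference the given object:
//
//   const deps = await listObjectDependencies({ model: fileModel, id: fileId });
//   // => [{ objectType: 'device', _id: '...', name: 'edge-router-1' }, ...]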

// Reusable function to edit an object by ID, with audit logging and distribution
export const editObject = async ({ model, id, updateData, user, populate }) => {
  try {
    // Determine parentType from the model name
    const parentType = model.modelName ? model.modelName : 'unknown';
    // Fetch and update the object
    let query = model.findByIdAndUpdate(id, updateData).lean();

    if (populate) {
      if (Array.isArray(populate)) {
        for (const pop of populate) {
          query = query.populate(pop);
        }
      } else if (typeof populate === 'string' || typeof populate === 'object') {
        query = query.populate(populate);
      }
    }

    const previousObject = await query;

    if (!previousObject) {
      return { error: `${parentType} not found.`, code: 404 };
    }

    const previousExpandedObject = expandObjectIds(previousObject);

    // Check if any model parameters have ref: 'file' and flush files if so
    if (modelHasRef(model, 'file')) {
      logger.debug(`Model ${model.modelName} has file references, checking for files to flush`);
      const fileFields = getFieldsByRef(model, 'file');

      for (const fieldName of fileFields) {
        const fieldValue = previousExpandedObject[fieldName];

        if (fieldValue) {
          if (Array.isArray(fieldValue)) {
            // Handle file arrays
            for (const fileRef of fieldValue) {
              if (fileRef && fileRef._id) {
                logger.debug(`Flushing file from array field ${fieldName}: ${fileRef._id}`);
                await flushFile({ id: fileRef._id, user });
              }
            }
          } else if (fieldValue._id) {
            // Handle single file reference
            logger.debug(`Flushing file from field ${fieldName}: ${fieldValue._id}`);
            await flushFile({ id: fieldValue._id, user });
          }
        }
      }
    }

    // Audit log before update
    await editAuditLog(
      previousExpandedObject,
      { ...previousExpandedObject, ...updateData },
      id,
      parentType,
      user
    );
    // Distribute update
    await distributeUpdate(updateData, id, parentType);
    // Call childUpdate event for any child objects
    await distributeChildUpdate(
      previousExpandedObject,
      { ...previousExpandedObject, ...updateData },
      id,
      model
    );
    const updatedObject = { ...previousExpandedObject, ...updateData };

    // Update the cache with the new version
    await updateObjectCache({
      model,
      id,
      object: updatedObject,
      populate,
    });

    if (model.recalculate) {
      logger.debug(`Recalculating ${model.modelName}`);
      await model.recalculate(updatedObject, user);
    }

    if (model.stats) {
      logger.debug(`Getting stats for ${model.modelName}`);
      const statsData = await model.stats();
      await distributeStats(statsData, parentType);
    }

    return updatedObject;
  } catch (error) {
    logger.error('editObject error:', error);
    return { error: error.message, code: 500 };
  }
};
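
// Illustrative call (model and fields are hypothetical). The returned value is
// the previous document merged with updateData, after audit logging, change
// distribution, and cache refresh:
//
//   const updated = await editObject({
//     model: deviceModel,
//     id: deviceId,
//     updateData: { status: 'maintenance' },
//     user: req.user,
//   });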

// Reusable function to create a new object
export const newObject = async ({ model, newData, user = null }, distributeChanges = true) => {
  try {
    const parentType = model.modelName ? model.modelName : 'unknown';

    const result = await model.create(newData);
    if (!result) {
      return { error: 'No object created.', code: 500 };
    }
    const created = expandObjectIds(result.toObject());

    await newAuditLog(newData, created._id, parentType, user);
    if (distributeChanges == true) {
      await distributeNew(created, parentType);
    }
    await distributeChildNew(created, created._id, model);

    // Cache the newly created object
    await updateObjectCache({
      model,
      id: created._id,
      object: created,
      populate: [],
    });

    if (model.recalculate) {
      logger.debug(`Recalculating ${model.modelName}`);
      await model.recalculate(created, user);
    }

    if (model.stats) {
      logger.debug(`Getting stats for ${model.modelName}`);
      const statsData = await model.stats();
      await distributeStats(statsData, parentType);
    }

    return created;
  } catch (error) {
    logger.error('newObject error:', error);
    return { error: error.message, code: 500 };
  }
};
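
// Illustrative call (model and fields are hypothetical):
//
//   const site = await newObject({
//     model: siteModel,
//     newData: { name: 'HQ', enabled: true },
//     user: req.user,
//   });
//   // Pass `false` as the second argument to skip distributeNew, e.g. during bulk imports.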

// Reusable function to delete an object by ID, with audit logging and distribution
export const deleteObject = async ({ model, id, user = null }, distributeChanges = true) => {
  try {
    const parentType = model.modelName ? model.modelName : 'unknown';
    // Delete the object
    const result = await model.findByIdAndDelete(id);

    if (!result) {
      return { error: `${parentType} not found.`, code: 404 };
    }

    const deleted = expandObjectIds(result.toObject());
    // Audit log the deletion
    await deleteAuditLog(deleted, id, parentType, user, 'delete');

    if (distributeChanges == true) {
      await distributeDelete(deleted, parentType);
    }

    await distributeChildDelete(deleted, id, model);

    // Invalidate cache for this object
    await deleteObjectCache({ model, id });

    if (model.recalculate) {
      logger.debug(`Recalculating ${model.modelName}`);
      await model.recalculate(deleted, user);
    }

    if (model.stats) {
      logger.debug(`Getting stats for ${model.modelName}`);
      const statsData = await model.stats();
      await distributeStats(statsData, parentType);
    }

    return { deleted: true, object: deleted };
  } catch (error) {
    logger.error('deleteObject error:', error);
    return { error: error.message, code: 500 };
  }
};
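
// Illustrative call (model name is hypothetical):
//
//   const { deleted, object } = await deleteObject({ model: siteModel, id: siteId, user: req.user });
//   // Returns { error, code } instead when the document does not exist or the delete fails.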

export const flushFile = async ({ id, user }) => {
  try {
    logger.info(`Starting file deletion process for file ID: ${id}`);

    // First, check if the file exists
    const file = await fileModel.findById(id).lean();
    if (!file) {
      logger.warn(`File with ID ${id} not found`);
      return {
        error: 'File not found',
        code: 404,
      };
    }

    logger.info(`Found file: ${file.name} (${file._id})`);

    // Check if this file has any dependencies
    const dependencies = await listObjectDependencies({
      model: fileModel,
      id: file._id,
    });

    if (dependencies.length > 0) {
      logger.info(
        `File ${file._id} (${file.name}) has ${dependencies.length} dependencies, cannot delete`
      );
      return {
        error: 'File has dependencies and cannot be deleted',
        code: 409,
        dependencies: dependencies.length,
        dependencyDetails: dependencies,
      };
    }

    logger.debug(`File ${file._id} (${file.name}) has no dependencies, proceeding with deletion`);

    // Delete from database first
    const deleteResult = await deleteObject({
      model: fileModel,
      id: file._id,
      user,
    });

    if (deleteResult.error) {
      logger.error(`Failed to delete file ${file._id} from database:`, deleteResult.error);
      return {
        error: deleteResult.error,
        code: deleteResult.code || 500,
      };
    }

    // Try to delete from Ceph storage if it exists
    if (file.extension) {
      try {
        const { deleteFile, BUCKETS } = await import('../services/storage/ceph.js');
        const cephKey = `files/${file._id}${file.extension}`;

        await deleteFile(BUCKETS.FILES, cephKey);
        logger.debug(`Deleted file from Ceph storage: ${cephKey}`);
      } catch (cephError) {
        logger.warn(`Failed to delete file ${file._id} from Ceph storage:`, cephError.message);
        // Don't treat Ceph deletion failure as a critical error since DB record is already deleted
      }
    }

    const result = {
      success: true,
      deletedFile: {
        fileId: file._id,
        fileName: file.name,
        deletedAt: new Date(),
      },
    };

    logger.info(`Successfully deleted file: ${file.name} (${file._id})`);
    return result;
  } catch (error) {
    logger.error('Error in flushFile:', error);
    return {
      error: error.message,
      code: 500,
    };
  }
};

// Helper function to recursively delete objects and their children
export const recursivelyDeleteChildObjects = async (
  { model, id, user = null },
  distributeChanges = true
) => {
  const deletedIds = [];

  // Find all objects that have this object as their parent
  const childObjects = await model.find({ parent: id });

  // Recursively delete all children first
  for (const childObject of childObjects) {
    const childDeletedIds = await recursivelyDeleteChildObjects(
      { model, id: childObject._id, user },
      false
    );
    deletedIds.push(...childDeletedIds);
  }

  // Delete the current object
  await deleteObject({ model, id, user }, distributeChanges);

  deletedIds.push(id);

  return deletedIds;
};