Refactor audit logging: centralized audit log retrieval in a shared getAuditLogs utility, updated service handlers to use it, and standardized model references in audit log creation across services. Also tightened filtering in the management routes, which now build an allow-listed filter via parseFilter before querying.

Tom Butcher 2025-06-29 22:34:58 +01:00
parent a5f3b75be8
commit dd4e48f125
33 changed files with 265 additions and 303 deletions
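For orientation before the per-file diffs, the pattern this commit centralizes looks roughly like the following (a minimal sketch; variable names are illustrative, but auditLogModel, getAuditLogs, and the import path match the code further down):

// Before: each service handler queried the audit log collection directly
// const auditLogs = await auditLogModel.find({ target: id }).populate('owner');

// After: handlers import the shared helper from utils.js
import { getAuditLogs } from '../../utils.js';

const auditLogs = await getAuditLogs(id);      // one target id
const jobLogs = await getAuditLogs(targetIds); // or an array of target ids, as in jobs.js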

View File

@ -1,20 +1,31 @@
import express from 'express';
import { listAuditLogsRouteHandler, getAuditLogRouteHandler } from '../../services/management/auditlogs.js';
import { isAuthenticated } from '../../keycloak.js';
import {
listAuditLogsRouteHandler,
getAuditLogRouteHandler,
} from '../../services/management/auditlogs.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
/**
* @route GET /api/auditlogs
* @desc Get all audit logs with pagination and filtering
* @access Private
*/
router.get('/', async (req, res) => {
const page = parseInt(req.query.page) || 1;
const limit = parseInt(req.query.limit) || 25;
const property = req.query.property || "";
const filter = req.query.filter ? JSON.parse(req.query.filter) : {};
// List audit logs
router.get('/', isAuthenticated, async (req, res) => {
const { page, limit, property, sort, order } = req.query;
await listAuditLogsRouteHandler(req, res, page, limit, property, filter);
const allowedFilters = ['target', 'owner'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listAuditLogsRouteHandler(req, res, page, limit, property, filter, '', sort, order);
});
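As a usage note on the route above: only the target and owner query parameters are honoured as filters. A request such as GET /api/auditlogs?page=1&limit=50&owner=<userId>&sort=createdAt&order=desc (values illustrative) has those keys run through parseFilter and merged into the Mongo filter; any other query keys are ignored.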
/**

View File

@ -1,41 +1,41 @@
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
listUsersRouteHandler,
getUserRouteHandler,
editUserRouteHandler,
} from '../../services/management/users.js';
// list of users
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ['username', 'name', 'firstName', 'lastName'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listUsersRouteHandler(req, res, page, limit, property, filter);
});
router.get('/:id', isAuthenticated, (req, res) => {
getUserRouteHandler(req, res);
});
// update user info
router.put('/:id', isAuthenticated, async (req, res) => {
editUserRouteHandler(req, res);
});
export default router;

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main filamentStock schema
@ -12,22 +12,19 @@ const filamentStockSchema = new Schema(
startingNetWeight: { type: Number, required: true },
currentGrossWeight: { type: Number, required: true },
currentNetWeight: { type: Number, required: true },
filament: { type: mongoose.Schema.Types.ObjectId, ref: "Filament" },
stockEvents: [{ type: mongoose.Schema.Types.ObjectId, ref: "StockEvent" }]
filament: { type: mongoose.Schema.Types.ObjectId, ref: 'filament' },
stockEvents: [{ type: mongoose.Schema.Types.ObjectId, ref: 'stockEvent' }],
},
{ timestamps: true },
{ timestamps: true }
);
// Add virtual id getter
filamentStockSchema.virtual("id").get(function () {
filamentStockSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
filamentStockSchema.set("toJSON", { virtuals: true });
filamentStockSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const filamentStockModel = mongoose.model(
"FilamentStock",
filamentStockSchema,
);
export const filamentStockModel = mongoose.model('filamentStock', filamentStockSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main partStock schema
@ -6,20 +6,20 @@ const partStockSchema = new Schema(
{
name: { type: String, required: true },
fileName: { type: String, required: false },
part: { type: mongoose.Schema.Types.ObjectId, ref: "Part" },
part: { type: mongoose.Schema.Types.ObjectId, ref: 'part' },
startingQuantity: { type: Number, required: true },
currentQuantity: { type: Number, required: true },
},
{ timestamps: true },
{ timestamps: true }
);
// Add virtual id getter
partStockSchema.virtual("id").get(function () {
partStockSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
partStockSchema.set("toJSON", { virtuals: true });
partStockSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const partStockModel = mongoose.model("PartStock", partStockSchema);
export const partStockModel = mongoose.model('partStock', partStockSchema);

View File

@ -1,38 +1,38 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const stockAuditItemSchema = new Schema({
type: { type: String, enum: ["filament", "part"], required: true },
type: { type: String, enum: ['filament', 'part'], required: true },
stock: { type: Schema.Types.ObjectId, required: true },
expectedQuantity: { type: Number, required: true },
actualQuantity: { type: Number, required: true },
notes: { type: String }
notes: { type: String },
});
const stockAuditSchema = new Schema(
{
type: { type: String, required: true },
status: {
type: String,
enum: ["pending", "in_progress", "completed", "cancelled"],
default: "pending",
required: true
status: {
type: String,
enum: ['pending', 'in_progress', 'completed', 'cancelled'],
default: 'pending',
required: true,
},
notes: { type: String },
items: [stockAuditItemSchema],
createdBy: { type: Schema.Types.ObjectId, ref: "User", required: true },
completedAt: { type: Date }
createdBy: { type: Schema.Types.ObjectId, ref: 'user', required: true },
completedAt: { type: Date },
},
{ timestamps: true }
);
// Add virtual id getter
stockAuditSchema.virtual("id").get(function () {
stockAuditSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
stockAuditSchema.set("toJSON", { virtuals: true });
stockAuditSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const stockAuditModel = mongoose.model("StockAudit", stockAuditSchema);
export const stockAuditModel = mongoose.model('stockAudit', stockAuditSchema);

View File

@ -1,26 +1,26 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const stockEventSchema = new Schema(
{
type: { type: String, required: true },
value: { type: Number, required: true },
unit: { type: String, required: true},
subJob: { type: Schema.Types.ObjectId, ref: "SubJob", required: false },
job: { type: Schema.Types.ObjectId, ref: "Job", required: false },
filamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock", required: true },
timestamp: { type: Date, default: Date.now }
unit: { type: String, required: true },
subJob: { type: Schema.Types.ObjectId, ref: 'subJob', required: false },
job: { type: Schema.Types.ObjectId, ref: 'job', required: false },
filamentStock: { type: Schema.Types.ObjectId, ref: 'filamentStock', required: true },
timestamp: { type: Date, default: Date.now },
},
{ timestamps: true }
);
// Add virtual id getter
stockEventSchema.virtual("id").get(function () {
stockEventSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
stockEventSchema.set("toJSON", { virtuals: true });
stockEventSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const stockEventModel = mongoose.model("StockEvent", stockEventSchema);
export const stockEventModel = mongoose.model('stockEvent', stockEventSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const auditLogSchema = new Schema(
@ -8,34 +8,49 @@ const auditLogSchema = new Schema(
target: {
type: Schema.Types.ObjectId,
refPath: 'targetModel',
required: true
required: true,
},
targetModel: {
type: String,
required: true,
enum: ['Printer', 'Job', 'SubJob', 'FilamentStock', 'StockEvent', 'Vendor', 'Part', 'Product', 'Material', 'Filament', 'GCodeFile', 'NoteType', 'Note', 'User'] // Add other models as needed
enum: [
'printer',
'job',
'subJob',
'filamentStock',
'stockEvent',
'vendor',
'part',
'product',
'material',
'filament',
'gcodeFile',
'noteType',
'note',
'user',
], // Add other models as needed
},
owner: {
type: Schema.Types.ObjectId,
refPath: 'ownerModel',
required: true
required: true,
},
ownerModel: {
type: String,
required: true,
enum: ['User', 'Printer']
}
enum: ['user', 'printer'],
},
},
{ timestamps: true }
);
// Add virtual id getter
auditLogSchema.virtual("id").get(function () {
auditLogSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
auditLogSchema.set("toJSON", { virtuals: true });
auditLogSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const auditLogModel = mongoose.model("AuditLog", auditLogSchema);
export const auditLogModel = mongoose.model('auditLog', auditLogSchema);
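A note on the dynamic references above: because target uses refPath, the stored targetModel string must exactly match one of the lowercase enum values (and the lowercase model names registered elsewhere in this commit). A hypothetical entry written through this schema would look roughly like this (field values illustrative; fields outside the hunk shown above are omitted):

await auditLogModel.create({
  target: filamentId,      // ObjectId of the document that changed
  targetModel: 'filament', // must be one of the lowercase enum values
  owner: userId,
  ownerModel: 'user',
});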

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const filamentSchema = new mongoose.Schema({
@ -7,7 +7,7 @@ const filamentSchema = new mongoose.Schema({
url: { required: false, type: String },
image: { required: false, type: Buffer },
color: { required: true, type: String },
vendor: { type: Schema.Types.ObjectId, ref: "Vendor", required: true },
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
type: { required: true, type: String },
cost: { required: true, type: Number },
diameter: { required: true, type: Number },
@ -17,10 +17,10 @@ const filamentSchema = new mongoose.Schema({
emptySpoolWeight: { required: true, type: Number },
});
filamentSchema.virtual("id").get(function () {
filamentSchema.virtual('id').get(function () {
return this._id.toHexString();
});
filamentSchema.set("toJSON", { virtuals: true });
filamentSchema.set('toJSON', { virtuals: true });
export const filamentModel = mongoose.model("Filament", filamentSchema);
export const filamentModel = mongoose.model('filament', filamentSchema);

View File

@ -1,5 +1,4 @@
import mongoose from "mongoose";
const { Schema } = mongoose;
import mongoose from 'mongoose';
const hostSchema = new mongoose.Schema({
online: { required: true, type: Boolean },
@ -8,10 +7,10 @@ const hostSchema = new mongoose.Schema({
status: { type: { required: true, type: String } },
});
hostSchema.virtual("id").get(function () {
hostSchema.virtual('id').get(function () {
return this._id.toHexString();
});
hostSchema.set("toJSON", { virtuals: true });
hostSchema.set('toJSON', { virtuals: true });
export const hostModel = mongoose.model("Host", hostSchema);
export const hostModel = mongoose.model('host', hostSchema);

View File

@ -13,4 +13,4 @@ materialSchema.virtual('id').get(function () {
materialSchema.set('toJSON', { virtuals: true });
export const materialModel = mongoose.model('Material', materialSchema);
export const materialModel = mongoose.model('material', materialSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const noteTypeSchema = new Schema(
@ -16,17 +16,17 @@ const noteTypeSchema = new Schema(
type: Boolean,
required: true,
default: true,
}
},
},
{ timestamps: true }
);
// Add virtual id getter
noteTypeSchema.virtual("id").get(function () {
noteTypeSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
noteTypeSchema.set("toJSON", { virtuals: true });
noteTypeSchema.set('toJSON', { virtuals: true });
export const noteTypeModel = mongoose.model("NoteType", noteTypeSchema);
export const noteTypeModel = mongoose.model('noteType', noteTypeSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main part schema
@ -6,21 +6,21 @@ const partSchema = new Schema(
{
name: { type: String, required: true },
fileName: { type: String, required: false },
product: { type: mongoose.Schema.Types.ObjectId, ref: "Product" },
product: { type: mongoose.Schema.Types.ObjectId, ref: 'product' },
globalPricing: { type: Boolean, default: true },
priceMode: { type: String, default: 'margin' },
price: { type: Number, required: false },
},
{ timestamps: true },
{ timestamps: true }
);
// Add virtual id getter
partSchema.virtual("id").get(function () {
partSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
partSchema.set("toJSON", { virtuals: true });
partSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const partModel = mongoose.model("Part", partSchema);
export const partModel = mongoose.model('part', partSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main product schema
@ -9,19 +9,18 @@ const productSchema = new Schema(
version: { type: String },
priceMode: { type: String, default: 'margin' },
price: { type: Number, required: false },
vendor: { type: Schema.Types.ObjectId, ref: "Vendor", required: true },
parts: [{ type: mongoose.Schema.Types.ObjectId, ref: "Part" }],
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
parts: [{ type: mongoose.Schema.Types.ObjectId, ref: 'part' }],
},
{ timestamps: true },
{ timestamps: true }
);
// Add virtual id getter
productSchema.virtual("id").get(function () {
productSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
productSchema.set("toJSON", { virtuals: true });
productSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const productModel = mongoose.model("Product", productSchema);
export const productModel = mongoose.model('product', productSchema);

View File

@ -1,17 +1,20 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const userSchema = new mongoose.Schema({
username: { required: true, type: String},
name: { required: true, type: String},
firstName: { required: false, type: String },
lastName: { required: false, type: String },
email: { required: true, type: String },
}, { timestamps: true },);
const userSchema = new mongoose.Schema(
{
username: { required: true, type: String },
name: { required: true, type: String },
firstName: { required: false, type: String },
lastName: { required: false, type: String },
email: { required: true, type: String },
},
{ timestamps: true }
);
userSchema.virtual("id").get(function () {
userSchema.virtual('id').get(function () {
return this._id.toHexString();
});
userSchema.set("toJSON", { virtuals: true });
userSchema.set('toJSON', { virtuals: true });
export const userModel = mongoose.model("User", userSchema);
export const userModel = mongoose.model('user', userSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const vendorSchema = new mongoose.Schema(
{
@ -9,13 +9,13 @@ const vendorSchema = new mongoose.Schema(
contact: { required: false, type: String },
country: { required: false, type: String },
},
{ timestamps: true },
{ timestamps: true }
);
vendorSchema.virtual("id").get(function () {
vendorSchema.virtual('id').get(function () {
return this._id.toHexString();
});
vendorSchema.set("toJSON", { virtuals: true });
vendorSchema.set('toJSON', { virtuals: true });
export const vendorModel = mongoose.model("Vendor", vendorSchema);
export const vendorModel = mongoose.model('vendor', vendorSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const noteSchema = new mongoose.Schema({
@ -12,7 +12,7 @@ const noteSchema = new mongoose.Schema({
},
noteType: {
type: Schema.Types.ObjectId,
ref: "NoteType",
ref: 'noteType',
required: true,
},
createdAt: {
@ -27,15 +27,15 @@ const noteSchema = new mongoose.Schema({
},
user: {
type: Schema.Types.ObjectId,
ref: "User",
ref: 'user',
required: false,
}
},
});
noteSchema.virtual("id").get(function () {
noteSchema.virtual('id').get(function () {
return this._id.toHexString();
});
noteSchema.set("toJSON", { virtuals: true });
noteSchema.set('toJSON', { virtuals: true });
export const noteModel = mongoose.model("Note", noteSchema);
export const noteModel = mongoose.model('note', noteSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const gcodeFileSchema = new mongoose.Schema({
@ -6,19 +6,19 @@ const gcodeFileSchema = new mongoose.Schema({
gcodeFileName: { required: false, type: String },
gcodeFileInfo: { required: true, type: Object },
size: { type: Number, required: false },
filament: { type: Schema.Types.ObjectId, ref: "Filament", required: true },
parts: [{ type: Schema.Types.ObjectId, ref: "Part", required: true }],
filament: { type: Schema.Types.ObjectId, ref: 'filament', required: true },
parts: [{ type: Schema.Types.ObjectId, ref: 'part', required: true }],
cost: { type: Number, required: false },
createdAt: { type: Date },
updatedAt: { type: Date },
});
gcodeFileSchema.index({ name: "text", brand: "text" });
gcodeFileSchema.index({ name: 'text', brand: 'text' });
gcodeFileSchema.virtual("id").get(function () {
gcodeFileSchema.virtual('id').get(function () {
return this._id.toHexString();
});
gcodeFileSchema.set("toJSON", { virtuals: true });
gcodeFileSchema.set('toJSON', { virtuals: true });
export const gcodeFileModel = mongoose.model("GCodeFile", gcodeFileSchema);
export const gcodeFileModel = mongoose.model('gcodeFile', gcodeFileSchema);

View File

@ -1,18 +1,18 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const jobSchema = new mongoose.Schema({
state: {
type: { required: true, type: String },
},
printers: [{ type: Schema.Types.ObjectId, ref: "Printer", required: false }],
printers: [{ type: Schema.Types.ObjectId, ref: 'printer', required: false }],
createdAt: { required: true, type: Date },
updatedAt: { required: true, type: Date },
startedAt: { required: false, type: Date },
finishedAt: { required: false, type: Date },
gcodeFile: {
type: Schema.Types.ObjectId,
ref: "GCodeFile",
ref: 'gcodeFile',
required: false,
},
quantity: {
@ -21,18 +21,14 @@ const jobSchema = new mongoose.Schema({
default: 1,
min: 1,
},
subJobs: [
{ type: Schema.Types.ObjectId, ref: "SubJob", required: false },
],
notes: [
{ type: Schema.Types.ObjectId, ref: "Note", required: false }
],
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob', required: false }],
notes: [{ type: Schema.Types.ObjectId, ref: 'note', required: false }],
});
jobSchema.virtual("id").get(function () {
jobSchema.virtual('id').get(function () {
return this._id.toHexString();
});
jobSchema.set("toJSON", { virtuals: true });
jobSchema.set('toJSON', { virtuals: true });
export const jobModel = mongoose.model("Job", jobSchema);
export const jobModel = mongoose.model('job', jobSchema);

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the moonraker connection schema
@ -9,7 +9,7 @@ const moonrakerSchema = new Schema(
protocol: { type: String, required: true },
apiKey: { type: String, default: null, required: false },
},
{ _id: false },
{ _id: false }
);
// Define the alert schema
@ -27,35 +27,35 @@ const printerSchema = new Schema(
name: { type: String, required: true },
online: { type: Boolean, required: true, default: false },
state: {
type: { type: String, required: true, default: "Offline" },
type: { type: String, required: true, default: 'offline' },
percent: { type: Number, required: false },
},
connectedAt: { type: Date, default: null },
loadedFilament: {
type: Schema.Types.ObjectId,
ref: "Filament",
ref: 'filament',
default: null,
},
moonraker: { type: moonrakerSchema, required: true },
tags: [{ type: String }],
firmware: { type: String },
currentJob: { type: Schema.Types.ObjectId, ref: "Job" },
currentSubJob: { type: Schema.Types.ObjectId, ref: "SubJob" },
currentFilamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock" },
subJobs: [{ type: Schema.Types.ObjectId, ref: "SubJob" }],
vendor: { type: Schema.Types.ObjectId, ref: "Vendor", default: null },
currentJob: { type: Schema.Types.ObjectId, ref: 'job' },
currentSubJob: { type: Schema.Types.ObjectId, ref: 'subJob' },
currentFilamentStock: { type: Schema.Types.ObjectId, ref: 'filamentStock' },
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob' }],
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', default: null },
alerts: [alertSchema],
},
{ timestamps: true },
{ timestamps: true }
);
// Add virtual id getter
printerSchema.virtual("id").get(function () {
printerSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
printerSchema.set("toJSON", { virtuals: true });
printerSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const printerModel = mongoose.model("Printer", printerSchema);
export const printerModel = mongoose.model('printer', printerSchema);

View File

@ -1,15 +1,15 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const { Schema } = mongoose;
const subJobSchema = new mongoose.Schema({
printer: {
type: Schema.Types.ObjectId,
ref: "Printer",
ref: 'printer',
required: true,
},
job: {
type: Schema.Types.ObjectId,
ref: "Job",
ref: 'job',
required: true,
},
subJobId: {
@ -18,7 +18,7 @@ const subJobSchema = new mongoose.Schema({
},
gcodeFile: {
type: Schema.Types.ObjectId,
ref: "GCodeFile",
ref: 'gcodeFile',
required: true,
},
state: {
@ -41,13 +41,10 @@ const subJobSchema = new mongoose.Schema({
finishedAt: { required: false, type: Date },
});
subJobSchema.virtual("id").get(function () {
subJobSchema.virtual('id').get(function () {
return this._id.toHexString();
});
subJobSchema.set("toJSON", { virtuals: true });
subJobSchema.set('toJSON', { virtuals: true });
export const subJobModel = mongoose.model(
"SubJob",
subJobSchema,
);
export const subJobModel = mongoose.model('subJob', subJobSchema);

View File

@ -4,7 +4,7 @@ import { filamentModel } from '../../schemas/management/filament.schema.js';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { getAuditLogs } from '../../utils.js';
dotenv.config();
@ -106,11 +106,7 @@ export const getFilamentStockRouteHandler = async (req, res) => {
logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...filamentStock._doc, auditLogs: auditLogs });
} catch (error) {

View File

@ -2,7 +2,7 @@ import dotenv from 'dotenv';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { getAuditLogs } from '../../utils.js';
dotenv.config();
@ -77,11 +77,7 @@ export const getStockAuditRouteHandler = async (req, res) => {
logger.trace(`Stock audit with ID: ${id}:`, stockAudit);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...stockAudit._doc, auditLogs: auditLogs });
} catch (error) {

View File

@ -2,8 +2,7 @@ import dotenv from 'dotenv';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { newAuditLog, getAuditLogs } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('Filaments');
@ -83,11 +82,7 @@ export const getFilamentRouteHandler = async (req, res) => {
logger.trace(`Filament with ID: ${id}:`, filament);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...filament._doc, auditLogs: auditLogs });
} catch (error) {
@ -128,7 +123,7 @@ export const editFilamentRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(filament.toObject(), updateData, id, 'Filament', req.user._id, 'User');
await newAuditLog(filament.toObject(), updateData, id, 'filament', req.user._id, 'user');
const result = await filamentModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
@ -172,7 +167,7 @@ export const newFilamentRouteHandler = async (req, res) => {
}
// Create audit log for new filament
await newAuditLog({}, newFilament, result._id, 'Filament', req.user._id, 'User');
await newAuditLog({}, newFilament, result._id, 'filament', req.user._id, 'user');
res.status(200).send({ status: 'ok' });
} catch (updateError) {

View File

@ -2,8 +2,7 @@ import dotenv from 'dotenv';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { newAuditLog, getAuditLogs } from '../../utils.js';
dotenv.config();
@ -64,11 +63,7 @@ export const getNoteTypeRouteHandler = async (req, res) => {
logger.trace(`Note type with ID: ${id}:`, noteType);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...noteType._doc, auditLogs: auditLogs });
} catch (error) {
@ -98,7 +93,7 @@ export const editNoteTypeRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(noteType.toObject(), updateData, id, 'NoteType', req.user._id, 'User');
await newAuditLog(noteType.toObject(), updateData, id, 'noteType', req.user._id, 'user');
const result = await noteTypeModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
@ -128,7 +123,7 @@ export const newNoteTypeRouteHandler = async (req, res) => {
}
// Create audit log for new note type
await newAuditLog({}, newNoteType, result._id, 'NoteType', req.user._id, 'User');
await newAuditLog({}, newNoteType, result._id, 'noteType', req.user._id, 'user');
res.status(200).send({ status: 'ok' });
} catch (updateError) {

View File

@ -5,8 +5,8 @@ import mongoose from 'mongoose';
import multer from 'multer';
import fs from 'fs';
import path from 'path';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { newAuditLog, getAuditLogs } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('Parts');
@ -152,11 +152,7 @@ export const getPartRouteHandler = async (req, res) => {
logger.trace(`Part with ID: ${id}:`, part);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...part._doc, auditLogs: auditLogs });
} catch (error) {
@ -184,7 +180,7 @@ export const editPartRouteHandler = async (req, res) => {
const updateData = req.body;
// Create audit log before updating
await newAuditLog(part.toObject(), updateData, id, 'Part', req.user._id, 'User');
await newAuditLog(part.toObject(), updateData, id, 'part', req.user._id, 'user');
const result = await partModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
@ -222,7 +218,7 @@ export const newPartRouteHandler = async (req, res) => {
// Create audit logs for each new part
for (const result of results) {
await newAuditLog({}, result.toObject(), result._id, 'Part', req.user._id, 'User');
await newAuditLog({}, result.toObject(), result._id, 'part', req.user._id, 'user');
}
return res.status(200).send(results);
@ -238,7 +234,7 @@ export const newPartRouteHandler = async (req, res) => {
const result = await partModel.create(newPart);
// Create audit log for new part
await newAuditLog({}, newPart, result._id, 'Part', req.user._id, 'User');
await newAuditLog({}, newPart, result._id, 'part', req.user._id, 'user');
return res.status(200).send(result);
}

View File

@ -3,8 +3,7 @@ import { productModel } from '../../schemas/management/product.schema.js';
import { partModel } from '../../schemas/management/part.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { newAuditLog, getAuditLogs } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('Products');
@ -82,11 +81,7 @@ export const getProductRouteHandler = async (req, res) => {
logger.trace(`Product with ID: ${id}:`, product);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...product._doc, auditLogs: auditLogs });
} catch (error) {
@ -130,7 +125,7 @@ export const editProductRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(product.toObject(), updateData, id, 'Product', req.user._id, 'User');
await newAuditLog(product.toObject(), updateData, id, 'product', req.user._id, 'user');
const result = await productModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
@ -165,7 +160,7 @@ export const newProductRouteHandler = async (req, res) => {
}
// Create audit log for new product
await newAuditLog({}, newProduct, newProductResult._id, 'Product', req.user._id, 'User');
await newAuditLog({}, newProduct, newProductResult._id, 'product', req.user._id, 'user');
const parts = req.body.parts || [];
const productId = newProductResult._id;
@ -188,7 +183,7 @@ export const newProductRouteHandler = async (req, res) => {
partIds.push(newPartResult._id);
// Create audit log for each new part
await newAuditLog({}, newPart, newPartResult._id, 'Part', req.user._id, 'User');
await newAuditLog({}, newPart, newPartResult._id, 'part', req.user._id, 'user');
}
const editProductResult = await productModel.updateOne(

View File

@ -3,7 +3,8 @@ import { userModel } from '../../schemas/management/user.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { getAuditLogs } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('Users');
@ -65,11 +66,7 @@ export const getUserRouteHandler = async (req, res) => {
logger.trace(`User with ID: ${id}:`, user);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...user._doc, auditLogs: auditLogs });
} catch (error) {
@ -106,7 +103,7 @@ export const editUserRouteHandler = async (req, res) => {
console.log(req.user);
// Create audit log before updating
await newAuditLog(user.toObject(), updateData, id, 'User', req.user._id, 'User');
await newAuditLog(user.toObject(), updateData, id, 'user', req.user._id, 'user');
const result = await userModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {

View File

@ -109,7 +109,7 @@ export const editVendorRouteHandler = async (req, res) => {
console.log(req.user);
// Create audit log before updating
await newAuditLog(vendor.toObject(), updateData, id, 'Vendor', req.user._id, 'User');
await newAuditLog(vendor.toObject(), updateData, id, 'vendor', req.user._id, 'user');
const result = await vendorModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
@ -143,9 +143,9 @@ export const newVendorRouteHandler = async (req, res) => {
{},
newVendor,
result._id,
'Vendor',
'vendor',
req.user.id, // Assuming user ID is available in req.user
'User'
'user'
);
res.status(200).send({ status: 'ok' });

View File

@ -2,8 +2,7 @@ import dotenv from 'dotenv';
import { noteModel } from '../../schemas/misc/note.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger('Notes');
@ -87,13 +86,7 @@ export const getNoteRouteHandler = async (req, res) => {
logger.trace(`Note with ID: ${id}:`, note);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...note._doc, auditLogs: auditLogs });
res.send({ ...note._doc });
} catch (error) {
logger.error('Error fetching note:', error);
res.status(500).send({ error: error.message });
@ -120,9 +113,6 @@ export const editNoteRouteHandler = async (req, res) => {
isActive: req.body.isActive,
};
// Create audit log before updating
await newAuditLog(note.toObject(), updateData, id, 'Note', req.user._id, 'User');
const result = await noteModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error('No note updated.');
@ -150,9 +140,6 @@ export const newNoteRouteHandler = async (req, res) => {
res.status(500).send({ error: 'No note created.' });
}
// Create audit log for new note
await newAuditLog({}, newNote, result._id, 'Note', req.user._id, 'User');
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error('Error creating note:', updateError);
@ -181,9 +168,6 @@ export const deleteNoteRouteHandler = async (req, res) => {
// Recursively find and delete all child notes
const deletedNoteIds = await recursivelyDeleteNotes(id);
// Create audit log for the deletion
await newAuditLog(note.toObject(), {}, id, 'Note', req.user._id, 'User', 'DELETE');
logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
res.send({
status: 'ok',

View File

@ -6,8 +6,7 @@ import multer from 'multer';
import path from 'path';
import fs from 'fs';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { newAuditLog, getAuditLogs } from '../../utils.js';
import { extractConfigBlock } from '../../utils.js';
dotenv.config();
@ -222,7 +221,7 @@ export const editGCodeFileRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(gcodeFile.toObject(), updateData, id, 'GCodeFile', req.user._id, 'User');
await newAuditLog(gcodeFile.toObject(), updateData, id, 'gcodeFile', req.user._id, 'user');
const result = await gcodeFileModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
@ -278,7 +277,7 @@ export const newGCodeFileRouteHandler = async (req, res) => {
}
// Create audit log for new gcodefile
await newAuditLog({}, newGCodeFile, result._id, 'GCodeFile', req.user._id, 'User');
await newAuditLog({}, newGCodeFile, result._id, 'gcodeFile', req.user._id, 'user');
res.status(200).send({ status: 'ok' });
} catch (updateError) {
@ -403,11 +402,7 @@ export const getGCodeFileRouteHandler = async (req, res) => {
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...gcodeFile._doc, auditLogs: auditLogs });
} catch (error) {

View File

@ -3,8 +3,7 @@ import mongoose from 'mongoose';
import { jobModel } from '../../schemas/production/job.schema.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import log4js from 'log4js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { getAuditLogs } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('Jobs');
@ -54,11 +53,7 @@ export const getJobRouteHandler = async (req, res) => {
logger.trace(`Job with ID: ${id}:`, job);
const targetIds = [id, ...job.subJobs.map((subJob) => subJob._id)];
const auditLogs = await auditLogModel
.find({
target: { $in: targetIds.map((id) => new mongoose.Types.ObjectId(id)) },
})
.populate('owner');
const auditLogs = await getAuditLogs(targetIds.map((id) => new mongoose.Types.ObjectId(id)));
res.send({ ...job._doc, auditLogs: auditLogs });
} catch (error) {

View File

@ -1,9 +1,7 @@
import dotenv from 'dotenv';
import { printerModel } from '../../schemas/production/printer.schema.js';
import log4js from 'log4js';
import { newAuditLog } from '../../utils.js';
import mongoose from 'mongoose';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { newAuditLog, getAuditLogs } from '../../utils.js';
dotenv.config();
@ -63,11 +61,7 @@ export const getPrinterRouteHandler = async (req, res) => {
logger.trace(`Printer with id ${id}:`, printer);
const auditLogs = await auditLogModel
.find({
target: new mongoose.Types.ObjectId(id),
})
.populate('owner');
const auditLogs = await getAuditLogs(id);
res.send({ ...printer._doc, auditLogs: auditLogs });
} catch (error) {
@ -97,7 +91,7 @@ export const editPrinterRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(printer.toObject(), updateData, id, 'Printer', req.user._id, 'User');
await newAuditLog(printer.toObject(), updateData, id, 'printer', req.user._id, 'user');
const result = await printerModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
@ -151,7 +145,7 @@ export const createPrinterRouteHandler = async (req, res) => {
const savedPrinter = await newPrinter.save();
// Create audit log for new printer
await newAuditLog({}, newPrinter.toObject(), savedPrinter._id, 'Printer', req.user._id, 'User');
await newAuditLog({}, newPrinter.toObject(), savedPrinter._id, 'printer', req.user._id, 'user');
logger.info(`Created new printer: ${name}`);
res.status(201).send(savedPrinter);

View File

@ -1,4 +1,5 @@
import { ObjectId } from 'mongodb'; // Only needed in Node.js with MongoDB driver
import { ObjectId } from 'mongodb';
import { auditLogModel } from './schemas/management/auditlog.schema.js';
function parseFilter(property, value) {
if (typeof value === 'string') {
@ -278,8 +279,6 @@ function getChangedValues(oldObj, newObj) {
}
async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
const { auditLogModel } = await import('./schemas/management/auditlog.schema.js');
// Get only the changed values
const changedValues = getChangedValues(oldValue, newValue);
@ -300,4 +299,12 @@ async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, o
await auditLog.save();
}
export { parseFilter, convertToCamelCase, extractConfigBlock, newAuditLog };
async function getAuditLogs(idOrIds) {
if (Array.isArray(idOrIds)) {
return auditLogModel.find({ target: { $in: idOrIds } }).populate('owner');
} else {
return auditLogModel.find({ target: idOrIds }).populate('owner');
}
}
export { parseFilter, convertToCamelCase, extractConfigBlock, newAuditLog, getAuditLogs };