Refactor routes and schemas: replace print-job references with "job", add user routes, and implement new audit-logging functionality. Update filtering methods across various services to enhance query capabilities.

This commit is contained in:
Tom Butcher 2025-06-28 00:18:21 +01:00
parent 5a5701088a
commit 11d80fb76e
49 changed files with 2127 additions and 423 deletions

View File

@ -5,10 +5,10 @@ import dotenv from "dotenv";
import { expressSession, keycloak } from "./keycloak.js"; import { expressSession, keycloak } from "./keycloak.js";
import { dbConnect } from "./mongo/index.js"; import { dbConnect } from "./mongo/index.js";
import { import {
apiRoutes,
authRoutes, authRoutes,
userRoutes,
printerRoutes, printerRoutes,
printJobRoutes, jobRoutes,
gcodeFileRoutes, gcodeFileRoutes,
filamentRoutes, filamentRoutes,
spotlightRoutes, spotlightRoutes,
@ -18,12 +18,18 @@ import {
materialRoutes, materialRoutes,
partStockRoutes, partStockRoutes,
filamentStockRoutes, filamentStockRoutes,
stockAuditRoutes,
stockEventRoutes,
auditLogRoutes,
noteTypeRoutes,
noteRoutes
} from "./routes/index.js"; } from "./routes/index.js";
import path from "path"; import path from "path";
import * as fs from "fs"; import * as fs from "fs";
import cron from "node-cron"; import cron from "node-cron";
import ReseedAction from "./mongo/ReseedAction.js"; import ReseedAction from "./mongo/ReseedAction.js";
import log4js from "log4js"; import log4js from "log4js";
import { populateUserMiddleware } from "./services/auth/index.js";
dotenv.config(); dotenv.config();
@ -56,6 +62,7 @@ app.use(
app.use(express.json()); app.use(express.json());
app.use(expressSession); app.use(expressSession);
app.use(keycloak.middleware()); app.use(keycloak.middleware());
app.use(populateUserMiddleware);
app.get("/", function (req, res) { app.get("/", function (req, res) {
const __dirname = fs.realpathSync("."); const __dirname = fs.realpathSync(".");
@ -63,10 +70,10 @@ app.get("/", function (req, res) {
}); });
app.use("/auth", authRoutes); app.use("/auth", authRoutes);
app.use("/overview", apiRoutes); app.use("/users", userRoutes)
app.use("/spotlight", spotlightRoutes); app.use("/spotlight", spotlightRoutes);
app.use("/printers", printerRoutes); app.use("/printers", printerRoutes);
app.use("/printjobs", printJobRoutes); app.use("/jobs", jobRoutes);
app.use("/gcodefiles", gcodeFileRoutes); app.use("/gcodefiles", gcodeFileRoutes);
app.use("/filaments", filamentRoutes); app.use("/filaments", filamentRoutes);
app.use("/parts", partRoutes); app.use("/parts", partRoutes);
@ -75,6 +82,11 @@ app.use("/vendors", vendorRoutes);
app.use("/materials", materialRoutes); app.use("/materials", materialRoutes);
app.use("/partstocks", partStockRoutes); app.use("/partstocks", partStockRoutes);
app.use("/filamentstocks", filamentStockRoutes); app.use("/filamentstocks", filamentStockRoutes);
app.use("/stockevents", stockEventRoutes);
app.use("/stockaudits", stockAuditRoutes);
app.use("/auditlogs", auditLogRoutes);
app.use("/notetypes", noteTypeRoutes);
app.use("/notes", noteRoutes)
if (process.env.SCHEDULE_HOUR) { if (process.env.SCHEDULE_HOUR) {
cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => { cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {

View File

@ -7,6 +7,9 @@ logger.level = process.env.LOG_LEVEL;
dotenv.config(); dotenv.config();
// Set strictQuery to false to prepare for Mongoose 7
mongoose.set('strictQuery', false);
function dbConnect() { function dbConnect() {
mongoose.connection.once("open", () => logger.info("Database connected.")); mongoose.connection.once("open", () => logger.info("Database connected."));
return mongoose.connect( return mongoose.connect(

View File

@ -1,7 +1,7 @@
import bcrypt from "bcrypt"; import bcrypt from "bcrypt";
import mongoose from "mongoose"; import mongoose from "mongoose";
import { userModel } from "../schemas/user.schema.js"; import { userModel } from "../schemas/user.schema.js";
import { printJobModel } from "../schemas/printjob.schema.js"; import { jobModel } from "../schemas/job.schema.js";
import { dbConnect } from "../mongo/index.js"; import { dbConnect } from "../mongo/index.js";
async function seedDB() { async function seedDB() {
@ -21,7 +21,7 @@ async function seedDB() {
const admin = new userModel(user); const admin = new userModel(user);
await admin.save(); await admin.save();
const printJob = { const job = {
_id: new mongoose.Types.ObjectId(1), _id: new mongoose.Types.ObjectId(1),
status : { status : {
type: "Queued" type: "Queued"
@ -31,8 +31,8 @@ async function seedDB() {
started_at: new Date(), started_at: new Date(),
}; };
const newPrintJob = new printJobModel(printJob); const newJob = new jobModel(job);
await newPrintJob.save(); await newJob.save();
console.log("DB seeded"); console.log("DB seeded");
} }

View File

@ -1,27 +0,0 @@
// NOTE(review): this router was deleted in this commit. It provided the
// former "/overview" dashboard and profile endpoints, superseded by the
// new dedicated /users routes.
import express from "express";
import { keycloak, isAuthenticated } from "../../keycloak.js";
const router = express.Router();
import {
  getProfileRouteHandler,
  patchProfileRouteHandler,
  getDashboardRouteHandler,
} from "../../services/api/index.js";

// Dashboard summary; guarded by a full Keycloak-protected session.
router.get("/", keycloak.protect(), (req, res) => {
  getDashboardRouteHandler(req, res);
});

// Read the current user's profile.
router.get("/user", isAuthenticated, (req, res) => {
  getProfileRouteHandler(req, res);
});

// Partial update of the current user's profile.
router.patch("/", isAuthenticated, async (req, res) => {
  patchProfileRouteHandler(req, res);
});

export default router;

View File

@ -0,0 +1,29 @@
import express from 'express';
import { listAuditLogsRouteHandler, getAuditLogRouteHandler } from '../../services/auditlogs/index.js';

const router = express.Router();

/**
 * @route GET /api/auditlogs
 * @desc Get all audit logs with pagination and filtering
 * @access Private
 *
 * NOTE(review): documented as Private but no auth middleware is applied
 * here (sibling routers use isAuthenticated) — confirm intent.
 */
router.get('/', async (req, res) => {
  const page = Number.parseInt(req.query.page, 10) || 1;
  const limit = Number.parseInt(req.query.limit, 10) || 25;
  const property = req.query.property || "";

  // Guard against malformed JSON in the `filter` query param: previously
  // an invalid value threw synchronously and surfaced as an unhandled 500.
  let filter = {};
  if (req.query.filter) {
    try {
      filter = JSON.parse(req.query.filter);
    } catch (err) {
      return res.status(400).json({ message: "Invalid filter parameter: must be valid JSON." });
    }
  }

  await listAuditLogsRouteHandler(req, res, page, limit, property, filter);
});

/**
 * @route GET /api/auditlogs/:id
 * @desc Get a single audit log by ID
 * @access Private
 */
router.get('/:id', async (req, res) => {
  await getAuditLogRouteHandler(req, res);
});

export default router;

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -16,12 +16,12 @@ router.get("/", isAuthenticated, (req, res) => {
const allowedFilters = ["type", "vendor.name", "diameter", "color"]; const allowedFilters = ["type", "vendor.name", "diameter", "color"];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value); filter = {...filter, ...parseFilter(key, value)};
} }
} }
} }

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -12,21 +12,22 @@ import {
// list of filamentStocks // list of filamentStocks
router.get("/", isAuthenticated, (req, res) => { router.get("/", isAuthenticated, (req, res) => {
const { page, limit, property } = req.query; const { page, limit, property, sort, order } = req.query;
const allowedFilters = ["country"]; const allowedFilters = ["country"];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value); const parsedFilter = parseFilter(key, value)
filter = {...filter, ...parsedFilter};
} }
} }
} }
listFilamentStocksRouteHandler(req, res, page, limit, property, filter); listFilamentStocksRouteHandler(req, res, page, limit, property, filter, sort, order);
}); });
router.post("/", isAuthenticated, (req, res) => { router.post("/", isAuthenticated, (req, res) => {

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -24,12 +24,13 @@ router.get("/", isAuthenticated, (req, res) => {
"filament.color", "filament.color",
]; ];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value); const parsedFilter = parseFilter(key, value)
filter = {...filter, ...parsedFilter};
} }
} }
} }

View File

@ -1,8 +1,7 @@
import userRoutes from "./users/index.js"; import userRoutes from "./users/index.js";
import apiRoutes from "./api/index.js";
import authRoutes from "./auth/index.js"; import authRoutes from "./auth/index.js";
import printerRoutes from "./printers/index.js"; import printerRoutes from "./printers/index.js";
import printJobRoutes from "./printjobs/index.js"; import jobRoutes from "./jobs/index.js";
import gcodeFileRoutes from "./gcodefiles/index.js"; import gcodeFileRoutes from "./gcodefiles/index.js";
import filamentRoutes from "./filaments/index.js"; import filamentRoutes from "./filaments/index.js";
import spotlightRoutes from "./spotlight/index.js"; import spotlightRoutes from "./spotlight/index.js";
@ -12,13 +11,17 @@ import vendorRoutes from "./vendors/index.js";
import materialRoutes from "./materials/index.js"; import materialRoutes from "./materials/index.js";
import partStockRoutes from "./partstocks/index.js"; import partStockRoutes from "./partstocks/index.js";
import filamentStockRoutes from "./filamentstocks/index.js"; import filamentStockRoutes from "./filamentstocks/index.js";
import stockEventRoutes from "./stockevents/index.js";
import stockAuditRoutes from "./stockaudits/index.js";
import auditLogRoutes from "./auditlogs/index.js";
import noteTypeRoutes from "./notetypes/index.js";
import noteRoutes from "./notes/index.js"
export { export {
userRoutes, userRoutes,
apiRoutes,
authRoutes, authRoutes,
printerRoutes, printerRoutes,
printJobRoutes, jobRoutes,
gcodeFileRoutes, gcodeFileRoutes,
filamentRoutes, filamentRoutes,
spotlightRoutes, spotlightRoutes,
@ -28,4 +31,9 @@ export {
materialRoutes, materialRoutes,
partStockRoutes, partStockRoutes,
filamentStockRoutes, filamentStockRoutes,
stockEventRoutes,
stockAuditRoutes,
auditLogRoutes,
noteTypeRoutes,
noteRoutes
}; };

View File

@ -3,36 +3,36 @@ import { isAuthenticated } from "../../keycloak.js";
const router = express.Router(); const router = express.Router();
import { import {
listPrintJobsRouteHandler, listJobsRouteHandler,
getPrintJobRouteHandler, getJobRouteHandler,
editPrintJobRouteHandler, editJobRouteHandler,
createPrintJobRouteHandler, createJobRouteHandler,
getPrintJobStatsRouteHandler getJobStatsRouteHandler
} from "../../services/printjobs/index.js"; } from "../../services/jobs/index.js";
// list of print jobs // list of print jobs
router.get("/", isAuthenticated, (req, res) => { router.get("/", isAuthenticated, (req, res) => {
const { page, limit } = req.body; const { page, limit } = req.body;
listPrintJobsRouteHandler(req, res, page, limit); listJobsRouteHandler(req, res, page, limit);
}); });
// get printer stats // get printer stats
router.get("/stats", isAuthenticated, (req, res) => { router.get("/stats", isAuthenticated, (req, res) => {
getPrintJobStatsRouteHandler(req, res); getJobStatsRouteHandler(req, res);
}); });
// create new print job // create new print job
router.post("/", isAuthenticated, (req, res) => { router.post("/", isAuthenticated, (req, res) => {
createPrintJobRouteHandler(req, res); createJobRouteHandler(req, res);
}); });
router.get("/:id", isAuthenticated, (req, res) => { router.get("/:id", isAuthenticated, (req, res) => {
getPrintJobRouteHandler(req, res); getJobRouteHandler(req, res);
}); });
// update job info // update job info
router.put("/:id", isAuthenticated, async (req, res) => { router.put("/:id", isAuthenticated, async (req, res) => {
editPrintJobRouteHandler(req, res); editJobRouteHandler(req, res);
}); });
export default router; export default router;

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {
const allowedFilters = ["type", "brand", "diameter", "color"]; const allowedFilters = ["type", "brand", "diameter", "color"];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value); const parsedFilter = parseFilter(key, value)
filter = {...filter, ...parsedFilter};
} }
} }
} }

63
src/routes/notes/index.js Normal file
View File

@ -0,0 +1,63 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import {
  listNotesRouteHandler,
  getNoteRouteHandler,
  editNoteRouteHandler,
  newNoteRouteHandler,
  deleteNoteRouteHandler
} from "../../services/notes/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();

// List notes with pagination, sorting and a whitelist of filter params.
router.get("/", isAuthenticated, async (req, res) => {
  const { page, limit, property, sort, order } = req.query;
  const allowedFilters = ["parent", "user._id"];

  // Build the filter from only the whitelisted query parameters.
  let filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter = { ...filter, ...parseFilter(key, value) };
    }
  }

  // Await the handler so rejections surface in this async handler
  // instead of becoming an unhandled floating promise.
  await listNotesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
});

// Get single note
router.get("/:id", isAuthenticated, getNoteRouteHandler);

// Edit note
router.put("/:id", isAuthenticated, editNoteRouteHandler);

// Delete note
router.delete("/:id", isAuthenticated, deleteNoteRouteHandler);

// Create new note
router.post("/", isAuthenticated, newNoteRouteHandler);

export default router;

View File

@ -0,0 +1,55 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import {
  listNoteTypesRouteHandler,
  getNoteTypeRouteHandler,
  editNoteTypeRouteHandler,
  newNoteTypeRouteHandler,
} from "../../services/notetypes/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();

// List note types with pagination, sorting and whitelisted filters.
router.get("/", isAuthenticated, async (req, res) => {
  const { page, limit, property, sort, order } = req.query;
  const allowedFilters = ["name", "active"];

  // Build the filter from only the whitelisted query parameters.
  let filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter = { ...filter, ...parseFilter(key, value) };
    }
  }

  // Await so rejections surface in this handler rather than floating.
  await listNoteTypesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
});

// Get single note type
router.get("/:id", isAuthenticated, getNoteTypeRouteHandler);

// Edit note type
router.put("/:id", isAuthenticated, editNoteTypeRouteHandler);

// Create new note type
router.post("/", isAuthenticated, newNoteTypeRouteHandler);

export default router;

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -18,12 +18,13 @@ router.get("/", isAuthenticated, (req, res) => {
const allowedFilters = ["products", "name"]; const allowedFilters = ["products", "name"];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = { $regex: parseStringIfNumber(value), $options: 'i' } const parsedFilter = parseFilter(key, value)
filter = {...filter, ...parsedFilter};
} }
} }
} }

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {
const allowedFilters = ["country"]; const allowedFilters = ["country"];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value); const parsedFilter = parseFilter(key, value)
filter = {...filter, ...parsedFilter};
} }
} }
} }

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {
const allowedFilters = ["type", "brand", "diameter", "color"]; const allowedFilters = ["type", "brand", "diameter", "color"];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value); const parsedFilter = parseFilter(key, value)
filter = {...filter, ...parsedFilter};
} }
} }
} }

View File

@ -0,0 +1,54 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import {
  listStockAuditsRouteHandler,
  getStockAuditRouteHandler,
  newStockAuditRouteHandler,
  updateStockAuditRouteHandler,
  deleteStockAuditRouteHandler,
} from "../../services/stockaudits/index.js";

const router = express.Router();

// List stock audits with pagination and a whitelist of filter params.
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;
  const allowedFilters = ["status", "type", "createdBy"];

  // Build the filter from only the whitelisted query parameters.
  let filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter = { ...filter, ...parseFilter(key, value) };
    }
  }

  listStockAuditsRouteHandler(req, res, page, limit, property, filter);
});

// Create new stock audit
router.post("/", isAuthenticated, (req, res) => {
  newStockAuditRouteHandler(req, res);
});

// Get specific stock audit
router.get("/:id", isAuthenticated, (req, res) => {
  getStockAuditRouteHandler(req, res);
});

// Update stock audit
router.put("/:id", isAuthenticated, (req, res) => {
  updateStockAuditRouteHandler(req, res);
});

// Delete stock audit
router.delete("/:id", isAuthenticated, (req, res) => {
  deleteStockAuditRouteHandler(req, res);
});

export default router;

View File

@ -0,0 +1,41 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import {
  listStockEventsRouteHandler,
  getStockEventRouteHandler,
  newStockEventRouteHandler,
} from "../../services/stockevents/index.js";

const router = express.Router();

// List stock events with pagination, sorting and whitelisted filters.
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property, sort, order } = req.query;
  const allowedFilters = ["type", "filamentStock"];

  // Build the filter from only the whitelisted query parameters.
  let filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter = { ...filter, ...parseFilter(key, value) };
    }
  }

  listStockEventsRouteHandler(req, res, page, limit, property, filter, sort, order);
});

// Create new stock event
router.post("/", isAuthenticated, (req, res) => {
  newStockEventRouteHandler(req, res);
});

// Get specific stock event
router.get("/:id", isAuthenticated, (req, res) => {
  getStockEventRouteHandler(req, res);
});

export default router;

View File

@ -1,22 +1,41 @@
import express from 'express'; import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import {
listUsersRouteHandler,
getUserRouteHandler,
editUserRouteHandler,
} from "../../services/users/index.js";
router.get('/', (req, res) => { // list of users
res.send({ router.get("/", isAuthenticated, (req, res) => {
data: [ const { page, limit, property } = req.query;
{
id: 1, const allowedFilters = ["username", "name", "firstName", "lastName"];
firstName: 'John',
lastName: 'Smith', var filter = {};
},
{ for (const [key, value] of Object.entries(req.query)) {
id: 2, for (var i = 0; i < allowedFilters.length; i++) {
firstName: 'Stacey', if (key == allowedFilters[i]) {
lastName: 'Smith', const parsedFilter = parseFilter(key, value)
}, filter = {...filter, ...parsedFilter};
], }
}); }
}
listUsersRouteHandler(req, res, page, limit, property, filter);
});
router.get("/:id", isAuthenticated, (req, res) => {
getUserRouteHandler(req, res);
});
// update user info
router.put("/:id", isAuthenticated, async (req, res) => {
editUserRouteHandler(req, res);
}); });
export default router; export default router;

View File

@ -1,6 +1,6 @@
import express from "express"; import express from "express";
import { isAuthenticated } from "../../keycloak.js"; import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js"; import { parseFilter } from "../../util/index.js";
const router = express.Router(); const router = express.Router();
import { import {
@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {
const allowedFilters = ["country"]; const allowedFilters = ["country"];
const filter = {}; var filter = {};
for (const [key, value] of Object.entries(req.query)) { for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) { for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) { if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value); const parsedFilter = parseFilter(key, value)
filter = {...filter, ...parsedFilter};
} }
} }
} }

View File

@ -0,0 +1,41 @@
import mongoose from "mongoose";

const { Schema } = mongoose;

// Models an audit entry may point at via the `target`/`targetModel` pair.
const TARGET_MODELS = [
  'Printer', 'Job', 'SubJob', 'FilamentStock', 'StockEvent', 'Vendor',
  'Part', 'Product', 'Material', 'Filament', 'GCodeFile', 'NoteType',
  'Note', 'User', // Add other models as needed
];

// Models that may own (i.e. have produced) an audit entry.
const OWNER_MODELS = ['User', 'Printer'];

/**
 * Records one change to a tracked document: the value before and after,
 * a dynamic reference to the changed document, and a dynamic reference
 * to whoever made the change.
 */
const auditLogSchema = new Schema(
  {
    oldValue: { type: Object, required: true },
    newValue: { type: Object, required: true },
    // Dynamic reference: the target collection is named by `targetModel`.
    target: { type: Schema.Types.ObjectId, refPath: 'targetModel', required: true },
    targetModel: { type: String, required: true, enum: TARGET_MODELS },
    // Dynamic reference: the owner collection is named by `ownerModel`.
    owner: { type: Schema.Types.ObjectId, refPath: 'ownerModel', required: true },
    ownerModel: { type: String, required: true, enum: OWNER_MODELS },
  },
  { timestamps: true }
);

// Expose `id` as the hex-string form of `_id`.
auditLogSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Include virtuals (notably `id`) when serializing to JSON.
auditLogSchema.set("toJSON", { virtuals: true });

// Create and export the model
export const auditLogModel = mongoose.model("AuditLog", auditLogSchema);

View File

@ -13,13 +13,7 @@ const filamentStockSchema = new Schema(
currentGrossWeight: { type: Number, required: true }, currentGrossWeight: { type: Number, required: true },
currentNetWeight: { type: Number, required: true }, currentNetWeight: { type: Number, required: true },
filament: { type: mongoose.Schema.Types.ObjectId, ref: "Filament" }, filament: { type: mongoose.Schema.Types.ObjectId, ref: "Filament" },
stockEvents: [{ stockEvents: [{ type: mongoose.Schema.Types.ObjectId, ref: "StockEvent" }]
type: { type: String, required: true },
value: { type: Number, required: true },
subJob: { type: mongoose.Schema.Types.ObjectId, ref: "PrintSubJob", required: false },
job: { type: mongoose.Schema.Types.ObjectId, ref: "PrintJob", required: false },
timestamp: { type: Date, default: Date.now }
}]
}, },
{ timestamps: true }, { timestamps: true },
); );

View File

@ -1,7 +1,7 @@
import mongoose from "mongoose"; import mongoose from "mongoose";
const { Schema } = mongoose; const { Schema } = mongoose;
const printJobSchema = new mongoose.Schema({ const jobSchema = new mongoose.Schema({
state: { state: {
type: { required: true, type: String }, type: { required: true, type: String },
}, },
@ -9,6 +9,7 @@ const printJobSchema = new mongoose.Schema({
createdAt: { required: true, type: Date }, createdAt: { required: true, type: Date },
updatedAt: { required: true, type: Date }, updatedAt: { required: true, type: Date },
startedAt: { required: false, type: Date }, startedAt: { required: false, type: Date },
finishedAt: { required: false, type: Date },
gcodeFile: { gcodeFile: {
type: Schema.Types.ObjectId, type: Schema.Types.ObjectId,
ref: "GCodeFile", ref: "GCodeFile",
@ -21,14 +22,17 @@ const printJobSchema = new mongoose.Schema({
min: 1, min: 1,
}, },
subJobs: [ subJobs: [
{ type: Schema.Types.ObjectId, ref: "PrintSubJob", required: false }, { type: Schema.Types.ObjectId, ref: "SubJob", required: false },
],
notes: [
{ type: Schema.Types.ObjectId, ref: "Note", required: false }
], ],
}); });
printJobSchema.virtual("id").get(function () { jobSchema.virtual("id").get(function () {
return this._id.toHexString(); return this._id.toHexString();
}); });
printJobSchema.set("toJSON", { virtuals: true }); jobSchema.set("toJSON", { virtuals: true });
export const printJobModel = mongoose.model("PrintJob", printJobSchema); export const jobModel = mongoose.model("Job", jobSchema);

View File

@ -0,0 +1,41 @@
import mongoose from "mongoose";

const { Schema } = mongoose;

/**
 * A free-form note attached to an arbitrary parent document.
 * `parent` carries no ref — presumably resolved by the owning feature;
 * TODO confirm which collections it may point at.
 */
const noteSchema = new mongoose.Schema(
  {
    // The document this note is attached to (no ref declared).
    parent: {
      type: Schema.Types.ObjectId,
      required: true,
    },
    content: {
      type: String,
      required: true,
    },
    noteType: {
      type: Schema.Types.ObjectId,
      ref: "NoteType",
      required: true,
    },
    // Author; optional so notes without an associated user are allowed.
    user: {
      type: Schema.Types.ObjectId,
      ref: "User",
      required: false,
    },
  },
  // Use mongoose-managed timestamps, matching the other schemas added in
  // this commit. The previous hand-rolled fields defaulted `updatedAt` to
  // Date.now at creation only, so it was never refreshed on save.
  { timestamps: true }
);

// Expose `id` as the hex-string form of `_id`.
noteSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Include virtuals (notably `id`) when serializing to JSON.
noteSchema.set("toJSON", { virtuals: true });

export const noteModel = mongoose.model("Note", noteSchema);

View File

@ -0,0 +1,32 @@
import mongoose from "mongoose";

const { Schema } = mongoose;

/**
 * A category applied to notes. Names are unique; types can be flagged
 * inactive rather than deleted so existing notes keep their type.
 */
const noteTypeSchema = new Schema(
  {
    // Unique display name for the type.
    name: { type: String, required: true, unique: true },
    // Optional display colour. NOTE(review): format (hex vs. name) is not
    // enforced here — confirm with the UI consumer.
    color: { type: String, required: false },
    // Soft-delete flag: newly created types default to active.
    active: { type: Boolean, required: true, default: true },
  },
  { timestamps: true }
);

// Expose `id` as the hex-string form of `_id`.
noteTypeSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Include virtuals (notably `id`) when serializing to JSON.
noteTypeSchema.set("toJSON", { virtuals: true });

export const noteTypeModel = mongoose.model("NoteType", noteTypeSchema);

View File

@ -6,7 +6,9 @@ const partStockSchema = new Schema(
{ {
name: { type: String, required: true }, name: { type: String, required: true },
fileName: { type: String, required: false }, fileName: { type: String, required: false },
product: { type: mongoose.Schema.Types.ObjectId, ref: "Product" }, part: { type: mongoose.Schema.Types.ObjectId, ref: "Part" },
startingQuantity: { type: Number, required: true },
currentQuantity: { type: Number, required: true },
}, },
{ timestamps: true }, { timestamps: true },
); );

View File

@ -39,10 +39,10 @@ const printerSchema = new Schema(
moonraker: { type: moonrakerSchema, required: true }, moonraker: { type: moonrakerSchema, required: true },
tags: [{ type: String }], tags: [{ type: String }],
firmware: { type: String }, firmware: { type: String },
currentJob: { type: Schema.Types.ObjectId, ref: "PrintJob" }, currentJob: { type: Schema.Types.ObjectId, ref: "Job" },
currentSubJob: { type: Schema.Types.ObjectId, ref: "PrintSubJob" }, currentSubJob: { type: Schema.Types.ObjectId, ref: "SubJob" },
currentFilamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock" }, currentFilamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock" },
subJobs: [{ type: Schema.Types.ObjectId, ref: "PrintSubJob" }], subJobs: [{ type: Schema.Types.ObjectId, ref: "SubJob" }],
vendor: { type: Schema.Types.ObjectId, ref: "Vendor", default: null }, vendor: { type: Schema.Types.ObjectId, ref: "Vendor", default: null },
alerts: [alertSchema], alerts: [alertSchema],
}, },

View File

@ -0,0 +1,38 @@
import mongoose from "mongoose";

const { Schema } = mongoose;

// Stock categories a single audit line can refer to.
const ITEM_TYPES = ["filament", "part"];

// Lifecycle states of a stock audit.
const AUDIT_STATUSES = ["pending", "in_progress", "completed", "cancelled"];

// One counted line in an audit: what was expected vs. what was found.
const stockAuditItemSchema = new Schema({
  type: { type: String, enum: ITEM_TYPES, required: true },
  // ObjectId of the audited stock document. No ref is declared: the
  // target collection depends on `type`.
  stock: { type: Schema.Types.ObjectId, required: true },
  expectedQuantity: { type: Number, required: true },
  actualQuantity: { type: Number, required: true },
  notes: { type: String },
});

/** A full stock-take: a set of counted items plus workflow status. */
const stockAuditSchema = new Schema(
  {
    type: { type: String, required: true },
    status: { type: String, enum: AUDIT_STATUSES, default: "pending", required: true },
    notes: { type: String },
    items: [stockAuditItemSchema],
    createdBy: { type: Schema.Types.ObjectId, ref: "User", required: true },
    completedAt: { type: Date },
  },
  { timestamps: true }
);

// Expose `id` as the hex-string form of `_id`.
stockAuditSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Include virtuals (notably `id`) when serializing to JSON.
stockAuditSchema.set("toJSON", { virtuals: true });

// Create and export the model
export const stockAuditModel = mongoose.model("StockAudit", stockAuditSchema);

View File

@ -0,0 +1,26 @@
import mongoose from "mongoose";
const { Schema } = mongoose;
// A single inventory movement against a filament spool (e.g. the "initial"
// fill created when stock is added, or consumption by a print job).
// `value` is the quantity moved, expressed in `unit`.
const stockEventSchema = new Schema(
  {
    type: { type: String, required: true },
    value: { type: Number, required: true },
    unit: { type: String, required: true},
    // Optional back-references to the job/sub-job that caused the movement.
    subJob: { type: Schema.Types.ObjectId, ref: "SubJob", required: false },
    job: { type: Schema.Types.ObjectId, ref: "Job", required: false },
    filamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock", required: true },
    // NOTE(review): `timestamp` duplicates the createdAt added by
    // { timestamps: true } — confirm both are needed.
    timestamp: { type: Date, default: Date.now }
  },
  { timestamps: true }
);

// Add virtual id getter
stockEventSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Configure JSON serialization to include virtuals
stockEventSchema.set("toJSON", { virtuals: true });

// Create and export the model
export const stockEventModel = mongoose.model("StockEvent", stockEventSchema);

View File

@ -1,15 +1,15 @@
import mongoose from "mongoose"; import mongoose from "mongoose";
const { Schema } = mongoose; const { Schema } = mongoose;
const printSubJobSchema = new mongoose.Schema({ const subJobSchema = new mongoose.Schema({
printer: { printer: {
type: Schema.Types.ObjectId, type: Schema.Types.ObjectId,
ref: "Printer", ref: "Printer",
required: true, required: true,
}, },
printJob: { job: {
type: Schema.Types.ObjectId, type: Schema.Types.ObjectId,
ref: "PrintJob", ref: "Job",
required: true, required: true,
}, },
subJobId: { subJobId: {
@ -37,15 +37,17 @@ const printSubJobSchema = new mongoose.Schema({
type: Date, type: Date,
default: Date.now, default: Date.now,
}, },
startedAt: { required: false, type: Date },
finishedAt: { required: false, type: Date },
}); });
printSubJobSchema.virtual("id").get(function () { subJobSchema.virtual("id").get(function () {
return this._id.toHexString(); return this._id.toHexString();
}); });
printSubJobSchema.set("toJSON", { virtuals: true }); subJobSchema.set("toJSON", { virtuals: true });
export const printSubJobModel = mongoose.model( export const subJobModel = mongoose.model(
"PrintSubJob", "SubJob",
printSubJobSchema, subJobSchema,
); );

View File

@ -1,25 +1,12 @@
import { Binary } from "mongodb";
import mongoose from "mongoose"; import mongoose from "mongoose";
const userSchema = new mongoose.Schema({ const userSchema = new mongoose.Schema({
name: { required: true, type: String }, username: { required: true, type: String},
name: { required: true, type: String},
firstName: { required: false, type: String },
lastName: { required: false, type: String },
email: { required: true, type: String }, email: { required: true, type: String },
emailVerifiedAt: { type: Date }, }, { timestamps: true },);
password: { required: true, type: String },
webAuthnCredentials: [
{
id: String,
publicKey: Buffer,
counter: Number,
deviceType: String,
backedUp: Boolean,
transports: [String],
},
],
profileImage: { type: String },
createdAt: { type: Date },
updatedAt: { type: Date },
});
userSchema.virtual("id").get(function () { userSchema.virtual("id").get(function () {
return this._id.toHexString(); return this._id.toHexString();

View File

@ -1,92 +0,0 @@
import bcrypt from "bcrypt";
import dotenv from 'dotenv';
import { userModel } from "../../schemas/user.schema.js";
import jwt from 'jsonwebtoken';
dotenv.config();
export const getDashboardRouteHandler = (req, res) => {
const sentData = {
data: {}
}
res.send(sentData);
}
export const getProfileRouteHandler = (req, res) => {
const meUser = req.user;
const stringId = req.user.id;
const decId = stringId.substring(4, 8);
const intId = parseInt(decId, 16);
const sentData = {
data: {
type: 'users',
id: intId === 1 ? intId : meUser.id,
attributes: {
name: meUser.name,
email: meUser.email,
profile_image: null,
createdAt: meUser.createdAt,
updateAt: meUser.updateAt
},
links: {
self: `${process.env.APP_URL_API}/users/${meUser.id}`
}
}
}
res.send(sentData);
}
export const patchProfileRouteHandler = async (req, res) => {
const currentDataOfUser = req.user;
const { name, email, newPassword, confirmPassword } = req.body.data.attributes;
const foundUser = await userModel.findOne({ email: currentDataOfUser.email});
if (!foundUser) {
res.status(400).json({error: 'No user matches the credentials'});
} else {
// check password more than 8 characters, new password matched the password confirmation
if (newPassword && newPassword < 7 || newPassword != confirmPassword) {
res.status(400).json({errors: { password: ["The password should have at lest 8 characters and match the password confirmation."] }});
} else if (newPassword && newPassword > 7 && newPassword == confirmPassword) {
const salt = await bcrypt.genSalt(10);
const hashPassword = await bcrypt.hash(newPassword, salt);
try{
await userModel.updateOne( { email: foundUser.email }, { $set :{ "name": name, "email": email, "password": hashPassword } });
} catch(err) {
console.error(err);
}
const sentData = {
data: {
type: 'users',
id: foundUser.id,
attributes: {
name: name,
email: email,
profile_image: null,
}
}
}
res.send(sentData);
} else if (!newPassword) {
try {
await userModel.updateOne( { email: foundUser.email }, { $set :{ "name": name, "email": email } });
} catch(err) {
console.error(err);
}
const sentData = {
data: {
type: 'users',
id: foundUser.id,
attributes: {
name: name,
email: email,
profile_image: null,
}
}
}
res.send(sentData);
}
}
}

View File

@ -0,0 +1,62 @@
import dotenv from "dotenv";
import { auditLogModel } from '../../schemas/auditlog.schema.js';
import log4js from "log4js";
import mongoose from "mongoose";
dotenv.config();
// Module-scoped logger for audit-log routes.
// NOTE(review): if LOG_LEVEL is unset this assigns undefined — confirm
// log4js falls back to a sensible default level in that case.
const logger = log4js.getLogger("AuditLogs");
logger.level = process.env.LOG_LEVEL;
/**
 * GET /auditlogs — paginated listing of audit logs, newest first.
 *
 * @param {import('express').Request} req
 * @param {import('express').Response} res
 * @param {number} page  1-based page number.
 * @param {number} limit Page size.
 * @param {string} property Unused here; kept for signature parity with the
 *   other list handlers in this codebase.
 * @param {object} filter Mongo query filter applied verbatim.
 */
export const listAuditLogsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    const auditLogs = await auditLogModel
      .find(filter)
      .skip(skip)
      .limit(Number(limit))
      .sort({ createdAt: -1 })
      .populate("owner", "name _id");

    logger.trace(
      `List of audit logs (Page ${page}, Limit ${limit}):`,
      auditLogs,
    );
    res.send(auditLogs);
  } catch (error) {
    logger.error("Error listing audit logs:", error);
    // Send the message rather than the Error object: an Error JSON-serializes
    // to {} and would hide the failure from API clients. This also matches
    // getAuditLogRouteHandler below.
    res.status(500).send({ error: error.message });
  }
};
/**
 * GET /auditlogs/:id — fetch a single audit log with related documents
 * populated. Responds 400 on a malformed id, 404 when no log matches.
 *
 * @param {import('express').Request} req  Expects req.params.id.
 * @param {import('express').Response} res
 */
export const getAuditLogRouteHandler = async (req, res) => {
  try {
    // Guard first: new ObjectId(...) throws on malformed input, which
    // previously surfaced as a 500 instead of a client error.
    if (!mongoose.Types.ObjectId.isValid(req.params.id)) {
      logger.warn("Invalid audit log id supplied.");
      return res.status(400).send({ error: "Invalid audit log id." });
    }
    const id = new mongoose.Types.ObjectId(req.params.id);

    // Fetch the audit log with the given ID.
    // NOTE(review): `target` presumably populates via a refPath on the
    // schema — confirm, since targets span multiple collections.
    const auditLog = await auditLogModel
      .findOne({ _id: id })
      .populate('printer')
      .populate('owner')
      .populate('target');

    if (!auditLog) {
      logger.warn(`Audit log not found with supplied id.`);
      return res.status(404).send({ error: "Audit log not found." });
    }

    logger.trace(`Audit log with ID: ${id}:`, auditLog);
    res.send(auditLog);
  } catch (error) {
    logger.error("Error fetching audit log:", error);
    res.status(500).send({ error: error.message });
  }
};

View File

@ -2,6 +2,7 @@ import dotenv from "dotenv";
import { keycloak } from "../../keycloak.js"; import { keycloak } from "../../keycloak.js";
import log4js from "log4js"; import log4js from "log4js";
import axios from "axios"; import axios from "axios";
import { userModel } from "../../schemas/user.schema.js";
dotenv.config(); dotenv.config();
@ -27,6 +28,49 @@ export const loginRouteHandler = (req, res) => {
); );
}; };
// Function to fetch user from Keycloak and store in database and session
/**
 * Fetches the authenticated user's profile from Keycloak's userinfo endpoint,
 * upserts it into the local database, and caches the result on the session.
 *
 * @param {import('express').Request} req  Session-bearing request; the merged
 *   user record is written to req.session.user.
 * @param {object} token Keycloak token bundle; access_token is sent as the
 *   Bearer credential and expires_at/realm_access.roles are copied through.
 * @returns {Promise<object>} The user info merged with the DB _id.
 * @throws Re-throws any axios/DB error after logging it.
 */
const fetchAndStoreUser = async (req, token) => {
  const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;

  try {
    // POST with client credentials in the form body and the user's access
    // token as Bearer auth — Keycloak returns the OIDC userinfo claims.
    const response = await axios.post(
      userInfoUrl,
      new URLSearchParams({
        client_id: process.env.KEYCLOAK_CLIENT_ID,
        client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
      }),
      {
        headers: {
          Authorization: `Bearer ${token.access_token}`,
        },
      }
    );

    // Normalize Keycloak claim names to the local user shape.
    const userInfo = {
      access_token: token.access_token,
      expires_at: token.expires_at,
      roles: token.realm_access?.roles || [],
      username: response.data.preferred_username,
      email: response.data.email,
      name: response.data.name,
      firstName: response.data.given_name,
      lastName: response.data.family_name,
    };

    // Create or update user in database
    const user = await createOrUpdateUser(userInfo);
    const fullUserInfo = { ...userInfo, _id: user._id };

    // Store user info in session
    req.session.user = fullUserInfo;

    return fullUserInfo;
  } catch (error) {
    logger.error("Error fetching and storing user:", error);
    throw error;
  }
};
// Login callback handler // Login callback handler
export const loginCallbackRouteHandler = (req, res) => { export const loginCallbackRouteHandler = (req, res) => {
// Don't use keycloak.protect() here as it expects an already authenticated session // Don't use keycloak.protect() here as it expects an already authenticated session
@ -60,20 +104,30 @@ export const loginCallbackRouteHandler = (req, res) => {
}, },
}, },
) )
.then((response) => { .then(async (response) => {
// Store tokens in session // Store tokens in session
req.session["keycloak-token"] = { const tokenData = {
access_token: response.data.access_token, access_token: response.data.access_token,
refresh_token: response.data.refresh_token, refresh_token: response.data.refresh_token,
id_token: response.data.id_token, id_token: response.data.id_token,
expires_at: new Date().getTime() + response.data.expires_in * 1000, expires_at: new Date().getTime() + response.data.expires_in * 1000,
}; };
// Save session and redirect to the original URL req.session["keycloak-token"] = tokenData;
req.session.save(() => {
res.redirect( try {
(process.env.APP_URL_CLIENT || "http://localhost:3000") + state, // Fetch and store user data
); await fetchAndStoreUser(req, tokenData);
});
// Save session and redirect to the original URL
req.session.save(() => {
res.redirect(
(process.env.APP_URL_CLIENT || "http://localhost:3000") + state,
);
});
} catch (error) {
logger.error("Error during user setup:", error);
res.status(500).send("Error setting up user session");
}
}) })
.catch((error) => { .catch((error) => {
console.error( console.error(
@ -84,51 +138,65 @@ export const loginCallbackRouteHandler = (req, res) => {
}); });
}; };
export const userRouteHandler = (req, res) => { // Function to create or update user
if (req.session && req.session["keycloak-token"]) { const createOrUpdateUser = async (userInfo) => {
const token = req.session["keycloak-token"]; try {
const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`; const { username, email, name, firstName, lastName } = userInfo;
// User is authenticated
// Extract user info from the token // Find existing user by username
// const existingUser = await userModel.findOne({ username });
logger.info("Fetching user from keycloak...");
axios if (existingUser) {
.post( // Check if any values have changed
userInfoUrl, const hasChanges =
new URLSearchParams({ existingUser.email !== email ||
client_id: process.env.KEYCLOAK_CLIENT_ID, existingUser.name !== name ||
client_secret: process.env.KEYCLOAK_CLIENT_SECRET, existingUser.firstName !== firstName ||
}), existingUser.lastName !== lastName;
{
headers: { if (hasChanges) {
Authorization: `Bearer ${token.access_token}`, // Update existing user only if there are changes
}, const updateData = {
}, email,
) name,
.then((response) => { firstName,
const userInfo = { lastName,
// Extract user details from token updatedAt: new Date()
// This depends on your token structure
access_token: token.access_token,
expires_at: token.expires_at,
roles: token.realm_access?.roles || [],
username: response.data.preferred_username,
email: response.data.email,
name: response.data.name,
firstName: response.data.given_name,
lastName: response.data.family_name,
}; };
res.json(userInfo);
}) await userModel.updateOne(
.catch((error) => { { username },
logger.error( { $set: updateData }
"Token exchange error:",
error.response?.data || error.message,
); );
res.status(500).send("Authentication failed");
// Fetch the updated user to return
return await userModel.findOne({ username });
}
return existingUser;
} else {
// Create new user
const newUser = new userModel({
username,
email,
name,
firstName,
lastName
}); });
await newUser.save();
return newUser;
}
} catch (error) {
logger.error("Error creating/updating user:", error);
throw error;
}
};
export const userRouteHandler = (req, res) => {
if (req.session && req.session.user) {
res.json(req.session.user);
} else { } else {
// User is not authenticated
res.status(401).json({ error: "Not authenticated" }); res.status(401).json({ error: "Not authenticated" });
} }
}; };
@ -270,6 +338,16 @@ export const refreshTokenRouteHandler = (req, res) => {
}); });
}; };
// Middleware to populate req.user from session
// Express middleware: expose the session-cached user (set at login) as
// req.user for downstream handlers; null when no authenticated session.
export const populateUserMiddleware = (req, res, next) => {
  req.user = req.session?.user || null;
  next();
};
// Example of how to set up your routes in Express // Example of how to set up your routes in Express
/* /*
import express from "express"; import express from "express";

View File

@ -3,6 +3,8 @@ import { filamentModel } from "../../schemas/filament.schema.js";
import jwt from "jsonwebtoken"; import jwt from "jsonwebtoken";
import log4js from "log4js"; import log4js from "log4js";
import mongoose from "mongoose"; import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config(); dotenv.config();
@ -82,7 +84,12 @@ export const getFilamentRouteHandler = async (req, res) => {
} }
logger.trace(`Filament with ID: ${id}:`, filament); logger.trace(`Filament with ID: ${id}:`, filament);
res.send(filament);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
res.send({...filament._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching Filament:", error); logger.error("Error fetching Filament:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
@ -112,7 +119,7 @@ export const editFilamentRouteHandler = async (req, res) => {
url: req.body.url, url: req.body.url,
image: req.body.image, image: req.body.image,
color: req.body.color, color: req.body.color,
vendor: req.body.vendor.id, vendor: req.body.vendor._id,
type: req.body.type, type: req.body.type,
price: req.body.price, price: req.body.price,
diameter: req.body.diameter, diameter: req.body.diameter,
@ -120,6 +127,16 @@ export const editFilamentRouteHandler = async (req, res) => {
emptySpoolWeight: req.body.emptySpoolWeight, emptySpoolWeight: req.body.emptySpoolWeight,
}; };
// Create audit log before updating
await newAuditLog(
filament.toObject(),
updateData,
id,
'Filament',
req.user._id,
'User'
);
const result = await filamentModel.updateOne( const result = await filamentModel.updateOne(
{ _id: id }, { _id: id },
{ $set: updateData }, { $set: updateData },
@ -164,6 +181,16 @@ export const newFilamentRouteHandler = async (req, res) => {
res.status(500).send({ error: "No filament created." }); res.status(500).send({ error: "No filament created." });
} }
// Create audit log for new filament
await newAuditLog(
{},
newFilament,
result._id,
'Filament',
req.user._id,
'User'
);
res.status(200).send({ status: "ok" }); res.status(200).send({ status: "ok" });
} catch (updateError) { } catch (updateError) {
logger.error("Error updating filament:", updateError); logger.error("Error updating filament:", updateError);

View File

@ -1,9 +1,11 @@
import dotenv from "dotenv"; import dotenv from "dotenv";
import { filamentStockModel } from "../../schemas/filamentstock.schema.js"; import { filamentStockModel } from "../../schemas/filamentstock.schema.js";
import { filamentModel } from "../../schemas/filament.schema.js"; import { filamentModel } from "../../schemas/filament.schema.js";
import { stockEventModel } from "../../schemas/stockevent.schema.js";
import jwt from "jsonwebtoken"; import jwt from "jsonwebtoken";
import log4js from "log4js"; import log4js from "log4js";
import mongoose from "mongoose"; import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config(); dotenv.config();
@ -17,6 +19,8 @@ export const listFilamentStocksRouteHandler = async (
limit = 25, limit = 25,
property = "", property = "",
filter = {}, filter = {},
sort = "",
order = "ascend"
) => { ) => {
try { try {
// Calculate the skip value based on the page number and limit // Calculate the skip value based on the page number and limit
@ -48,6 +52,12 @@ export const listFilamentStocksRouteHandler = async (
aggregateCommand.push({ $project: { image: 0, url: 0 } }); aggregateCommand.push({ $project: { image: 0, url: 0 } });
} }
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === "descend" ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
aggregateCommand.push({ $skip: skip }); aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) }); aggregateCommand.push({ $limit: Number(limit) });
@ -75,19 +85,20 @@ export const getFilamentStockRouteHandler = async (req, res) => {
.findOne({ .findOne({
_id: id, _id: id,
}) })
.populate("filament").populate({ .populate("filament")
path: 'stockEvents', .populate({
populate: [ path: 'stockEvents',
{ populate: [
path: 'subJob', {
select: 'number' path: 'subJob',
}, select: 'number'
{ },
path: 'job', {
select: 'startedAt' path: 'job',
} select: 'startedAt'
] }
}); ]
});
if (!filamentStock) { if (!filamentStock) {
logger.warn(`Filament stock not found with supplied id.`); logger.warn(`Filament stock not found with supplied id.`);
@ -95,7 +106,12 @@ export const getFilamentStockRouteHandler = async (req, res) => {
} }
logger.trace(`Filament stock with ID: ${id}:`, filamentStock); logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
res.send(filamentStock);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
res.send({...filamentStock._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching filament stock:", error); logger.error("Error fetching filament stock:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
@ -189,6 +205,29 @@ export const newFilamentStockRouteHandler = async (req, res) => {
logger.error("No filament stock created."); logger.error("No filament stock created.");
return res.status(500).send({ error: "No filament stock created." }); return res.status(500).send({ error: "No filament stock created." });
} }
// Create initial stock event
const stockEvent = {
type: "initial",
value: startingNetWeight,
unit: "g",
filamentStock: result._id,
createdAt: new Date(),
updatedAt: new Date(),
};
const eventResult = await stockEventModel.create(stockEvent);
if (!eventResult) {
logger.error("Failed to create initial stock event.");
return res.status(500).send({ error: "Failed to create initial stock event." });
}
// Update the filament stock with the stock event reference
await filamentStockModel.updateOne(
{ _id: result._id },
{ $push: { stockEvents: eventResult._id } }
);
return res.send({ status: "ok" }); return res.send({ status: "ok" });
} catch (updateError) { } catch (updateError) {
logger.error("Error adding filament stock:", updateError); logger.error("Error adding filament stock:", updateError);

View File

@ -8,6 +8,8 @@ import crypto from "crypto";
import path from "path"; import path from "path";
import fs from "fs"; import fs from "fs";
import mongoose from "mongoose"; import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
import { extractConfigBlock } from "../../util/index.js"; import { extractConfigBlock } from "../../util/index.js";
@ -225,6 +227,16 @@ export const editGCodeFileRouteHandler = async (req, res) => {
filament: req.body?.filament?._id, filament: req.body?.filament?._id,
}; };
// Create audit log before updating
await newAuditLog(
gcodeFile.toObject(),
updateData,
id,
'GCodeFile',
req.user._id,
'User'
);
const result = await gcodeFileModel.updateOne( const result = await gcodeFileModel.updateOne(
{ _id: id }, { _id: id },
{ $set: updateData }, { $set: updateData },
@ -280,7 +292,18 @@ export const newGCodeFileRouteHandler = async (req, res) => {
logger.error("No gcode file created."); logger.error("No gcode file created.");
res.status(500).send({ error: "No gcode file created." }); res.status(500).send({ error: "No gcode file created." });
} }
res.status(200).send(result);
// Create audit log for new gcodefile
await newAuditLog(
{},
newGCodeFile,
result._id,
'GCodeFile',
req.user._id,
'User'
);
res.status(200).send({ status: "ok" });
} catch (updateError) { } catch (updateError) {
logger.error("Error creating gcode file:", updateError); logger.error("Error creating gcode file:", updateError);
res.status(500).send({ error: updateError.message }); res.status(500).send({ error: updateError.message });
@ -407,8 +430,13 @@ export const getGCodeFileRouteHandler = async (req, res) => {
return res.status(404).send({ error: "Print job not found." }); return res.status(404).send({ error: "Print job not found." });
} }
logger.trace(`GCodeFile with ID: ${id}:`); logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
res.send(gcodeFile);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
res.send({...gcodeFile._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching GCodeFile:", error); logger.error("Error fetching GCodeFile:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });

View File

@ -1,16 +1,18 @@
import dotenv from "dotenv"; import dotenv from "dotenv";
import mongoose from "mongoose"; import mongoose from "mongoose";
import { printJobModel } from "../../schemas/printjob.schema.js"; import { jobModel } from "../../schemas/job.schema.js";
import { printSubJobModel } from "../../schemas/printsubjob.schema.js"; import { subJobModel } from "../../schemas/subjob.schema.js";
import { noteModel } from "../../schemas/note.schema.js";
import jwt from "jsonwebtoken"; import jwt from "jsonwebtoken";
import log4js from "log4js"; import log4js from "log4js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config(); dotenv.config();
const logger = log4js.getLogger("PrintJobs"); const logger = log4js.getLogger("Jobs");
logger.level = process.env.LOG_LEVEL; logger.level = process.env.LOG_LEVEL;
export const listPrintJobsRouteHandler = async ( export const listJobsRouteHandler = async (
req, req,
res, res,
page = 1, page = 1,
@ -21,7 +23,7 @@ export const listPrintJobsRouteHandler = async (
const skip = (page - 1) * limit; const skip = (page - 1) * limit;
// Fetch users with pagination // Fetch users with pagination
const printJobs = await printJobModel const jobs = await jobModel
.find() .find()
.sort({ createdAt: -1 }) .sort({ createdAt: -1 })
.skip(skip) .skip(skip)
@ -30,75 +32,82 @@ export const listPrintJobsRouteHandler = async (
.populate("gcodeFile", "name"); .populate("gcodeFile", "name");
logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`); logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
res.send(printJobs); res.send(jobs);
} catch (error) { } catch (error) {
logger.error("Error listing print jobs:", error); logger.error("Error listing print jobs:", error);
res.status(500).send({ error: error }); res.status(500).send({ error: error });
} }
}; };
export const getPrintJobRouteHandler = async (req, res) => { export const getJobRouteHandler = async (req, res) => {
try { try {
// Get ID from params // Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id); const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the printJob with the given remote address // Fetch the job with the given remote address
const printJob = await printJobModel const job = await jobModel
.findOne({ .findOne({
_id: id, _id: id,
}) })
.populate("printers", "name state") .populate("printers", "name state")
.populate("gcodeFile") .populate("gcodeFile")
.populate("subJobs"); .populate("subJobs")
.populate("notes");
if (!printJob) { if (!job) {
logger.warn(`PrintJob not found with supplied id.`); logger.warn(`Job not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." }); return res.status(404).send({ error: "Print job not found." });
} }
logger.trace(`PrintJob with ID: ${id}:`, printJob); logger.trace(`Job with ID: ${id}:`, job);
res.send(printJob);
const targetIds = [id, ...job.subJobs.map(subJob => subJob._id)];
const auditLogs = await auditLogModel.find({
target: { $in: targetIds.map(id => new mongoose.Types.ObjectId(id)) }
}).populate('owner');
res.send({...job._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching printJob:", error); logger.error("Error fetching job:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
} }
}; };
export const editPrintJobRouteHandler = async (req, res) => { export const editJobRouteHandler = async (req, res) => {
try { try {
// Get ID from params // Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id); const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the printJob with the given remote address // Fetch the job with the given remote address
const printJob = await printJobModel.findOne({ _id: id }); const job = await jobModel.findOne({ _id: id });
if (!printJob) { if (!job) {
logger.warn(`PrintJob not found with supplied id.`); logger.warn(`Job not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." }); return res.status(404).send({ error: "Print job not found." });
} }
logger.trace(`PrintJob with ID: ${id}:`, printJob); logger.trace(`Job with ID: ${id}:`, job);
const { createdAt, updatedAt, started_at, status, ...updateData } = const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body; req.body;
const result = await printJobModel.updateOne( const result = await jobModel.updateOne(
{ _id: id }, { _id: id },
{ $set: updateData }, { $set: updateData },
); );
if (result.nModified === 0) { if (result.nModified === 0) {
logger.warn("No printJobs updated."); logger.warn("No jobs updated.");
return res.status(400).send({ error: "No printJobs updated." }); return res.status(400).send({ error: "No jobs updated." });
} }
res.send({ message: "Print job updated successfully" }); res.send({ message: "Print job updated successfully" });
} catch (error) { } catch (error) {
logger.error("Error updating printJob:", error); logger.error("Error updating job:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
} }
}; };
export const createPrintJobRouteHandler = async (req, res) => { export const createJobRouteHandler = async (req, res) => {
try { try {
const { gcodeFile, printers, quantity = 1 } = req.body; const { gcodeFile, printers, quantity = 1 } = req.body;
@ -112,7 +121,7 @@ export const createPrintJobRouteHandler = async (req, res) => {
const printerIds = printers.map((id) => new mongoose.Types.ObjectId(id)); const printerIds = printers.map((id) => new mongoose.Types.ObjectId(id));
// Create new print job // Create new print job
const newPrintJob = new printJobModel({ const newJob = new jobModel({
state: { type: "draft" }, state: { type: "draft" },
printers: printerIds, printers: printerIds,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null, gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
@ -124,14 +133,14 @@ export const createPrintJobRouteHandler = async (req, res) => {
}); });
// Save the print job first to get its ID // Save the print job first to get its ID
const savedPrintJob = await newPrintJob.save(); const savedJob = await newJob.save();
// Create subjobs array with sequential numbers based on quantity // Create subjobs array with sequential numbers based on quantity
const subJobs = await Promise.all( const subJobs = await Promise.all(
Array.from({ length: quantity }, (_, index) => { Array.from({ length: quantity }, (_, index) => {
const subJob = new printSubJobModel({ const subJob = new subJobModel({
printer: printerIds[index % printerIds.length], // Distribute across available printers printer: printerIds[index % printerIds.length], // Distribute across available printers
printJob: savedPrintJob._id, job: savedJob._id,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null, gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
subJobId: `subjob-${index + 1}`, subJobId: `subjob-${index + 1}`,
state: { type: "draft" }, state: { type: "draft" },
@ -144,22 +153,22 @@ export const createPrintJobRouteHandler = async (req, res) => {
); );
// Update the print job with the subjob references // Update the print job with the subjob references
savedPrintJob.subJobs = subJobs.map((subJob) => subJob._id); savedJob.subJobs = subJobs.map((subJob) => subJob._id);
await savedPrintJob.save(); await savedJob.save();
logger.trace( logger.trace(
`Created new print job with ID: ${savedPrintJob._id} and ${subJobs.length} subjobs`, `Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`,
); );
res.status(201).send({ printJob: savedPrintJob, subJobs }); res.status(201).send({ job: savedJob, subJobs });
} catch (error) { } catch (error) {
logger.error("Error creating print job:", error); logger.error("Error creating print job:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
} }
}; };
export const getPrintJobStatsRouteHandler = async (req, res) => { export const getJobStatsRouteHandler = async (req, res) => {
try { try {
const stats = await printJobModel.aggregate([ const stats = await jobModel.aggregate([
{ {
$group: { $group: {
_id: "$state.type", _id: "$state.type",

246
src/services/notes/index.js Normal file
View File

@ -0,0 +1,246 @@
import dotenv from "dotenv";
import { noteModel } from "../../schemas/note.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config();
// Module-scoped logger for note routes.
// NOTE(review): if LOG_LEVEL is unset this assigns undefined — confirm
// log4js falls back to a sensible default level in that case.
const logger = log4js.getLogger("Notes");
logger.level = process.env.LOG_LEVEL;
/**
 * GET /notes — paginated listing of notes with their author and note type
 * joined in via aggregation.
 *
 * @param {import('express').Request} req
 * @param {import('express').Response} res
 * @param {number} page  1-based page number.
 * @param {number} limit Page size.
 * @param {string} property Unused here; kept for signature parity with the
 *   other list handlers in this codebase.
 * @param {object} filter Mongo $match stage applied verbatim when non-empty.
 */
export const listNotesRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    const skip = (page - 1) * limit;

    const pipeline = [];
    if (Object.keys(filter).length > 0) {
      pipeline.push({ $match: filter });
    }

    // Join the authoring user.
    // NOTE(review): $unwind silently drops notes whose user or noteType
    // reference is missing/dangling — confirm that is intended.
    pipeline.push({
      $lookup: {
        from: "users", // The collection name (usually lowercase plural)
        localField: "user",
        foreignField: "_id",
        as: "user",
      },
    });
    pipeline.push({ $unwind: "$user" });

    // Join the note type (name/color used for display).
    pipeline.push({
      $lookup: {
        from: "notetypes",
        localField: "noteType",
        foreignField: "_id",
        as: "noteType",
      },
    });
    pipeline.push({ $unwind: "$noteType" });

    // Trim each joined document down to the fields the client needs.
    pipeline.push({
      $project: {
        name: 1,
        _id: 1,
        createdAt: 1,
        updatedAt: 1,
        "noteType._id": 1,
        "noteType.name": 1,
        "noteType.color": 1,
        "user._id": 1,
        "user.name": 1,
        content: 1,
        parent: 1,
      },
    });

    pipeline.push({ $skip: skip });
    pipeline.push({ $limit: Number(limit) });

    const notes = await noteModel.aggregate(pipeline);

    logger.trace(
      `List of notes (Page ${page}, Limit ${limit}, Property ${property}):`,
      notes,
    );
    res.send(notes);
  } catch (error) {
    logger.error("Error listing notes:", error);
    // Send the message rather than the Error object: an Error JSON-serializes
    // to {} and would hide the failure from API clients.
    res.status(500).send({ error: error.message });
  }
};
/**
 * GET /notes/:id — fetch a single note together with its audit-log history.
 * Responds 400 on a malformed id, 404 when no note matches.
 *
 * @param {import('express').Request} req  Expects req.params.id.
 * @param {import('express').Response} res
 */
export const getNoteRouteHandler = async (req, res) => {
  try {
    // Guard first: new ObjectId(...) throws on malformed input, which
    // previously surfaced as a 500 instead of a client error.
    if (!mongoose.Types.ObjectId.isValid(req.params.id)) {
      logger.warn("Invalid note id supplied.");
      return res.status(400).send({ error: "Invalid note id." });
    }
    const id = new mongoose.Types.ObjectId(req.params.id);

    const note = await noteModel.findOne({ _id: id });
    if (!note) {
      logger.warn(`Note not found with supplied id.`);
      return res.status(404).send({ error: "Note not found." });
    }

    logger.trace(`Note with ID: ${id}:`, note);

    // Attach this note's audit trail, with the acting user populated.
    const auditLogs = await auditLogModel.find({ target: id }).populate('owner');

    res.send({ ...note._doc, auditLogs: auditLogs });
  } catch (error) {
    logger.error("Error fetching note:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * Update an existing note's editable fields and record an audit-log entry
 * capturing the before/after states.
 *
 * Responds "OK" on success, 404 when the note does not exist, 500 otherwise.
 */
export const editNoteRouteHandler = async (req, res) => {
  try {
    const id = new mongoose.Types.ObjectId(req.params.id);

    const note = await noteModel.findOne({ _id: id });
    if (!note) {
      logger.warn(`Note not found with supplied id.`);
      return res.status(404).send({ error: "Note not found." });
    }
    logger.trace(`Note with ID: ${id}:`, note);

    try {
      // NOTE(review): these fields mirror the note-type editor (name/color/
      // isActive) and never touch `content` — confirm that is intentional.
      const updateData = {
        updatedAt: new Date(),
        name: req.body.name,
        color: req.body.color,
        isActive: req.body.isActive,
      };

      // Record the before/after states before applying the change.
      await newAuditLog(
        note.toObject(),
        updateData,
        id,
        'Note',
        req.user._id,
        'User'
      );

      const result = await noteModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // `matchedCount` is the current Mongoose result field; the legacy
      // `nModified` is undefined on modern drivers, so the old check could
      // never fire. Return here so we do not also send "OK" below.
      if (result.matchedCount === 0) {
        logger.error("No note updated.");
        return res.status(500).send({ error: "No notes updated." });
      }
    } catch (updateError) {
      logger.error("Error updating note:", updateError);
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching note:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};
/**
 * Create a new note attributed to the authenticated user, stamping
 * created/updated timestamps and writing a creation audit-log entry.
 *
 * Responds `{ status: "ok" }` on success, 500 on failure.
 */
export const newNoteRouteHandler = async (req, res) => {
  try {
    // Stamp timestamps and attribute the note to the authenticated user.
    const newNote = {
      ...req.body,
      createdAt: new Date(),
      updatedAt: new Date(),
      user: req.user._id,
    };

    const result = await noteModel.create(newNote);
    // `create` throws on failure and returns the document on success; the
    // legacy `nCreated` field does not exist on a document, so the old check
    // was dead code. Return here so we never send a second response below.
    if (!result) {
      logger.error("No note created.");
      return res.status(500).send({ error: "No note created." });
    }

    // Audit the creation (empty "before" state).
    await newAuditLog(
      {},
      newNote,
      result._id,
      'Note',
      req.user._id,
      'User'
    );

    res.status(200).send({ status: "ok" });
  } catch (updateError) {
    logger.error("Error creating note:", updateError);
    res.status(500).send({ error: updateError.message });
  }
};
/**
 * Delete a note and its entire subtree of child notes. Only the note's
 * author may delete it. Writes a DELETE audit-log entry and reports every
 * removed note id back to the caller.
 */
export const deleteNoteRouteHandler = async (req, res) => {
  try {
    const noteId = new mongoose.Types.ObjectId(req.params.id);

    const existing = await noteModel.findOne({ _id: noteId });
    if (!existing) {
      logger.warn(`Note not found with supplied id.`);
      return res.status(404).send({ error: "Note not found." });
    }

    // Only the note's author may delete it.
    const isOwner = existing.user.toString() === req.user._id.toString();
    if (!isOwner) {
      logger.warn(`User ${req.user._id} attempted to delete note ${noteId} owned by user ${existing.user}`);
      return res.status(403).send({ error: "You can only delete your own notes." });
    }

    logger.trace(`Deleting note with ID: ${noteId} and all its children`);

    // Remove the note together with every descendant reply.
    const deletedNoteIds = await recursivelyDeleteNotes(noteId);

    // Audit the deletion (empty "after" state, explicit DELETE action).
    await newAuditLog(
      existing.toObject(),
      {},
      noteId,
      'Note',
      req.user._id,
      'User',
      'DELETE'
    );

    logger.info(`Successfully deleted note ${noteId} and ${deletedNoteIds.length - 1} child notes`);
    res.send({
      status: "ok",
      deletedNoteIds,
      message: `Deleted ${deletedNoteIds.length} notes`,
    });
  } catch (error) {
    logger.error("Error deleting note:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * Depth-first removal of a note and every descendant note.
 *
 * @param {mongoose.Types.ObjectId} noteId - Root of the subtree to delete.
 * @returns {Promise<Array>} Ids of every deleted note (children before root).
 */
const recursivelyDeleteNotes = async (noteId) => {
  const removed = [];

  // Delete children first so a partial failure cannot orphan descendants.
  const children = await noteModel.find({ parent: noteId });
  for (const child of children) {
    removed.push(...(await recursivelyDeleteNotes(child._id)));
  }

  await noteModel.deleteOne({ _id: noteId });
  removed.push(noteId);
  return removed;
};

View File

@ -0,0 +1,154 @@
import dotenv from "dotenv";
import { noteTypeModel } from "../../schemas/notetype.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config();
const logger = log4js.getLogger("NoteTypes");
logger.level = process.env.LOG_LEVEL;
/**
 * List note types with pagination, optionally grouped to the distinct
 * values of a single property.
 *
 * @param {import("express").Request} req - Unused; kept for route signature.
 * @param {import("express").Response} res - Sends the array of note types,
 *   or `{ error }` with status 500 on failure.
 * @param {number|string} [page=1] - 1-based page number.
 * @param {number|string} [limit=25] - Page size.
 * @param {string} [property=""] - When set, return distinct values of this
 *   property instead of full documents.
 * @param {object} [filter={}] - Optional MongoDB match filter.
 */
export const listNoteTypesRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    const skip = (page - 1) * limit;
    const aggregateCommand = [];

    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }
    if (property !== "") {
      // Collapse to the distinct values of the requested property, renaming
      // _id back to the property name.
      aggregateCommand.push({ $group: { _id: `$${property}` } });
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
    }
    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    // Route debug output through the logger instead of a stray console.log.
    logger.debug(aggregateCommand);

    const noteTypes = await noteTypeModel.aggregate(aggregateCommand);
    logger.trace(
      `List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
      noteTypes,
    );
    res.send(noteTypes);
  } catch (error) {
    logger.error("Error listing note types:", error);
    // Send the message rather than the raw Error (which JSON-serializes to {}).
    res.status(500).send({ error: error.message });
  }
};
/**
 * Fetch a single note type by id, with its audit-log history attached.
 * Responds 404 when the id matches nothing, 500 on any other failure.
 */
export const getNoteTypeRouteHandler = async (req, res) => {
  try {
    const typeId = new mongoose.Types.ObjectId(req.params.id);

    const noteType = await noteTypeModel.findOne({ _id: typeId });
    if (!noteType) {
      logger.warn(`Note type not found with supplied id.`);
      return res.status(404).send({ error: "Note type not found." });
    }
    logger.trace(`Note type with ID: ${typeId}:`, noteType);

    // Attach the change history, with each entry's owner resolved.
    const auditLogs = await auditLogModel
      .find({ target: typeId })
      .populate("owner");

    res.send({ ...noteType._doc, auditLogs });
  } catch (error) {
    logger.error("Error fetching note type:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * Update an existing note type's fields (name, color, active) and record
 * an audit-log entry capturing the before/after states.
 *
 * Responds "OK" on success, 404 when the note type does not exist,
 * 500 otherwise.
 */
export const editNoteTypeRouteHandler = async (req, res) => {
  try {
    const id = new mongoose.Types.ObjectId(req.params.id);

    const noteType = await noteTypeModel.findOne({ _id: id });
    if (!noteType) {
      logger.warn(`Note type not found with supplied id.`);
      return res.status(404).send({ error: "Note type not found." });
    }
    logger.trace(`Note type with ID: ${id}:`, noteType);

    try {
      const updateData = {
        updatedAt: new Date(),
        name: req.body.name,
        color: req.body.color,
        active: req.body.active,
      };

      // Record the before/after states before applying the change.
      await newAuditLog(
        noteType.toObject(),
        updateData,
        id,
        'NoteType',
        req.user._id,
        'User'
      );

      const result = await noteTypeModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // `matchedCount` is the current Mongoose result field; the legacy
      // `nModified` is undefined on modern drivers, so the old check could
      // never fire. Return here so we do not also send "OK" below.
      if (result.matchedCount === 0) {
        logger.error("No note type updated.");
        return res.status(500).send({ error: "No note types updated." });
      }
    } catch (updateError) {
      logger.error("Error updating note type:", updateError);
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching note type:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};
/**
 * Create a new note type, stamping created/updated timestamps and writing
 * a creation audit-log entry.
 *
 * Responds `{ status: "ok" }` on success, 500 on failure.
 */
export const newNoteTypeRouteHandler = async (req, res) => {
  try {
    const newNoteType = {
      ...req.body,
      createdAt: new Date(),
      updatedAt: new Date(),
    };

    const result = await noteTypeModel.create(newNoteType);
    // `create` throws on failure and returns the document on success; the
    // legacy `nCreated` field does not exist on a document, so the old check
    // was dead code. Return here so we never send a second response below.
    if (!result) {
      logger.error("No note type created.");
      return res.status(500).send({ error: "No note type created." });
    }

    // Audit the creation (empty "before" state).
    await newAuditLog(
      {},
      newNoteType,
      result._id,
      'NoteType',
      req.user._id,
      'User'
    );

    res.status(200).send({ status: "ok" });
  } catch (updateError) {
    logger.error("Error creating note type:", updateError);
    res.status(500).send({ error: updateError.message });
  }
};

View File

@ -5,6 +5,8 @@ import mongoose from "mongoose";
import multer from "multer"; import multer from "multer";
import fs from "fs"; import fs from "fs";
import path from "path"; import path from "path";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config(); dotenv.config();
@ -139,7 +141,12 @@ export const getPartRouteHandler = async (req, res) => {
} }
logger.trace(`Part with ID: ${id}:`, part); logger.trace(`Part with ID: ${id}:`, part);
res.send(part);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
res.send({...part._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching Part:", error); logger.error("Error fetching Part:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
@ -165,6 +172,16 @@ export const editPartRouteHandler = async (req, res) => {
const { createdAt, updatedAt, started_at, status, ...updateData } = const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body; req.body;
// Create audit log before updating
await newAuditLog(
part.toObject(),
updateData,
id,
'Part',
req.user._id,
'User'
);
const result = await partModel.updateOne( const result = await partModel.updateOne(
{ _id: id }, { _id: id },
{ $set: updateData }, { $set: updateData },
@ -201,6 +218,19 @@ export const newPartRouteHandler = async (req, res) => {
logger.error("No parts created."); logger.error("No parts created.");
return res.status(500).send({ error: "No parts created." }); return res.status(500).send({ error: "No parts created." });
} }
// Create audit logs for each new part
for (const result of results) {
await newAuditLog(
{},
result.toObject(),
result._id,
'Part',
req.user._id,
'User'
);
}
return res.status(200).send(results); return res.status(200).send(results);
} else { } else {
// Handle single part // Handle single part
@ -212,6 +242,17 @@ export const newPartRouteHandler = async (req, res) => {
fileName: req.body?.fileName, fileName: req.body?.fileName,
}; };
const result = await partModel.create(newPart); const result = await partModel.create(newPart);
// Create audit log for new part
await newAuditLog(
{},
newPart,
result._id,
'Part',
req.user._id,
'User'
);
return res.status(200).send(result); return res.status(200).send(result);
} }
} catch (error) { } catch (error) {

View File

@ -1,6 +1,9 @@
import dotenv from "dotenv"; import dotenv from "dotenv";
import { printerModel } from "../../schemas/printer.schema.js"; import { printerModel } from "../../schemas/printer.schema.js";
import log4js from "log4js"; import log4js from "log4js";
import { newAuditLog } from "../../util/index.js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config(); dotenv.config();
@ -47,7 +50,7 @@ export const getPrinterRouteHandler = async (req, res) => {
.populate({ .populate({
path: "subJobs", path: "subJobs",
populate: { populate: {
path: "printJob", path: "job",
}, },
}) })
.populate("vendor") .populate("vendor")
@ -62,7 +65,12 @@ export const getPrinterRouteHandler = async (req, res) => {
} }
logger.trace(`Printer with id ${id}:`, printer); logger.trace(`Printer with id ${id}:`, printer);
res.send(printer);
const auditLogs = await auditLogModel.find({
target: new mongoose.Types.ObjectId(id)
}).populate('owner');
res.send({...printer._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching printer:", error); logger.error("Error fetching printer:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
@ -72,6 +80,14 @@ export const getPrinterRouteHandler = async (req, res) => {
export const editPrinterRouteHandler = async (req, res) => { export const editPrinterRouteHandler = async (req, res) => {
const id = req.params.id; const id = req.params.id;
try { try {
// Fetch the printer first to get the old state
const printer = await printerModel.findOne({ _id: id });
if (!printer) {
logger.warn(`Printer not found with supplied id.`);
return res.status(404).send({ error: "Printer not found." });
}
try { try {
const updateData = { const updateData = {
updatedAt: new Date(), updatedAt: new Date(),
@ -81,6 +97,16 @@ export const editPrinterRouteHandler = async (req, res) => {
vendor: req.body.vendor.id, vendor: req.body.vendor.id,
}; };
// Create audit log before updating
await newAuditLog(
printer.toObject(),
updateData,
id,
'Printer',
req.user._id,
'User'
);
const result = await printerModel.updateOne( const result = await printerModel.updateOne(
{ _id: id }, { _id: id },
{ $set: updateData }, { $set: updateData },
@ -139,6 +165,16 @@ export const createPrinterRouteHandler = async (req, res) => {
// Save the printer // Save the printer
const savedPrinter = await newPrinter.save(); const savedPrinter = await newPrinter.save();
// Create audit log for new printer
await newAuditLog(
{},
newPrinter.toObject(),
savedPrinter._id,
'Printer',
req.user._id,
'User'
);
logger.info(`Created new printer: ${name}`); logger.info(`Created new printer: ${name}`);
res.status(201).send(savedPrinter); res.status(201).send(savedPrinter);
} catch (error) { } catch (error) {

View File

@ -3,6 +3,8 @@ import { productModel } from "../../schemas/product.schema.js";
import { partModel } from "../../schemas/part.schema.js"; import { partModel } from "../../schemas/part.schema.js";
import log4js from "log4js"; import log4js from "log4js";
import mongoose from "mongoose"; import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config(); dotenv.config();
@ -83,7 +85,12 @@ export const getProductRouteHandler = async (req, res) => {
} }
logger.trace(`Product with ID: ${id}:`, product); logger.trace(`Product with ID: ${id}:`, product);
res.send(product);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
res.send({...product._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching Product:", error); logger.error("Error fetching Product:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
@ -93,10 +100,11 @@ export const getProductRouteHandler = async (req, res) => {
export const editProductRouteHandler = async (req, res) => { export const editProductRouteHandler = async (req, res) => {
// Get ID from params // Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id); const id = new mongoose.Types.ObjectId(req.params.id);
var product = null;
try { try {
// Fetch the product with the given remote address // Fetch the product with the given remote address
const product = await productModel.findOne({ _id: id }); product = await productModel.findOne({ _id: id });
if (!product) { if (!product) {
// Error handling // Error handling
@ -123,7 +131,15 @@ export const editProductRouteHandler = async (req, res) => {
marginOrPrice: req.body.marginOrPrice, marginOrPrice: req.body.marginOrPrice,
}; };
console.log("ID:", id); // Create audit log before updating
await newAuditLog(
product.toObject(),
updateData,
id,
'Product',
req.user._id,
'User'
);
const result = await productModel.updateOne( const result = await productModel.updateOne(
{ _id: id }, { _id: id },
@ -160,6 +176,16 @@ export const newProductRouteHandler = async (req, res) => {
res.status(500).send({ error: "No product created." }); res.status(500).send({ error: "No product created." });
} }
// Create audit log for new product
await newAuditLog(
{},
newProduct,
newProductResult._id,
'Product',
req.user._id,
'User'
);
const parts = req.body.parts || []; const parts = req.body.parts || [];
const productId = newProductResult._id; const productId = newProductResult._id;
@ -179,6 +205,16 @@ export const newProductRouteHandler = async (req, res) => {
res.status(500).send({ error: "No parts created." }); res.status(500).send({ error: "No parts created." });
} }
partIds.push(newPartResult._id); partIds.push(newPartResult._id);
// Create audit log for each new part
await newAuditLog(
{},
newPart,
newPartResult._id,
'Part',
req.user._id,
'User'
);
} }
const editProductResult = await productModel.updateOne( const editProductResult = await productModel.updateOne(

View File

@ -1,56 +1,98 @@
import dotenv from "dotenv"; import dotenv from "dotenv";
import { printJobModel } from "../../schemas/printjob.schema.js"; import { jobModel } from "../../schemas/job.schema.js";
import { printSubJobModel } from "../../schemas/printsubjob.schema.js"; import { subJobModel } from "../../schemas/subjob.schema.js";
import log4js from "log4js"; import log4js from "log4js";
import { printerModel } from "../../schemas/printer.schema.js"; import { printerModel } from "../../schemas/printer.schema.js";
import { filamentModel } from "../../schemas/filament.schema.js"; import { filamentModel } from "../../schemas/filament.schema.js";
import { gcodeFileModel } from "../../schemas/gcodefile.schema.js"; import { gcodeFileModel } from "../../schemas/gcodefile.schema.js";
import { partModel } from "../../schemas/part.schema.js";
import { productModel } from "../../schemas/product.schema.js";
import { vendorModel } from "../../schemas/vendor.schema.js";
import { filamentStockModel } from "../../schemas/filamentstock.schema.js";
import { stockEventModel } from "../../schemas/stockevent.schema.js";
import { stockAuditModel } from "../../schemas/stockaudit.schema.js";
import { partStockModel } from "../../schemas/partstock.schema.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
import { userModel } from "../../schemas/user.schema.js";
import { noteTypeModel } from "../../schemas/notetype.schema.js";
import { noteModel } from "../../schemas/note.schema.js";
import mongoose from "mongoose";
dotenv.config(); dotenv.config();
const logger = log4js.getLogger("PrintJobs"); const logger = log4js.getLogger("Jobs");
logger.level = process.env.LOG_LEVEL; logger.level = process.env.LOG_LEVEL;
const formatPrintersResponse = (printers) => { // Map prefixes to models and id fields
return printers.map((printer) => ({ const PREFIX_MODEL_MAP = {
id: printer.id, PRN: { model: printerModel, idField: '_id', type: 'printer' },
name: printer.name, FIL: { model: filamentModel, idField: '_id', type: 'filament' },
link: `/production/printers/info?printerId=${printer.id}`, SPL: { model: null, idField: '_id', type: 'spool' }, // No spool model found
printer: printer, GCF: { model: gcodeFileModel, idField: '_id', type: 'gcodefile' },
})); JOB: { model: jobModel, idField: '_id', type: 'job' },
PRT: { model: partModel, idField: '_id', type: 'part' },
PRD: { model: productModel, idField: '_id', type: 'product' },
VEN: { model: vendorModel, idField: '_id', type: 'vendor' },
SJB: { model: subJobModel, idField: '_id', type: 'subjob' },
FLS: { model: filamentStockModel, idField: '_id', type: 'filamentstock' },
SEV: { model: stockEventModel, idField: '_id', type: 'stockevent' },
SAU: { model: stockAuditModel, idField: '_id', type: 'stockaudit' },
PTS: { model: partStockModel, idField: '_id', type: 'partstock' },
PDS: { model: null, idField: '_id', type: 'productstock' }, // No productStockModel found
ADL: { model: auditLogModel, idField: '_id', type: 'auditlog' },
USR: { model: userModel, idField: '_id', type: 'user' },
NTY: { model: noteTypeModel, idField: '_id', type: 'notetype' },
NTE: { model: noteModel, idField: '_id', type: 'note' },
}; };
const formatJobsResponse = (jobs) => { // Helper function to build search filter from query parameters
return jobs.map((job) => ({ const buildSearchFilter = (params) => {
id: job.id, const filter = {};
name: job.gcodeFile.name,
link: `/production/printjobs/info?printJobId=${job.id}`, for (const [key, value] of Object.entries(params)) {
job: job, // Skip pagination and limit parameters as they're not search filters
})); if (key === 'limit' || key === 'page') continue;
// Handle different field types
if (key === 'name') {
filter.name = { $regex: value, $options: 'i' }; // Case-insensitive search
} else if (key === 'id' || key === '_id') {
if (mongoose.Types.ObjectId.isValid(value)) {
filter._id = value;
}
} else if (key === 'tags') {
filter.tags = { $in: [new RegExp(value, 'i')] };
} else if (key === 'state') {
filter['state.type'] = value;
} else if (key.includes('.')) {
// Handle nested fields like 'state.type', 'address.city', etc.
filter[key] = { $regex: value, $options: 'i' };
} else {
// For all other fields, do a case-insensitive search
filter[key] = { $regex: value, $options: 'i' };
}
}
return filter;
}; };
const formatFilamentsResponse = (filaments) => { const trimSpotlightObject = (object) => {
return filaments.map((filament) => ({ return {
id: filament.id, _id: object._id,
name: filament.name, name: object.name || undefined,
link: `/management/filaments/info?filamentId=${filament.id}`, state: object.state && object?.state.type? { type: object.state.type } : undefined,
filament: filament, tags: object.tags || undefined,
})); email: object.email || undefined,
}; color: object.color || undefined,
updatedAt: object.updatedAt || undefined,
const formatGCodeFilesResponse = (gcodeFiles) => { };
return gcodeFiles.map((gcodeFile) => ({ }
id: gcodeFile.id,
name: gcodeFile.name,
link: `/management/gcodefiles/info?gcodeFileId=${gcodeFile.id}`,
gcodeFile: gcodeFile,
}));
};
export const getSpotlightRouteHandler = async (req, res) => { export const getSpotlightRouteHandler = async (req, res) => {
try { try {
const query = req.params.query; const query = req.params.query;
if (query.length <= 4) { const queryParams = req.query;
if (query.length < 3) {
res.status(200).send([]); res.status(200).send([]);
return; return;
} }
@ -59,55 +101,69 @@ export const getSpotlightRouteHandler = async (req, res) => {
const suffix = query.substring(4); const suffix = query.substring(4);
if (delimiter == ":") { if (delimiter == ":") {
switch (prefix) { const prefixEntry = PREFIX_MODEL_MAP[prefix];
case "PRN": if (!prefixEntry || !prefixEntry.model) {
const printer = await printerModel.findOne({ id: suffix }); res.status(400).send({ error: "Invalid or unsupported prefix" });
if (!printer) { return;
res.status(404).send({ error: "Job not found" });
} else {
res.status(200).send(formatPrintersResponse([printer]));
}
break;
case "JOB":
const job = await printJobModel
.findOne({ _id: suffix })
.populate("gcodeFile", "name");
if (!job) {
res.status(404).send({ error: "Job not found" });
} else {
res.status(200).send(formatJobsResponse([job]));
}
break;
case "FIL":
const filament = await filamentModel.findOne({ _id: suffix });
if (!filament) {
res.status(404).send({ error: "Filament not found" });
} else {
res.status(200).send(formatFilamentsResponse([filament]));
}
break;
case "GCF":
const gcodeFile = await gcodeFileModel.findOne({ _id: suffix });
if (!gcodeFile) {
res.status(404).send({ error: "Filament not found" });
} else {
res.status(200).send(formatGCodeFilesResponse([gcodeFile]));
}
break;
case "SBJ":
const subJob = await printSubJobModel.findOne({ id: suffix });
if (!subJob) {
res.status(404).send({ error: "SubJob not found" });
} else {
res.status(200).send([subJob]);
}
break;
default:
res.status(400).send({ error: "Invalid prefix" });
} }
const { model, idField } = prefixEntry;
// Validate ObjectId if the idField is '_id'
if (idField === '_id' && !mongoose.Types.ObjectId.isValid(suffix)) {
res.status(404).send({ error: `${prefix} not found` });
return;
}
// Find the object by the correct field
const queryObj = {};
queryObj[idField] = suffix.toLowerCase();
let doc = await model.findOne(queryObj).lean();
if (!doc) {
res.status(404).send({ error: `${prefix} not found` });
return;
}
// Build the response with only the required fields
const response = trimSpotlightObject(doc)
res.status(200).send(response);
return;
}
console.log(queryParams)
if (Object.keys(queryParams).length > 0) {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
console.log(prefixEntry)
if (!prefixEntry || !prefixEntry.model) {
res.status(400).send({ error: "Invalid or unsupported prefix" });
return;
}
const { model } = prefixEntry;
// Use req.query for search parameters
if (Object.keys(queryParams).length === 0) {
res.status(400).send({ error: "No search parameters provided" });
return;
}
// Build search filter
const searchFilter = buildSearchFilter(queryParams);
// Perform search with limit
const limit = parseInt(req.query.limit) || 10;
const docs = await model.find(searchFilter)
.limit(limit)
.sort({ updatedAt: -1 })
.lean();
// Format response
const response = docs.map(doc => (trimSpotlightObject(doc)));
res.status(200).send(response);
return;
} }
} catch (error) { } catch (error) {
logger.error("Error listing print jobs:", error); logger.error("Error in spotlight lookup:", error);
res.status(500).send({ error: error }); res.status(500).send({ error: error });
} }
}; };

View File

@ -0,0 +1,174 @@
import dotenv from "dotenv";
import { stockAuditModel } from "../../schemas/stockaudit.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config();
const logger = log4js.getLogger("Stock Audits");
logger.level = process.env.LOG_LEVEL;
/**
 * List stock audits with pagination, resolving the creating user, and
 * optionally grouping to the distinct values of a single property.
 *
 * @param {import("express").Request} req - Unused; kept for route signature.
 * @param {import("express").Response} res - Sends the array of stock audits,
 *   or `{ error }` with status 500 on failure.
 * @param {number|string} [page=1] - 1-based page number.
 * @param {number|string} [limit=25] - Page size.
 * @param {string} [property=""] - When set, return distinct values of this
 *   property instead of full documents.
 * @param {object} [filter={}] - Optional MongoDB match filter.
 */
export const listStockAuditsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    const skip = (page - 1) * limit;
    const aggregateCommand = [];

    // Resolve the creating user reference.
    aggregateCommand.push({
      $lookup: {
        from: "users",
        localField: "createdBy",
        foreignField: "_id",
        as: "createdBy",
      },
    });
    aggregateCommand.push({ $unwind: "$createdBy" });

    // `filter != {}` compares object references and is always true; test for
    // actual keys before adding a $match stage.
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }
    if (property !== "") {
      // Collapse to the distinct values of the requested property.
      aggregateCommand.push({ $group: { _id: `$${property}` } });
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
    }
    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    const stockAudits = await stockAuditModel.aggregate(aggregateCommand);
    logger.trace(
      `List of stock audits (Page ${page}, Limit ${limit}, Property ${property}):`,
      stockAudits,
    );
    res.send(stockAudits);
  } catch (error) {
    logger.error("Error listing stock audits:", error);
    // Send the message rather than the raw Error (which JSON-serializes to {}).
    res.status(500).send({ error: error.message });
  }
};
/**
 * Fetch a single stock audit by id, resolving its creator and the stock
 * records referenced by its line items, and attaching its audit-log history.
 */
export const getStockAuditRouteHandler = async (req, res) => {
  try {
    const auditId = new mongoose.Types.ObjectId(req.params.id);

    // Resolve all referenced documents in a single populated query.
    const stockAudit = await stockAuditModel
      .findOne({ _id: auditId })
      .populate("createdBy")
      .populate("items.filamentStock")
      .populate("items.partStock");

    if (!stockAudit) {
      logger.warn(`Stock audit not found with supplied id.`);
      return res.status(404).send({ error: "Stock audit not found." });
    }
    logger.trace(`Stock audit with ID: ${auditId}:`, stockAudit);

    // Attach the change history, with each entry's owner resolved.
    const auditLogs = await auditLogModel
      .find({ target: auditId })
      .populate("owner");

    res.send({ ...stockAudit._doc, auditLogs });
  } catch (error) {
    logger.error("Error fetching stock audit:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * Create a new stock audit from the request body. Each line item references
 * either a filament stock or a part stock depending on its type.
 *
 * Responds `{ status: "ok", id }` on success, 500 on failure.
 */
export const newStockAuditRouteHandler = async (req, res) => {
  try {
    // Guard against a missing/invalid items array instead of crashing with a
    // TypeError on `.map` when the client omits it.
    const items = Array.isArray(req.body.items) ? req.body.items : [];

    const newStockAudit = {
      type: req.body.type,
      status: req.body.status || "pending",
      notes: req.body.notes,
      items: items.map(item => ({
        type: item.type,
        // The `stock` reference points at a filament stock or a part stock.
        stock: item.type === "filament"
          ? new mongoose.Types.ObjectId(item.filamentStock)
          : new mongoose.Types.ObjectId(item.partStock),
        expectedQuantity: item.expectedQuantity,
        actualQuantity: item.actualQuantity,
        notes: item.notes
      })),
      createdBy: new mongoose.Types.ObjectId(req.body.createdBy),
      // Only audits created directly in the completed state get a timestamp.
      completedAt: req.body.status === "completed" ? new Date() : null
    };

    const result = await stockAuditModel.create(newStockAudit);
    if (!result) {
      logger.error("No stock audit created.");
      return res.status(500).send({ error: "No stock audit created." });
    }
    return res.send({ status: "ok", id: result._id });
  } catch (error) {
    logger.error("Error adding stock audit:", error);
    return res.status(500).send({ error: error.message });
  }
};
/**
 * Update an existing stock audit in place and return the updated document.
 * Responds 404 when the id matches nothing, 500 on any other failure.
 */
export const updateStockAuditRouteHandler = async (req, res) => {
  try {
    const auditId = new mongoose.Types.ObjectId(req.params.id);

    // Normalize a submitted line item to the stored shape; `stock` points at
    // either a filament stock or a part stock depending on the item type.
    const toStoredItem = (item) => ({
      type: item.type,
      stock: item.type === "filament"
        ? new mongoose.Types.ObjectId(item.filamentStock)
        : new mongoose.Types.ObjectId(item.partStock),
      expectedQuantity: item.expectedQuantity,
      actualQuantity: item.actualQuantity,
      notes: item.notes
    });

    // NOTE(review): the entire request body is spread into $set, and
    // completedAt is reset to null whenever this request does not mark the
    // audit completed — confirm both behaviors are intended for partial
    // updates.
    const updateData = {
      ...req.body,
      items: req.body.items?.map(toStoredItem),
      completedAt: req.body.status === "completed" ? new Date() : null
    };

    const updated = await stockAuditModel.findByIdAndUpdate(
      auditId,
      { $set: updateData },
      { new: true }
    );
    if (!updated) {
      logger.warn(`Stock audit not found with supplied id.`);
      return res.status(404).send({ error: "Stock audit not found." });
    }

    logger.trace(`Updated stock audit with ID: ${auditId}:`, updated);
    res.send(updated);
  } catch (error) {
    logger.error("Error updating stock audit:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * Delete a stock audit by id.
 * Responds `{ status: "ok" }` on success, 404 when the id matches nothing.
 */
export const deleteStockAuditRouteHandler = async (req, res) => {
  try {
    const auditId = new mongoose.Types.ObjectId(req.params.id);

    const removed = await stockAuditModel.findByIdAndDelete(auditId);
    if (!removed) {
      logger.warn(`Stock audit not found with supplied id.`);
      return res.status(404).send({ error: "Stock audit not found." });
    }

    logger.trace(`Deleted stock audit with ID: ${auditId}`);
    res.send({ status: "ok" });
  } catch (error) {
    logger.error("Error deleting stock audit:", error);
    res.status(500).send({ error: error.message });
  }
};

View File

@ -0,0 +1,139 @@
import dotenv from "dotenv";
import { stockEventModel } from "../../schemas/stockevent.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
dotenv.config();
const logger = log4js.getLogger("Stock Events");
logger.level = process.env.LOG_LEVEL;
/**
 * List stock events with pagination and optional sorting, resolving the
 * filament stock (required) and sub-job (optional) references.
 *
 * @param {import("express").Request} req - Unused; kept for route signature.
 * @param {import("express").Response} res - Sends the array of stock events,
 *   or `{ error }` with status 500 on failure.
 * @param {number|string} [page=1] - 1-based page number.
 * @param {number|string} [limit=25] - Page size.
 * @param {string} [property=""] - When set, return distinct values of this
 *   property instead of full documents.
 * @param {object} [filter={}] - Optional MongoDB match filter.
 * @param {string} [sort=""] - Field to sort by; no sort when empty.
 * @param {string} [order="ascend"] - "ascend" or "descend".
 */
export const listStockEventsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
  sort = "",
  order = "ascend"
) => {
  try {
    const skip = (page - 1) * limit;
    const aggregateCommand = [];

    // Resolve the filament stock reference ($unwind drops events whose
    // reference cannot be resolved).
    aggregateCommand.push({
      $lookup: {
        from: "filamentstocks",
        localField: "filamentStock",
        foreignField: "_id",
        as: "filamentStock",
      },
    });
    aggregateCommand.push({ $unwind: "$filamentStock" });

    // Resolve the optional subJob reference, then collapse the lookup array
    // to a single document or null so events without a sub-job survive.
    aggregateCommand.push({
      $lookup: {
        from: "subjobs",
        localField: "subJob",
        foreignField: "_id",
        as: "subJob",
      },
    });
    aggregateCommand.push({
      $addFields: {
        subJob: {
          $cond: {
            if: { $eq: [{ $size: "$subJob" }, 0] },
            then: null,
            else: { $arrayElemAt: ["$subJob", 0] }
          }
        }
      }
    });

    // `filter != {}` compares object references and is always true; test for
    // actual keys before adding a $match stage.
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }
    if (property !== "") {
      // Collapse to the distinct values of the requested property.
      aggregateCommand.push({ $group: { _id: `$${property}` } });
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
    }

    // Sort before paginating so pages are stable.
    if (sort) {
      const sortOrder = order === "descend" ? -1 : 1;
      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    // Route debug output through the logger instead of a stray console.log.
    logger.debug('Aggregation pipeline:', JSON.stringify(aggregateCommand, null, 2));

    const stockEvents = await stockEventModel.aggregate(aggregateCommand);
    logger.trace(
      `List of stock events (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
      stockEvents,
    );
    res.send(stockEvents);
  } catch (error) {
    logger.error("Error listing stock events:", error);
    // Send the message rather than the raw Error (which JSON-serializes to {}).
    res.status(500).send({ error: error.message });
  }
};
/**
 * Fetch a single stock event by id, resolving its filament stock, sub-job
 * and job references. Responds 404 when the id matches nothing.
 */
export const getStockEventRouteHandler = async (req, res) => {
  try {
    const eventId = new mongoose.Types.ObjectId(req.params.id);

    // Resolve every reference in one populated query.
    const stockEvent = await stockEventModel
      .findOne({ _id: eventId })
      .populate("filamentStock")
      .populate("subJob")
      .populate("job");

    if (!stockEvent) {
      logger.warn(`Stock event not found with supplied id.`);
      return res.status(404).send({ error: "Stock event not found." });
    }

    logger.trace(`Stock event with ID: ${eventId}:`, stockEvent);
    res.send(stockEvent);
  } catch (error) {
    logger.error("Error fetching stock event:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * Create a new stock event from the request body. The sub-job and job
 * references are optional (stored as null when absent); the filament stock
 * reference is required.
 */
export const newStockEventRouteHandler = async (req, res) => {
  try {
    const { type, value, subJob, job, filamentStock } = req.body;

    const event = {
      type,
      value,
      subJob: subJob ? new mongoose.Types.ObjectId(subJob) : null,
      job: job ? new mongoose.Types.ObjectId(job) : null,
      filamentStock: new mongoose.Types.ObjectId(filamentStock),
      timestamp: new Date()
    };

    const created = await stockEventModel.create(event);
    if (!created) {
      logger.error("No stock event created.");
      return res.status(500).send({ error: "No stock event created." });
    }

    return res.send({ status: "ok", id: created._id });
  } catch (error) {
    logger.error("Error adding stock event:", error);
    return res.status(500).send({ error: error.message });
  }
};

139
src/services/users/index.js Normal file
View File

@ -0,0 +1,139 @@
import dotenv from "dotenv";
import { userModel } from "../../schemas/user.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config();
const logger = log4js.getLogger("Users");
logger.level = process.env.LOG_LEVEL;
/**
 * List users with pagination and optional filtering/projection.
 *
 * @param {object} req - Express request (unused beyond routing).
 * @param {object} res - Express response; receives the result array or a 500 error.
 * @param {number} [page=1] - 1-based page number.
 * @param {number} [limit=25] - Page size.
 * @param {string} [property=""] - When set, returns only the distinct values of
 *   that property (grouped, then renamed back from _id).
 * @param {object} [filter={}] - Mongo match expression applied first.
 */
export const listUsersRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;
    const aggregateCommand = [];
    // BUG FIX: `filter != {}` compares object references and is always true;
    // only add a $match stage when the filter actually has keys.
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }
    if (property !== "") {
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
    }
    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });
    const user = await userModel.aggregate(aggregateCommand);
    logger.trace(
      `List of users (Page ${page}, Limit ${limit}, Property ${property}):`,
      user,
    );
    res.send(user);
  } catch (error) {
    logger.error("Error listing users:", error);
    res.status(500).send({ error: error });
  }
};
/**
 * GET /:id handler — fetch a single user plus the audit-log entries that
 * target them (each with its owner populated).
 * Responds 404 when the user does not exist, 500 on any other failure.
 */
export const getUserRouteHandler = async (req, res) => {
  try {
    const userId = new mongoose.Types.ObjectId(req.params.id);
    const user = await userModel.findOne({ _id: userId });
    if (!user) {
      logger.warn(`User not found with supplied id.`);
      return res.status(404).send({ error: "User not found." });
    }
    logger.trace(`User with ID: ${userId}:`, user);
    // Attach every audit-log entry recorded against this user.
    const auditLogs = await auditLogModel
      .find({ target: userId })
      .populate('owner');
    // Spread the raw document so auditLogs can ride along in the payload.
    res.send({ ...user._doc, auditLogs: auditLogs });
  } catch (error) {
    logger.error("Error fetching User:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * PUT /:id handler — update a user's editable fields, writing an audit-log
 * entry (diff of old vs. new values) before persisting the change.
 *
 * Responds "OK" on success, 404 when the user does not exist, 500 on
 * audit/update failures.
 */
export const editUserRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the user with the given ID
    const user = await userModel.findOne({ _id: id });
    if (!user) {
      // Error handling
      logger.warn(`User not found with supplied id.`);
      return res.status(404).send({ error: "User not found." });
    }
    logger.trace(`User with ID: ${id}:`, user);
    try {
      const updateData = {
        updatedAt: new Date(),
        username: req.body.username,
        name: req.body.name,
        firstName: req.body.firstName,
        lastName: req.body.lastName,
        email: req.body.email,
      };
      // Create audit log before updating
      await newAuditLog(
        user.toObject(),
        updateData,
        id,
        'User',
        req.user._id,
        'User'
      );
      const result = await userModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // BUG FIX: Mongoose >=6 reports `modifiedCount` (`nModified` is the
      // legacy field); support both. Also `return` here — the original fell
      // through to res.send("OK") after the 500, double-sending the response.
      if ((result.modifiedCount ?? result.nModified) === 0) {
        logger.error("No User updated.");
        return res.status(500).send({ error: "No users updated." });
      }
    } catch (updateError) {
      logger.error("Error updating user:", updateError);
      // BUG FIX: return so we do not also send "OK" below.
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching user:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

View File

@ -3,6 +3,8 @@ import { vendorModel } from "../../schemas/vendor.schema.js";
import jwt from "jsonwebtoken"; import jwt from "jsonwebtoken";
import log4js from "log4js"; import log4js from "log4js";
import mongoose from "mongoose"; import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
dotenv.config(); dotenv.config();
@ -69,7 +71,12 @@ export const getVendorRouteHandler = async (req, res) => {
} }
logger.trace(`Vendor with ID: ${id}:`, vendor); logger.trace(`Vendor with ID: ${id}:`, vendor);
res.send(vendor);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
res.send({...vendor._doc, auditLogs: auditLogs});
} catch (error) { } catch (error) {
logger.error("Error fetching Vendor:", error); logger.error("Error fetching Vendor:", error);
res.status(500).send({ error: error.message }); res.status(500).send({ error: error.message });
@ -102,6 +109,18 @@ export const editVendorRouteHandler = async (req, res) => {
email: req.body.email, email: req.body.email,
}; };
console.log(req.user)
// Create audit log before updating
await newAuditLog(
vendor.toObject(),
updateData,
id,
'Vendor',
req.user._id,
'User'
);
const result = await vendorModel.updateOne( const result = await vendorModel.updateOne(
{ _id: id }, { _id: id },
{ $set: updateData }, { $set: updateData },
@ -131,6 +150,17 @@ export const newVendorRouteHandler = async (req, res) => {
logger.error("No vendor created."); logger.error("No vendor created.");
res.status(500).send({ error: "No vendor created." }); res.status(500).send({ error: "No vendor created." });
} }
// Create audit log for new vendor
await newAuditLog(
{},
newVendor,
result._id,
'Vendor',
req.user.id, // Assuming user ID is available in req.user
'User'
);
res.status(200).send({ status: "ok" }); res.status(200).send({ status: "ok" });
} catch (updateError) { } catch (updateError) {
logger.error("Error updating vendor:", updateError); logger.error("Error updating vendor:", updateError);

View File

@ -1,8 +1,34 @@
import { ObjectId } from "mongodb"; // Only needed in Node.js with MongoDB driver

/**
 * Build a single-property Mongo filter clause from a raw query value.
 *
 * String values are interpreted in priority order:
 *   "true"/"false" (any case)     -> boolean match
 *   24-character hex string       -> ObjectId match
 *   numeric string                -> number match (parseFloat)
 *   anything else                 -> case-insensitive $regex match
 * Non-string values pass through unchanged.
 *
 * @param {string} property - Field name to filter on.
 * @param {*} value - Raw filter value (usually a query-string value).
 * @returns {object} `{ [property]: <mongo match value> }`
 */
function parseFilter(property, value) {
  if (typeof value !== "string") {
    // Handle actual booleans, numbers, objects, etc.
    return { [property]: value };
  }
  const trimmed = value.trim();
  const lowered = trimmed.toLowerCase();
  // Handle booleans
  if (lowered === "true") return { [property]: true };
  if (lowered === "false") return { [property]: false };
  // Handle ObjectId — the anchored regex already enforces exactly 24 hex
  // chars, so the original's extra length check was redundant.
  if (/^[a-f\d]{24}$/i.test(trimmed)) {
    return { [property]: new ObjectId(trimmed) };
  }
  // Handle numbers. BUG FIX: guard the empty string — isNaN("") is false,
  // so the original returned { prop: NaN } for blank/whitespace input.
  if (trimmed !== "" && !isNaN(trimmed)) {
    return { [property]: parseFloat(trimmed) };
  }
  // Default to case-insensitive regex for non-numeric strings.
  // NOTE(review): the value is not regex-escaped, so user input containing
  // metacharacters is treated as a pattern — confirm this is intended.
  return {
    [property]: {
      $regex: trimmed,
      $options: "i"
    }
  };
}
function convertToCamelCase(obj) { function convertToCamelCase(obj) {
@ -248,4 +274,49 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
return useCamelCase ? convertToCamelCase(configObject) : configObject; return useCamelCase ? convertToCamelCase(configObject) : configObject;
} }
/**
 * Diff two plain objects, returning only the keys of `newObj` whose values
 * differ from `oldObj` (compared via JSON serialization).
 * `_id` and timestamp bookkeeping fields are always excluded.
 *
 * @param {object} oldObj - Previous document values.
 * @param {object} newObj - Proposed new values.
 * @returns {object} Map of changed key -> new value (empty when nothing changed).
 */
function getChangedValues(oldObj, newObj) {
  // Bookkeeping fields never count as meaningful changes.
  const IGNORED_KEYS = new Set(['_id', 'createdAt', 'updatedAt']);
  const changes = {};
  // BUG FIX: iterate own keys only — `for...in` also walks inherited
  // enumerable properties, which could leak prototype keys into the diff.
  for (const key of Object.keys(newObj)) {
    if (IGNORED_KEYS.has(key)) continue;
    // JSON comparison gives cheap deep equality for plain data values.
    if (JSON.stringify(oldObj[key]) !== JSON.stringify(newObj[key])) {
      changes[key] = newObj[key];
    }
  }
  return changes;
}
/**
 * Persist an audit-log entry recording what changed on a document.
 * Stores the full previous state plus only the keys that actually changed;
 * silently does nothing when old and new values are identical.
 *
 * @param {object} oldValue - Document state before the edit.
 * @param {object} newValue - Proposed new field values.
 * @param {ObjectId} targetId - Id of the document being edited.
 * @param {string} targetModel - Model name of the edited document.
 * @param {ObjectId} ownerId - Id of the actor making the change.
 * @param {string} ownerModel - Model name of the actor.
 */
async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
  // Lazy import keeps this util module free of a hard schema dependency.
  const { auditLogModel } = await import('../schemas/auditlog.schema.js');
  // Get only the changed values
  const changedValues = getChangedValues(oldValue, newValue);
  // If no values changed, don't create an audit log
  if (Object.keys(changedValues).length === 0) return;
  const entry = new auditLogModel({
    oldValue,
    newValue: changedValues,
    target: targetId,
    targetModel,
    owner: ownerId,
    ownerModel,
  });
  await entry.save();
}
export {
parseFilter,
convertToCamelCase,
extractConfigBlock,
newAuditLog
};