Refactor routes and schemas: replace print job references with job, add user routes, and implement new audit logging functionality. Update filtering methods across various services to enhance query capabilities.
This commit is contained in:
parent 5a5701088a
commit 11d80fb76e
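
Note: the filtering changes throughout this commit replace parseStringIfNumber with a shared parseFilter helper imported from src/util/index.js. The helper itself is not shown in the diff; a minimal sketch of what it might look like, assuming string values become case-insensitive regex matches and numeric strings become exact matches (as the old per-route code suggests):

// Hypothetical sketch only — the real parseFilter in src/util/index.js is not part of this diff.
export function parseFilter(key, value) {
  const asNumber = Number(value);
  if (value !== "" && !Number.isNaN(asNumber)) {
    return { [key]: asNumber }; // exact match for numeric query values
  }
  return { [key]: { $regex: value, $options: "i" } }; // fuzzy match for strings
}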
20 src/index.js
@@ -5,10 +5,10 @@ import dotenv from "dotenv";
import { expressSession, keycloak } from "./keycloak.js";
import { dbConnect } from "./mongo/index.js";
import {
  apiRoutes,
  authRoutes,
  userRoutes,
  printerRoutes,
  printJobRoutes,
  jobRoutes,
  gcodeFileRoutes,
  filamentRoutes,
  spotlightRoutes,
@@ -18,12 +18,18 @@ import {
  materialRoutes,
  partStockRoutes,
  filamentStockRoutes,
  stockAuditRoutes,
  stockEventRoutes,
  auditLogRoutes,
  noteTypeRoutes,
  noteRoutes
} from "./routes/index.js";
import path from "path";
import * as fs from "fs";
import cron from "node-cron";
import ReseedAction from "./mongo/ReseedAction.js";
import log4js from "log4js";
import { populateUserMiddleware } from "./services/auth/index.js";

dotenv.config();

@@ -56,6 +62,7 @@ app.use(
app.use(express.json());
app.use(expressSession);
app.use(keycloak.middleware());
app.use(populateUserMiddleware);

app.get("/", function (req, res) {
  const __dirname = fs.realpathSync(".");
@@ -63,10 +70,10 @@ app.get("/", function (req, res) {
});

app.use("/auth", authRoutes);
app.use("/overview", apiRoutes);
app.use("/users", userRoutes)
app.use("/spotlight", spotlightRoutes);
app.use("/printers", printerRoutes);
app.use("/printjobs", printJobRoutes);
app.use("/jobs", jobRoutes);
app.use("/gcodefiles", gcodeFileRoutes);
app.use("/filaments", filamentRoutes);
app.use("/parts", partRoutes);
@@ -75,6 +82,11 @@ app.use("/vendors", vendorRoutes);
app.use("/materials", materialRoutes);
app.use("/partstocks", partStockRoutes);
app.use("/filamentstocks", filamentStockRoutes);
app.use("/stockevents", stockEventRoutes);
app.use("/stockaudits", stockAuditRoutes);
app.use("/auditlogs", auditLogRoutes);
app.use("/notetypes", noteTypeRoutes);
app.use("/notes", noteRoutes)

if (process.env.SCHEDULE_HOUR) {
  cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {

@@ -7,6 +7,9 @@ logger.level = process.env.LOG_LEVEL;

dotenv.config();

// Set strictQuery to false to prepare for Mongoose 7
mongoose.set('strictQuery', false);

function dbConnect() {
  mongoose.connection.once("open", () => logger.info("Database connected."));
  return mongoose.connect(

@@ -1,7 +1,7 @@
import bcrypt from "bcrypt";
import mongoose from "mongoose";
import { userModel } from "../schemas/user.schema.js";
import { printJobModel } from "../schemas/printjob.schema.js";
import { jobModel } from "../schemas/job.schema.js";
import { dbConnect } from "../mongo/index.js";

async function seedDB() {
@@ -21,7 +21,7 @@ async function seedDB() {
  const admin = new userModel(user);
  await admin.save();

  const printJob = {
  const job = {
    _id: new mongoose.Types.ObjectId(1),
    status : {
      type: "Queued"
@@ -31,8 +31,8 @@ async function seedDB() {
    started_at: new Date(),
  };

  const newPrintJob = new printJobModel(printJob);
  await newPrintJob.save();
  const newJob = new jobModel(job);
  await newJob.save();

  console.log("DB seeded");
}

@@ -1,27 +0,0 @@
import express from "express";

import { keycloak, isAuthenticated } from "../../keycloak.js";

const router = express.Router();
import {
  getProfileRouteHandler,
  patchProfileRouteHandler,
  getDashboardRouteHandler,
} from "../../services/api/index.js";

// get main dashboard info profile
router.get("/", keycloak.protect(), (req, res) => {
  getDashboardRouteHandler(req, res);
});

// get user's profile
router.get("/user", isAuthenticated, (req, res) => {
  getProfileRouteHandler(req, res);
});

// update user's profile
router.patch("/", isAuthenticated, async (req, res) => {
  patchProfileRouteHandler(req, res);
});

export default router;
29 src/routes/auditlogs/index.js Normal file
@@ -0,0 +1,29 @@
import express from 'express';
import { listAuditLogsRouteHandler, getAuditLogRouteHandler } from '../../services/auditlogs/index.js';

const router = express.Router();

/**
 * @route GET /api/auditlogs
 * @desc Get all audit logs with pagination and filtering
 * @access Private
 */
router.get('/', async (req, res) => {
  const page = parseInt(req.query.page) || 1;
  const limit = parseInt(req.query.limit) || 25;
  const property = req.query.property || "";
  const filter = req.query.filter ? JSON.parse(req.query.filter) : {};

  await listAuditLogsRouteHandler(req, res, page, limit, property, filter);
});

/**
 * @route GET /api/auditlogs/:id
 * @desc Get a single audit log by ID
 * @access Private
 */
router.get('/:id', async (req, res) => {
  await getAuditLogRouteHandler(req, res);
});

export default router;
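
For reference, the list endpoint above reads its filter as a JSON-encoded query parameter. A hypothetical client call (the filter field is just an example) might look like:

// Hypothetical client usage; the route is mounted at /auditlogs in src/index.js.
const params = new URLSearchParams({
  page: "1",
  limit: "25",
  filter: JSON.stringify({ targetModel: "Filament" }), // assumed example filter
});
const res = await fetch(`/auditlogs?${params}`, { credentials: "include" });
const auditLogs = await res.json();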
@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -16,12 +16,12 @@ router.get("/", isAuthenticated, (req, res) => {

  const allowedFilters = ["type", "vendor.name", "diameter", "color"];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = parseStringIfNumber(value);
        filter = {...filter, ...parseFilter(key, value)};
      }
    }
  }

@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -12,21 +12,22 @@ import {

// list of filamentStocks
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;
  const { page, limit, property, sort, order } = req.query;

  const allowedFilters = ["country"];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = parseStringIfNumber(value);
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

  listFilamentStocksRouteHandler(req, res, page, limit, property, filter);
  listFilamentStocksRouteHandler(req, res, page, limit, property, filter, sort, order);
});

router.post("/", isAuthenticated, (req, res) => {

@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -24,12 +24,13 @@ router.get("/", isAuthenticated, (req, res) => {
    "filament.color",
  ];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = parseStringIfNumber(value);
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

@@ -1,8 +1,7 @@
import userRoutes from "./users/index.js";
import apiRoutes from "./api/index.js";
import authRoutes from "./auth/index.js";
import printerRoutes from "./printers/index.js";
import printJobRoutes from "./printjobs/index.js";
import jobRoutes from "./jobs/index.js";
import gcodeFileRoutes from "./gcodefiles/index.js";
import filamentRoutes from "./filaments/index.js";
import spotlightRoutes from "./spotlight/index.js";
@@ -12,13 +11,17 @@ import vendorRoutes from "./vendors/index.js";
import materialRoutes from "./materials/index.js";
import partStockRoutes from "./partstocks/index.js";
import filamentStockRoutes from "./filamentstocks/index.js";
import stockEventRoutes from "./stockevents/index.js";
import stockAuditRoutes from "./stockaudits/index.js";
import auditLogRoutes from "./auditlogs/index.js";
import noteTypeRoutes from "./notetypes/index.js";
import noteRoutes from "./notes/index.js"

export {
  userRoutes,
  apiRoutes,
  authRoutes,
  printerRoutes,
  printJobRoutes,
  jobRoutes,
  gcodeFileRoutes,
  filamentRoutes,
  spotlightRoutes,
@@ -28,4 +31,9 @@ export {
  materialRoutes,
  partStockRoutes,
  filamentStockRoutes,
  stockEventRoutes,
  stockAuditRoutes,
  auditLogRoutes,
  noteTypeRoutes,
  noteRoutes
};

@@ -3,36 +3,36 @@ import { isAuthenticated } from "../../keycloak.js";

const router = express.Router();
import {
  listPrintJobsRouteHandler,
  getPrintJobRouteHandler,
  editPrintJobRouteHandler,
  createPrintJobRouteHandler,
  getPrintJobStatsRouteHandler
} from "../../services/printjobs/index.js";
  listJobsRouteHandler,
  getJobRouteHandler,
  editJobRouteHandler,
  createJobRouteHandler,
  getJobStatsRouteHandler
} from "../../services/jobs/index.js";

// list of print jobs
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit } = req.body;
  listPrintJobsRouteHandler(req, res, page, limit);
  listJobsRouteHandler(req, res, page, limit);
});

// get printer stats
router.get("/stats", isAuthenticated, (req, res) => {
  getPrintJobStatsRouteHandler(req, res);
  getJobStatsRouteHandler(req, res);
});

// create new print job
router.post("/", isAuthenticated, (req, res) => {
  createPrintJobRouteHandler(req, res);
  createJobRouteHandler(req, res);
});

router.get("/:id", isAuthenticated, (req, res) => {
  getPrintJobRouteHandler(req, res);
  getJobRouteHandler(req, res);
});

// update job info
router.put("/:id", isAuthenticated, async (req, res) => {
  editPrintJobRouteHandler(req, res);
  editJobRouteHandler(req, res);
});

export default router;
@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {

  const allowedFilters = ["type", "brand", "diameter", "color"];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = parseStringIfNumber(value);
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

63 src/routes/notes/index.js Normal file
@@ -0,0 +1,63 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import {
  listNotesRouteHandler,
  getNoteRouteHandler,
  editNoteRouteHandler,
  newNoteRouteHandler,
  deleteNoteRouteHandler
} from "../../services/notes/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();

// List notes
router.get("/", isAuthenticated, async (req, res) => {
  const { page, limit, property, sort, order } = req.query;

  const allowedFilters = ["parent", "user._id"];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const filterObject = parseFilter(key, value);
        filter = {...filter, ...filterObject}
      }
    }
  }

  listNotesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
  }
);

// Get single note
router.get(
  "/:id",
  isAuthenticated,
  getNoteRouteHandler
);

// Edit note
router.put(
  "/:id",
  isAuthenticated,
  editNoteRouteHandler
);

// Delete note
router.delete(
  "/:id",
  isAuthenticated,
  deleteNoteRouteHandler
);

// Create new note
router.post(
  "/",
  isAuthenticated,
  newNoteRouteHandler
);

export default router;
55 src/routes/notetypes/index.js Normal file
@@ -0,0 +1,55 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import {
  listNoteTypesRouteHandler,
  getNoteTypeRouteHandler,
  editNoteTypeRouteHandler,
  newNoteTypeRouteHandler,
} from "../../services/notetypes/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();

// List note types
router.get("/", isAuthenticated, async (req, res) => {
  const { page, limit, property, sort, order } = req.query;

  const allowedFilters = ["name", "active"];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

  listNoteTypesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
  }
);

// Get single note type
router.get(
  "/:id",
  isAuthenticated,
  getNoteTypeRouteHandler
);

// Edit note type
router.put(
  "/:id",
  isAuthenticated,
  editNoteTypeRouteHandler
);

// Create new note type
router.post(
  "/",
  isAuthenticated,
  newNoteTypeRouteHandler
);

export default router;
@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -18,12 +18,13 @@ router.get("/", isAuthenticated, (req, res) => {

  const allowedFilters = ["products", "name"];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = { $regex: parseStringIfNumber(value), $options: 'i' }
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {

  const allowedFilters = ["country"];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = parseStringIfNumber(value);
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {

  const allowedFilters = ["type", "brand", "diameter", "color"];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = parseStringIfNumber(value);
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

54 src/routes/stockaudits/index.js Normal file
@@ -0,0 +1,54 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
  listStockAuditsRouteHandler,
  getStockAuditRouteHandler,
  newStockAuditRouteHandler,
  updateStockAuditRouteHandler,
  deleteStockAuditRouteHandler,
} from "../../services/stockaudits/index.js";

// List stock audits
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

  const allowedFilters = ["status", "type", "createdBy"];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

  listStockAuditsRouteHandler(req, res, page, limit, property, filter);
});

// Create new stock audit
router.post("/", isAuthenticated, (req, res) => {
  newStockAuditRouteHandler(req, res);
});

// Get specific stock audit
router.get("/:id", isAuthenticated, (req, res) => {
  getStockAuditRouteHandler(req, res);
});

// Update stock audit
router.put("/:id", isAuthenticated, (req, res) => {
  updateStockAuditRouteHandler(req, res);
});

// Delete stock audit
router.delete("/:id", isAuthenticated, (req, res) => {
  deleteStockAuditRouteHandler(req, res);
});

export default router;
41 src/routes/stockevents/index.js Normal file
@@ -0,0 +1,41 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
  listStockEventsRouteHandler,
  getStockEventRouteHandler,
  newStockEventRouteHandler,
} from "../../services/stockevents/index.js";

// List stock events
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property, sort, order } = req.query;

  const allowedFilters = ["type", "filamentStock"];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }
  listStockEventsRouteHandler(req, res, page, limit, property, filter, sort, order);
});

// Create new stock event
router.post("/", isAuthenticated, (req, res) => {
  newStockEventRouteHandler(req, res);
});

// Get specific stock event
router.get("/:id", isAuthenticated, (req, res) => {
  getStockEventRouteHandler(req, res);
});

export default router;
@@ -1,22 +1,41 @@
import express from 'express';
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
  listUsersRouteHandler,
  getUserRouteHandler,
  editUserRouteHandler,
} from "../../services/users/index.js";

router.get('/', (req, res) => {
  res.send({
    data: [
      {
        id: 1,
        firstName: 'John',
        lastName: 'Smith',
      },
      {
        id: 2,
        firstName: 'Stacey',
        lastName: 'Smith',
      },
    ],
  });
// list of users
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

  const allowedFilters = ["username", "name", "firstName", "lastName"];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

  listUsersRouteHandler(req, res, page, limit, property, filter);
});

router.get("/:id", isAuthenticated, (req, res) => {
  getUserRouteHandler(req, res);
});

// update user info
router.put("/:id", isAuthenticated, async (req, res) => {
  editUserRouteHandler(req, res);
});

export default router;

7 src/routes/vendors/index.js vendored
@@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import { parseFilter } from "../../util/index.js";

const router = express.Router();
import {
@@ -16,12 +16,13 @@ router.get("/", isAuthenticated, (req, res) => {

  const allowedFilters = ["country"];

  const filter = {};
  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        filter[key] = parseStringIfNumber(value);
        const parsedFilter = parseFilter(key, value)
        filter = {...filter, ...parsedFilter};
      }
    }
  }

41 src/schemas/auditlog.schema.js Normal file
@@ -0,0 +1,41 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const auditLogSchema = new Schema(
  {
    oldValue: { type: Object, required: true },
    newValue: { type: Object, required: true },
    target: {
      type: Schema.Types.ObjectId,
      refPath: 'targetModel',
      required: true
    },
    targetModel: {
      type: String,
      required: true,
      enum: ['Printer', 'Job', 'SubJob', 'FilamentStock', 'StockEvent', 'Vendor', 'Part', 'Product', 'Material', 'Filament', 'GCodeFile', 'NoteType', 'Note', 'User'] // Add other models as needed
    },
    owner: {
      type: Schema.Types.ObjectId,
      refPath: 'ownerModel',
      required: true
    },
    ownerModel: {
      type: String,
      required: true,
      enum: ['User', 'Printer']
    }
  },
  { timestamps: true }
);

// Add virtual id getter
auditLogSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Configure JSON serialization to include virtuals
auditLogSchema.set("toJSON", { virtuals: true });

// Create and export the model
export const auditLogModel = mongoose.model("AuditLog", auditLogSchema);
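
Since target and owner use refPath rather than a fixed ref, Mongoose resolves each populate() call against the model named in that document's own targetModel/ownerModel field. A minimal sketch of reading these polymorphic references (the query filter is an assumption):

// Sketch: populate() follows refPath, so each log's target resolves against
// whichever model its own targetModel field names.
const logs = await auditLogModel
  .find({ targetModel: "Filament" }) // assumed example filter
  .populate("target")                // resolved via refPath: 'targetModel'
  .populate("owner", "name _id");    // resolved via refPath: 'ownerModel'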
@@ -13,13 +13,7 @@ const filamentStockSchema = new Schema(
    currentGrossWeight: { type: Number, required: true },
    currentNetWeight: { type: Number, required: true },
    filament: { type: mongoose.Schema.Types.ObjectId, ref: "Filament" },
    stockEvents: [{
      type: { type: String, required: true },
      value: { type: Number, required: true },
      subJob: { type: mongoose.Schema.Types.ObjectId, ref: "PrintSubJob", required: false },
      job: { type: mongoose.Schema.Types.ObjectId, ref: "PrintJob", required: false },
      timestamp: { type: Date, default: Date.now }
    }]
    stockEvents: [{ type: mongoose.Schema.Types.ObjectId, ref: "StockEvent" }]
  },
  { timestamps: true },
);

@@ -1,7 +1,7 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const printJobSchema = new mongoose.Schema({
const jobSchema = new mongoose.Schema({
  state: {
    type: { required: true, type: String },
  },
@@ -9,6 +9,7 @@ const printJobSchema = new mongoose.Schema({
  createdAt: { required: true, type: Date },
  updatedAt: { required: true, type: Date },
  startedAt: { required: false, type: Date },
  finishedAt: { required: false, type: Date },
  gcodeFile: {
    type: Schema.Types.ObjectId,
    ref: "GCodeFile",
@@ -21,14 +22,17 @@ const printJobSchema = new mongoose.Schema({
    min: 1,
  },
  subJobs: [
    { type: Schema.Types.ObjectId, ref: "PrintSubJob", required: false },
    { type: Schema.Types.ObjectId, ref: "SubJob", required: false },
  ],
  notes: [
    { type: Schema.Types.ObjectId, ref: "Note", required: false }
  ],
});

printJobSchema.virtual("id").get(function () {
jobSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

printJobSchema.set("toJSON", { virtuals: true });
jobSchema.set("toJSON", { virtuals: true });

export const printJobModel = mongoose.model("PrintJob", printJobSchema);
export const jobModel = mongoose.model("Job", jobSchema);
41 src/schemas/note.schema.js Normal file
@@ -0,0 +1,41 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const noteSchema = new mongoose.Schema({
  parent: {
    type: Schema.Types.ObjectId,
    required: true,
  },
  content: {
    type: String,
    required: true,
  },
  noteType: {
    type: Schema.Types.ObjectId,
    ref: "NoteType",
    required: true,
  },
  createdAt: {
    type: Date,
    required: true,
    default: Date.now,
  },
  updatedAt: {
    type: Date,
    required: true,
    default: Date.now,
  },
  user: {
    type: Schema.Types.ObjectId,
    ref: "User",
    required: false,
  }
});

noteSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

noteSchema.set("toJSON", { virtuals: true });

export const noteModel = mongoose.model("Note", noteSchema);
32 src/schemas/notetype.schema.js Normal file
@@ -0,0 +1,32 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const noteTypeSchema = new Schema(
  {
    name: {
      type: String,
      required: true,
      unique: true,
    },
    color: {
      type: String,
      required: false,
    },
    active: {
      type: Boolean,
      required: true,
      default: true,
    }
  },
  { timestamps: true }
);

// Add virtual id getter
noteTypeSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Configure JSON serialization to include virtuals
noteTypeSchema.set("toJSON", { virtuals: true });

export const noteTypeModel = mongoose.model("NoteType", noteTypeSchema);
@@ -6,7 +6,9 @@ const partStockSchema = new Schema(
  {
    name: { type: String, required: true },
    fileName: { type: String, required: false },
    product: { type: mongoose.Schema.Types.ObjectId, ref: "Product" },
    part: { type: mongoose.Schema.Types.ObjectId, ref: "Part" },
    startingQuantity: { type: Number, required: true },
    currentQuantity: { type: Number, required: true },
  },
  { timestamps: true },
);

@@ -39,10 +39,10 @@ const printerSchema = new Schema(
    moonraker: { type: moonrakerSchema, required: true },
    tags: [{ type: String }],
    firmware: { type: String },
    currentJob: { type: Schema.Types.ObjectId, ref: "PrintJob" },
    currentSubJob: { type: Schema.Types.ObjectId, ref: "PrintSubJob" },
    currentJob: { type: Schema.Types.ObjectId, ref: "Job" },
    currentSubJob: { type: Schema.Types.ObjectId, ref: "SubJob" },
    currentFilamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock" },
    subJobs: [{ type: Schema.Types.ObjectId, ref: "PrintSubJob" }],
    subJobs: [{ type: Schema.Types.ObjectId, ref: "SubJob" }],
    vendor: { type: Schema.Types.ObjectId, ref: "Vendor", default: null },
    alerts: [alertSchema],
  },

38 src/schemas/stockaudit.schema.js Normal file
@@ -0,0 +1,38 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const stockAuditItemSchema = new Schema({
  type: { type: String, enum: ["filament", "part"], required: true },
  stock: { type: Schema.Types.ObjectId, required: true },
  expectedQuantity: { type: Number, required: true },
  actualQuantity: { type: Number, required: true },
  notes: { type: String }
});

const stockAuditSchema = new Schema(
  {
    type: { type: String, required: true },
    status: {
      type: String,
      enum: ["pending", "in_progress", "completed", "cancelled"],
      default: "pending",
      required: true
    },
    notes: { type: String },
    items: [stockAuditItemSchema],
    createdBy: { type: Schema.Types.ObjectId, ref: "User", required: true },
    completedAt: { type: Date }
  },
  { timestamps: true }
);

// Add virtual id getter
stockAuditSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Configure JSON serialization to include virtuals
stockAuditSchema.set("toJSON", { virtuals: true });

// Create and export the model
export const stockAuditModel = mongoose.model("StockAudit", stockAuditSchema);
26 src/schemas/stockevent.schema.js Normal file
@@ -0,0 +1,26 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const stockEventSchema = new Schema(
  {
    type: { type: String, required: true },
    value: { type: Number, required: true },
    unit: { type: String, required: true},
    subJob: { type: Schema.Types.ObjectId, ref: "SubJob", required: false },
    job: { type: Schema.Types.ObjectId, ref: "Job", required: false },
    filamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock", required: true },
    timestamp: { type: Date, default: Date.now }
  },
  { timestamps: true }
);

// Add virtual id getter
stockEventSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Configure JSON serialization to include virtuals
stockEventSchema.set("toJSON", { virtuals: true });

// Create and export the model
export const stockEventModel = mongoose.model("StockEvent", stockEventSchema);
@@ -1,15 +1,15 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const printSubJobSchema = new mongoose.Schema({
const subJobSchema = new mongoose.Schema({
  printer: {
    type: Schema.Types.ObjectId,
    ref: "Printer",
    required: true,
  },
  printJob: {
  job: {
    type: Schema.Types.ObjectId,
    ref: "PrintJob",
    ref: "Job",
    required: true,
  },
  subJobId: {
@@ -37,15 +37,17 @@ const printSubJobSchema = new mongoose.Schema({
    type: Date,
    default: Date.now,
  },
  startedAt: { required: false, type: Date },
  finishedAt: { required: false, type: Date },
});

printSubJobSchema.virtual("id").get(function () {
subJobSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

printSubJobSchema.set("toJSON", { virtuals: true });
subJobSchema.set("toJSON", { virtuals: true });

export const printSubJobModel = mongoose.model(
  "PrintSubJob",
  printSubJobSchema,
export const subJobModel = mongoose.model(
  "SubJob",
  subJobSchema,
);
@@ -1,25 +1,12 @@
import { Binary } from "mongodb";
import mongoose from "mongoose";

const userSchema = new mongoose.Schema({
  name: { required: true, type: String },
  username: { required: true, type: String},
  name: { required: true, type: String},
  firstName: { required: false, type: String },
  lastName: { required: false, type: String },
  email: { required: true, type: String },
  emailVerifiedAt: { type: Date },
  password: { required: true, type: String },
  webAuthnCredentials: [
    {
      id: String,
      publicKey: Buffer,
      counter: Number,
      deviceType: String,
      backedUp: Boolean,
      transports: [String],
    },
  ],
  profileImage: { type: String },
  createdAt: { type: Date },
  updatedAt: { type: Date },
});
}, { timestamps: true },);

userSchema.virtual("id").get(function () {
  return this._id.toHexString();

@@ -1,92 +0,0 @@
import bcrypt from "bcrypt";
import dotenv from 'dotenv';
import { userModel } from "../../schemas/user.schema.js";
import jwt from 'jsonwebtoken';

dotenv.config();

export const getDashboardRouteHandler = (req, res) => {
  const sentData = {
    data: {}
  }
  res.send(sentData);
}

export const getProfileRouteHandler = (req, res) => {
  const meUser = req.user;

  const stringId = req.user.id;
  const decId = stringId.substring(4, 8);
  const intId = parseInt(decId, 16);

  const sentData = {
    data: {
      type: 'users',
      id: intId === 1 ? intId : meUser.id,
      attributes: {
        name: meUser.name,
        email: meUser.email,
        profile_image: null,
        createdAt: meUser.createdAt,
        updateAt: meUser.updateAt
      },
      links: {
        self: `${process.env.APP_URL_API}/users/${meUser.id}`
      }
    }
  }
  res.send(sentData);
}

export const patchProfileRouteHandler = async (req, res) => {
  const currentDataOfUser = req.user;
  const { name, email, newPassword, confirmPassword } = req.body.data.attributes;
  const foundUser = await userModel.findOne({ email: currentDataOfUser.email});

  if (!foundUser) {
    res.status(400).json({error: 'No user matches the credentials'});
  } else {
    // check password more than 8 characters, new password matched the password confirmation
    if (newPassword && newPassword < 7 || newPassword != confirmPassword) {
      res.status(400).json({errors: { password: ["The password should have at lest 8 characters and match the password confirmation."] }});
    } else if (newPassword && newPassword > 7 && newPassword == confirmPassword) {
      const salt = await bcrypt.genSalt(10);
      const hashPassword = await bcrypt.hash(newPassword, salt);
      try{
        await userModel.updateOne( { email: foundUser.email }, { $set :{ "name": name, "email": email, "password": hashPassword } });
      } catch(err) {
        console.error(err);
      }
      const sentData = {
        data: {
          type: 'users',
          id: foundUser.id,
          attributes: {
            name: name,
            email: email,
            profile_image: null,
          }
        }
      }
      res.send(sentData);
    } else if (!newPassword) {
      try {
        await userModel.updateOne( { email: foundUser.email }, { $set :{ "name": name, "email": email } });
      } catch(err) {
        console.error(err);
      }
      const sentData = {
        data: {
          type: 'users',
          id: foundUser.id,
          attributes: {
            name: name,
            email: email,
            profile_image: null,
          }
        }
      }
      res.send(sentData);
    }
  }
}
62 src/services/auditlogs/index.js Normal file
@@ -0,0 +1,62 @@
import dotenv from "dotenv";
import { auditLogModel } from '../../schemas/auditlog.schema.js';
import log4js from "log4js";
import mongoose from "mongoose";

dotenv.config();

const logger = log4js.getLogger("AuditLogs");
logger.level = process.env.LOG_LEVEL;

export const listAuditLogsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    // Use find with population
    const auditLogs = await auditLogModel
      .find(filter)
      .skip(skip)
      .limit(Number(limit))
      .sort({ createdAt: -1 })
      .populate('owner', 'name _id')

    logger.trace(
      `List of audit logs (Page ${page}, Limit ${limit}):`,
      auditLogs,
    );
    res.send(auditLogs);
  } catch (error) {
    logger.error("Error listing audit logs:", error);
    res.status(500).send({ error: error });
  }
};

export const getAuditLogRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the audit log with the given ID
    const auditLog = await auditLogModel.findOne({
      _id: id,
    }).populate('printer').populate('owner').populate('target');

    if (!auditLog) {
      logger.warn(`Audit log not found with supplied id.`);
      return res.status(404).send({ error: "Audit log not found." });
    }

    logger.trace(`Audit log with ID: ${id}:`, auditLog);
    res.send(auditLog);
  } catch (error) {
    logger.error("Error fetching audit log:", error);
    res.status(500).send({ error: error.message });
  }
};
@@ -2,6 +2,7 @@ import dotenv from "dotenv";
import { keycloak } from "../../keycloak.js";
import log4js from "log4js";
import axios from "axios";
import { userModel } from "../../schemas/user.schema.js";

dotenv.config();

@@ -27,6 +28,49 @@ export const loginRouteHandler = (req, res) => {
  );
};

// Function to fetch user from Keycloak and store in database and session
const fetchAndStoreUser = async (req, token) => {
  const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;

  try {
    const response = await axios.post(
      userInfoUrl,
      new URLSearchParams({
        client_id: process.env.KEYCLOAK_CLIENT_ID,
        client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
      }),
      {
        headers: {
          Authorization: `Bearer ${token.access_token}`,
        },
      }
    );

    const userInfo = {
      access_token: token.access_token,
      expires_at: token.expires_at,
      roles: token.realm_access?.roles || [],
      username: response.data.preferred_username,
      email: response.data.email,
      name: response.data.name,
      firstName: response.data.given_name,
      lastName: response.data.family_name,
    };

    // Create or update user in database
    const user = await createOrUpdateUser(userInfo);
    const fullUserInfo = { ...userInfo, _id: user._id };

    // Store user info in session
    req.session.user = fullUserInfo;

    return fullUserInfo;
  } catch (error) {
    logger.error("Error fetching and storing user:", error);
    throw error;
  }
};

// Login callback handler
export const loginCallbackRouteHandler = (req, res) => {
  // Don't use keycloak.protect() here as it expects an already authenticated session
@@ -60,20 +104,30 @@ export const loginCallbackRouteHandler = (req, res) => {
      },
    },
  )
    .then((response) => {
    .then(async (response) => {
      // Store tokens in session
      req.session["keycloak-token"] = {
      const tokenData = {
        access_token: response.data.access_token,
        refresh_token: response.data.refresh_token,
        id_token: response.data.id_token,
        expires_at: new Date().getTime() + response.data.expires_in * 1000,
      };
      req.session["keycloak-token"] = tokenData;

      try {
        // Fetch and store user data
        await fetchAndStoreUser(req, tokenData);

        // Save session and redirect to the original URL
        req.session.save(() => {
          res.redirect(
            (process.env.APP_URL_CLIENT || "http://localhost:3000") + state,
          );
        });
      } catch (error) {
        logger.error("Error during user setup:", error);
        res.status(500).send("Error setting up user session");
      }
    })
    .catch((error) => {
      console.error(
@@ -84,51 +138,65 @@ export const loginCallbackRouteHandler = (req, res) => {
    });
};

export const userRouteHandler = (req, res) => {
  if (req.session && req.session["keycloak-token"]) {
    const token = req.session["keycloak-token"];
    const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;
    // User is authenticated
    // Extract user info from the token
    //
    logger.info("Fetching user from keycloak...");
    axios
      .post(
        userInfoUrl,
        new URLSearchParams({
          client_id: process.env.KEYCLOAK_CLIENT_ID,
          client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
        }),
        {
          headers: {
            Authorization: `Bearer ${token.access_token}`,
          },
        },
      )
      .then((response) => {
        const userInfo = {
          // Extract user details from token
          // This depends on your token structure
          access_token: token.access_token,
          expires_at: token.expires_at,
          roles: token.realm_access?.roles || [],
          username: response.data.preferred_username,
          email: response.data.email,
          name: response.data.name,
          firstName: response.data.given_name,
          lastName: response.data.family_name,
// Function to create or update user
const createOrUpdateUser = async (userInfo) => {
  try {
    const { username, email, name, firstName, lastName } = userInfo;

    // Find existing user by username
    const existingUser = await userModel.findOne({ username });

    if (existingUser) {
      // Check if any values have changed
      const hasChanges =
        existingUser.email !== email ||
        existingUser.name !== name ||
        existingUser.firstName !== firstName ||
        existingUser.lastName !== lastName;

      if (hasChanges) {
        // Update existing user only if there are changes
        const updateData = {
          email,
          name,
          firstName,
          lastName,
          updatedAt: new Date()
        };
        res.json(userInfo);
      })
      .catch((error) => {
        logger.error(
          "Token exchange error:",
          error.response?.data || error.message,

        await userModel.updateOne(
          { username },
          { $set: updateData }
        );
        res.status(500).send("Authentication failed");
      });

        // Fetch the updated user to return
        return await userModel.findOne({ username });
      }

      return existingUser;
    } else {
      // Create new user
      const newUser = new userModel({
        username,
        email,
        name,
        firstName,
        lastName
      });

      await newUser.save();
      return newUser;
    }
  } catch (error) {
    logger.error("Error creating/updating user:", error);
    throw error;
  }
};

export const userRouteHandler = (req, res) => {
  if (req.session && req.session.user) {
    res.json(req.session.user);
  } else {
    // User is not authenticated
    res.status(401).json({ error: "Not authenticated" });
  }
};
@@ -270,6 +338,16 @@ export const refreshTokenRouteHandler = (req, res) => {
  });
};

// Middleware to populate req.user from session
export const populateUserMiddleware = (req, res, next) => {
  if (req.session && req.session.user) {
    req.user = req.session.user;
  } else {
    req.user = null;
  }
  next();
};

// Example of how to set up your routes in Express
/*
import express from "express";

@@ -3,6 +3,8 @@ import { filamentModel } from "../../schemas/filament.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

@@ -82,7 +84,12 @@ export const getFilamentRouteHandler = async (req, res) => {
    }

    logger.trace(`Filament with ID: ${id}:`, filament);
    res.send(filament);

    const auditLogs = await auditLogModel.find({
      target: id
    }).populate('owner');

    res.send({...filament._doc, auditLogs: auditLogs});
  } catch (error) {
    logger.error("Error fetching Filament:", error);
    res.status(500).send({ error: error.message });
@@ -112,7 +119,7 @@ export const editFilamentRouteHandler = async (req, res) => {
      url: req.body.url,
      image: req.body.image,
      color: req.body.color,
      vendor: req.body.vendor.id,
      vendor: req.body.vendor._id,
      type: req.body.type,
      price: req.body.price,
      diameter: req.body.diameter,
@@ -120,6 +127,16 @@ export const editFilamentRouteHandler = async (req, res) => {
      emptySpoolWeight: req.body.emptySpoolWeight,
    };

    // Create audit log before updating
    await newAuditLog(
      filament.toObject(),
      updateData,
      id,
      'Filament',
      req.user._id,
      'User'
    );

    const result = await filamentModel.updateOne(
      { _id: id },
      { $set: updateData },
@@ -164,6 +181,16 @@ export const newFilamentRouteHandler = async (req, res) => {
      res.status(500).send({ error: "No filament created." });
    }

    // Create audit log for new filament
    await newAuditLog(
      {},
      newFilament,
      result._id,
      'Filament',
      req.user._id,
      'User'
    );

    res.status(200).send({ status: "ok" });
  } catch (updateError) {
    logger.error("Error updating filament:", updateError);

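The newAuditLog helper called above lives in src/util/index.js and is not included in this diff. Judging from the call sites (oldValue, newValue, target id, target model, owner id, owner model) and the AuditLog schema, a plausible sketch — an assumption, not the actual implementation — is:

// Hypothetical sketch of newAuditLog; the real src/util/index.js version is not shown here.
import { auditLogModel } from "../schemas/auditlog.schema.js";

export async function newAuditLog(oldValue, newValue, target, targetModel, owner, ownerModel) {
  return auditLogModel.create({
    oldValue,    // snapshot before the change ({} for creations)
    newValue,    // incoming update payload
    target,      // ObjectId of the changed document
    targetModel, // e.g. 'Filament'; resolved later via refPath
    owner,       // ObjectId of the acting user
    ownerModel,  // e.g. 'User'
  });
}
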
@@ -1,9 +1,11 @@
import dotenv from "dotenv";
import { filamentStockModel } from "../../schemas/filamentstock.schema.js";
import { filamentModel } from "../../schemas/filament.schema.js";
import { stockEventModel } from "../../schemas/stockevent.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

@@ -17,6 +19,8 @@ export const listFilamentStocksRouteHandler = async (
  limit = 25,
  property = "",
  filter = {},
  sort = "",
  order = "ascend"
) => {
  try {
    // Calculate the skip value based on the page number and limit
@@ -48,6 +52,12 @@ export const listFilamentStocksRouteHandler = async (
      aggregateCommand.push({ $project: { image: 0, url: 0 } });
    }

    // Add sorting if sort parameter is provided
    if (sort) {
      const sortOrder = order === "descend" ? -1 : 1;
      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

@@ -75,7 +85,8 @@ export const getFilamentStockRouteHandler = async (req, res) => {
      .findOne({
        _id: id,
      })
      .populate("filament").populate({
      .populate("filament")
      .populate({
        path: 'stockEvents',
        populate: [
          {
@@ -95,7 +106,12 @@ export const getFilamentStockRouteHandler = async (req, res) => {
    }

    logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
    res.send(filamentStock);

    const auditLogs = await auditLogModel.find({
      target: id
    }).populate('owner');

    res.send({...filamentStock._doc, auditLogs: auditLogs});
  } catch (error) {
    logger.error("Error fetching filament stock:", error);
    res.status(500).send({ error: error.message });
@@ -189,6 +205,29 @@ export const newFilamentStockRouteHandler = async (req, res) => {
      logger.error("No filament stock created.");
      return res.status(500).send({ error: "No filament stock created." });
    }

    // Create initial stock event
    const stockEvent = {
      type: "initial",
      value: startingNetWeight,
      unit: "g",
      filamentStock: result._id,
      createdAt: new Date(),
      updatedAt: new Date(),
    };

    const eventResult = await stockEventModel.create(stockEvent);
    if (!eventResult) {
      logger.error("Failed to create initial stock event.");
      return res.status(500).send({ error: "Failed to create initial stock event." });
    }

    // Update the filament stock with the stock event reference
    await filamentStockModel.updateOne(
      { _id: result._id },
      { $push: { stockEvents: eventResult._id } }
    );

    return res.send({ status: "ok" });
  } catch (updateError) {
    logger.error("Error adding filament stock:", updateError);

@@ -8,6 +8,8 @@ import crypto from "crypto";
import path from "path";
import fs from "fs";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

import { extractConfigBlock } from "../../util/index.js";

@@ -225,6 +227,16 @@ export const editGCodeFileRouteHandler = async (req, res) => {
      filament: req.body?.filament?._id,
    };

    // Create audit log before updating
    await newAuditLog(
      gcodeFile.toObject(),
      updateData,
      id,
      'GCodeFile',
      req.user._id,
      'User'
    );

    const result = await gcodeFileModel.updateOne(
      { _id: id },
      { $set: updateData },
@@ -280,7 +292,18 @@ export const newGCodeFileRouteHandler = async (req, res) => {
      logger.error("No gcode file created.");
      res.status(500).send({ error: "No gcode file created." });
    }
    res.status(200).send(result);

    // Create audit log for new gcodefile
    await newAuditLog(
      {},
      newGCodeFile,
      result._id,
      'GCodeFile',
      req.user._id,
      'User'
    );

    res.status(200).send({ status: "ok" });
  } catch (updateError) {
    logger.error("Error creating gcode file:", updateError);
    res.status(500).send({ error: updateError.message });
@@ -407,8 +430,13 @@ export const getGCodeFileRouteHandler = async (req, res) => {
      return res.status(404).send({ error: "Print job not found." });
    }

    logger.trace(`GCodeFile with ID: ${id}:`);
    res.send(gcodeFile);
    logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);

    const auditLogs = await auditLogModel.find({
      target: id
    }).populate('owner');

    res.send({...gcodeFile._doc, auditLogs: auditLogs});
  } catch (error) {
    logger.error("Error fetching GCodeFile:", error);
    res.status(500).send({ error: error.message });

@ -1,16 +1,18 @@
import dotenv from "dotenv";
import mongoose from "mongoose";
import { printJobModel } from "../../schemas/printjob.schema.js";
import { printSubJobModel } from "../../schemas/printsubjob.schema.js";
import { jobModel } from "../../schemas/job.schema.js";
import { subJobModel } from "../../schemas/subjob.schema.js";
import { noteModel } from "../../schemas/note.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

const logger = log4js.getLogger("PrintJobs");
const logger = log4js.getLogger("Jobs");
logger.level = process.env.LOG_LEVEL;

export const listPrintJobsRouteHandler = async (
export const listJobsRouteHandler = async (
req,
res,
page = 1,
@ -21,7 +23,7 @@ export const listPrintJobsRouteHandler = async (
const skip = (page - 1) * limit;

// Fetch users with pagination
const printJobs = await printJobModel
const jobs = await jobModel
.find()
.sort({ createdAt: -1 })
.skip(skip)
@ -30,75 +32,82 @@ export const listPrintJobsRouteHandler = async (
.populate("gcodeFile", "name");

logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
res.send(printJobs);
res.send(jobs);
} catch (error) {
logger.error("Error listing print jobs:", error);
res.status(500).send({ error: error });
}
};

export const getPrintJobRouteHandler = async (req, res) => {
export const getJobRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the printJob with the given remote address
const printJob = await printJobModel
// Fetch the job with the given ID
const job = await jobModel
.findOne({
_id: id,
})
.populate("printers", "name state")
.populate("gcodeFile")
.populate("subJobs");
.populate("subJobs")
.populate("notes");

if (!printJob) {
logger.warn(`PrintJob not found with supplied id.`);
if (!job) {
logger.warn(`Job not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
}

logger.trace(`PrintJob with ID: ${id}:`, printJob);
res.send(printJob);
logger.trace(`Job with ID: ${id}:`, job);

const targetIds = [id, ...job.subJobs.map(subJob => subJob._id)];
const auditLogs = await auditLogModel.find({
target: { $in: targetIds.map(id => new mongoose.Types.ObjectId(id)) }
}).populate('owner');

res.send({...job._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching printJob:", error);
logger.error("Error fetching job:", error);
res.status(500).send({ error: error.message });
}
};

export const editPrintJobRouteHandler = async (req, res) => {
export const editJobRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);

// Fetch the printJob with the given remote address
const printJob = await printJobModel.findOne({ _id: id });
// Fetch the job with the given ID
const job = await jobModel.findOne({ _id: id });

if (!printJob) {
logger.warn(`PrintJob not found with supplied id.`);
if (!job) {
logger.warn(`Job not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
}

logger.trace(`PrintJob with ID: ${id}:`, printJob);
logger.trace(`Job with ID: ${id}:`, job);

const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body;

const result = await printJobModel.updateOne(
const result = await jobModel.updateOne(
{ _id: id },
{ $set: updateData },
);

if (result.nModified === 0) {
logger.warn("No printJobs updated.");
return res.status(400).send({ error: "No printJobs updated." });
logger.warn("No jobs updated.");
return res.status(400).send({ error: "No jobs updated." });
}

res.send({ message: "Print job updated successfully" });
} catch (error) {
logger.error("Error updating printJob:", error);
logger.error("Error updating job:", error);
res.status(500).send({ error: error.message });
}
};

export const createPrintJobRouteHandler = async (req, res) => {
export const createJobRouteHandler = async (req, res) => {
try {
const { gcodeFile, printers, quantity = 1 } = req.body;

@ -112,7 +121,7 @@ export const createPrintJobRouteHandler = async (req, res) => {
const printerIds = printers.map((id) => new mongoose.Types.ObjectId(id));

// Create new print job
const newPrintJob = new printJobModel({
const newJob = new jobModel({
state: { type: "draft" },
printers: printerIds,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
@ -124,14 +133,14 @@ export const createPrintJobRouteHandler = async (req, res) => {
});

// Save the print job first to get its ID
const savedPrintJob = await newPrintJob.save();
const savedJob = await newJob.save();

// Create subjobs array with sequential numbers based on quantity
const subJobs = await Promise.all(
Array.from({ length: quantity }, (_, index) => {
const subJob = new printSubJobModel({
const subJob = new subJobModel({
printer: printerIds[index % printerIds.length], // Distribute across available printers
printJob: savedPrintJob._id,
job: savedJob._id,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
subJobId: `subjob-${index + 1}`,
state: { type: "draft" },
@ -144,22 +153,22 @@ export const createPrintJobRouteHandler = async (req, res) => {
);

// Update the print job with the subjob references
savedPrintJob.subJobs = subJobs.map((subJob) => subJob._id);
await savedPrintJob.save();
savedJob.subJobs = subJobs.map((subJob) => subJob._id);
await savedJob.save();

logger.trace(
`Created new print job with ID: ${savedPrintJob._id} and ${subJobs.length} subjobs`,
`Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`,
);
res.status(201).send({ printJob: savedPrintJob, subJobs });
res.status(201).send({ job: savedJob, subJobs });
} catch (error) {
logger.error("Error creating print job:", error);
res.status(500).send({ error: error.message });
}
};

export const getPrintJobStatsRouteHandler = async (req, res) => {
export const getJobStatsRouteHandler = async (req, res) => {
try {
const stats = await printJobModel.aggregate([
const stats = await jobModel.aggregate([
{
$group: {
_id: "$state.type",
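
Worth noting in getJobRouteHandler above: audit logs are collected for the job and all of its subjobs in a single query. A minimal sketch of that lookup, with the subJobs population taken from the handler:

const job = await jobModel.findOne({ _id: id }).populate("subJobs");
// One $in match covers the parent job and every subjob id:
const targetIds = [id, ...job.subJobs.map((subJob) => subJob._id)];
const auditLogs = await auditLogModel
  .find({ target: { $in: targetIds } })
  .populate("owner");
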
246
src/services/notes/index.js
Normal file
@ -0,0 +1,246 @@
import dotenv from "dotenv";
import { noteModel } from "../../schemas/note.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

const logger = log4js.getLogger("Notes");
logger.level = process.env.LOG_LEVEL;

export const listNotesRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = "",
filter = {},
) => {
try {
const skip = (page - 1) * limit;
let notes;
let aggregateCommand = [];

if (Object.keys(filter).length > 0) {
aggregateCommand.push({ $match: filter });
}

aggregateCommand.push({
$lookup: {
from: "users", // The collection name (usually lowercase plural)
localField: "user", // The field in your current model
foreignField: "_id", // The field in the users collection
as: "user", // The output field name
},
});
aggregateCommand.push({ $unwind: "$user" });
aggregateCommand.push({
$lookup: {
from: "notetypes", // The collection name (usually lowercase plural)
localField: "noteType", // The field in your current model
foreignField: "_id", // The field in the notetypes collection
as: "noteType", // The output field name
},
});
aggregateCommand.push({ $unwind: "$noteType" });
aggregateCommand.push({
$project: {
name: 1,
_id: 1,
createdAt: 1,
updatedAt: 1,
"noteType._id": 1,
"noteType.name": 1,
"noteType.color": 1,
"user._id": 1,
"user.name": 1,
content: 1,
parent: 1
},
});

aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });

notes = await noteModel.aggregate(aggregateCommand);

logger.trace(
`List of notes (Page ${page}, Limit ${limit}, Property ${property}):`,
notes,
);
res.send(notes);
} catch (error) {
logger.error("Error listing notes:", error);
res.status(500).send({ error: error });
}
};

export const getNoteRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const note = await noteModel.findOne({
_id: id,
});

if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: "Note not found." });
}

logger.trace(`Note with ID: ${id}:`, note);

const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');

res.send({...note._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching note:", error);
res.status(500).send({ error: error.message });
}
};

export const editNoteRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const note = await noteModel.findOne({ _id: id });

if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: "Note not found." });
}

logger.trace(`Note with ID: ${id}:`, note);

try {
const updateData = {
updatedAt: new Date(),
name: req.body.name,
color: req.body.color,
isActive: req.body.isActive,
};

// Create audit log before updating
await newAuditLog(
note.toObject(),
updateData,
id,
'Note',
req.user._id,
'User'
);

const result = await noteModel.updateOne(
{ _id: id },
{ $set: updateData },
);
if (result.nModified === 0) {
logger.error("No note updated.");
res.status(500).send({ error: "No notes updated." });
}
} catch (updateError) {
logger.error("Error updating note:", updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
} catch (fetchError) {
logger.error("Error fetching note:", fetchError);
res.status(500).send({ error: fetchError.message });
}
};

export const newNoteRouteHandler = async (req, res) => {
try {
let { ...newNote } = req.body;
newNote = { ...newNote, createdAt: new Date(), updatedAt: new Date(), user: req.user._id };

const result = await noteModel.create(newNote);
if (!result) {
logger.error("No note created.");
res.status(500).send({ error: "No note created." });
}

// Create audit log for new note
await newAuditLog(
{},
newNote,
result._id,
'Note',
req.user._id,
'User'
);

res.status(200).send({ status: "ok" });
} catch (updateError) {
logger.error("Error creating note:", updateError);
res.status(500).send({ error: updateError.message });
}
};

export const deleteNoteRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const note = await noteModel.findOne({ _id: id });

if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: "Note not found." });
}

// Check if the current user owns this note
if (note.user.toString() !== req.user._id.toString()) {
logger.warn(`User ${req.user._id} attempted to delete note ${id} owned by user ${note.user}`);
return res.status(403).send({ error: "You can only delete your own notes." });
}

logger.trace(`Deleting note with ID: ${id} and all its children`);

// Recursively find and delete all child notes
const deletedNoteIds = await recursivelyDeleteNotes(id);

// Create audit log for the deletion
await newAuditLog(
note.toObject(),
{},
id,
'Note',
req.user._id,
'User',
'DELETE'
);

logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
res.send({
status: "ok",
deletedNoteIds: deletedNoteIds,
message: `Deleted ${deletedNoteIds.length} notes`
});

} catch (error) {
logger.error("Error deleting note:", error);
res.status(500).send({ error: error.message });
}
};

// Helper function to recursively delete notes and their children
const recursivelyDeleteNotes = async (noteId) => {
const deletedIds = [];

// Find all notes that have this note as their parent
const childNotes = await noteModel.find({ parent: noteId });

// Recursively delete all children first
for (const childNote of childNotes) {
const childDeletedIds = await recursivelyDeleteNotes(childNote._id);
deletedIds.push(...childDeletedIds);
}

// Delete the current note
await noteModel.deleteOne({ _id: noteId });
deletedIds.push(noteId);

return deletedIds;
};
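
A worked example of the depth-first delete above, with hypothetical notes A -> B -> C (B's parent is A, C's parent is B):

const deletedNoteIds = await recursivelyDeleteNotes(noteA._id);
// => [noteC._id, noteB._id, noteA._id]
// Children are deleted before their parent, so no surviving note is ever
// left referencing a deleted parent.
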
154
src/services/notetypes/index.js
Normal file
@ -0,0 +1,154 @@
import dotenv from "dotenv";
import { noteTypeModel } from "../../schemas/notetype.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

const logger = log4js.getLogger("NoteTypes");
logger.level = process.env.LOG_LEVEL;

export const listNoteTypesRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = "",
filter = {},
) => {
try {
const skip = (page - 1) * limit;
let noteTypes;
let aggregateCommand = [];

if (Object.keys(filter).length > 0) {
aggregateCommand.push({ $match: filter });
}

if (property != "") {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
}

aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });

console.log(aggregateCommand)

noteTypes = await noteTypeModel.aggregate(aggregateCommand);

logger.trace(
`List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
noteTypes,
);
res.send(noteTypes);
} catch (error) {
logger.error("Error listing note types:", error);
res.status(500).send({ error: error });
}
};

export const getNoteTypeRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const noteType = await noteTypeModel.findOne({
_id: id,
});

if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
return res.status(404).send({ error: "Note type not found." });
}

logger.trace(`Note type with ID: ${id}:`, noteType);

const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');

res.send({...noteType._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching note type:", error);
res.status(500).send({ error: error.message });
}
};

export const editNoteTypeRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const noteType = await noteTypeModel.findOne({ _id: id });

if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
return res.status(404).send({ error: "Note type not found." });
}

logger.trace(`Note type with ID: ${id}:`, noteType);

try {
const updateData = {
updatedAt: new Date(),
name: req.body.name,
color: req.body.color,
active: req.body.active,
};

// Create audit log before updating
await newAuditLog(
noteType.toObject(),
updateData,
id,
'NoteType',
req.user._id,
'User'
);

const result = await noteTypeModel.updateOne(
{ _id: id },
{ $set: updateData },
);
if (result.nModified === 0) {
logger.error("No note type updated.");
res.status(500).send({ error: "No note types updated." });
}
} catch (updateError) {
logger.error("Error updating note type:", updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
} catch (fetchError) {
logger.error("Error fetching note type:", fetchError);
res.status(500).send({ error: fetchError.message });
}
};

export const newNoteTypeRouteHandler = async (req, res) => {
try {
let { ...newNoteType } = req.body;
newNoteType = { ...newNoteType, createdAt: new Date(), updatedAt: new Date() };

const result = await noteTypeModel.create(newNoteType);
if (!result) {
logger.error("No note type created.");
res.status(500).send({ error: "No note type created." });
}

// Create audit log for new note type
await newAuditLog(
{},
newNoteType,
result._id,
'NoteType',
req.user._id,
'User'
);

res.status(200).send({ status: "ok" });
} catch (updateError) {
logger.error("Error creating note type:", updateError);
res.status(500).send({ error: updateError.message });
}
};
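
The `property` branch in the list handler above turns the endpoint into a distinct-values query. A sketch with hypothetical documents: for note types colored red, red, and blue, requesting property="color" builds

const distinctColors = await noteTypeModel.aggregate([
  { $group: { _id: "$color" } },            // collapse duplicates
  { $project: { _id: 0, color: "$_id" } },  // rename _id back to "color"
]);
// => [{ color: "red" }, { color: "blue" }]
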
@ -5,6 +5,8 @@ import mongoose from "mongoose";
import multer from "multer";
import fs from "fs";
import path from "path";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

@ -139,7 +141,12 @@ export const getPartRouteHandler = async (req, res) => {
}

logger.trace(`Part with ID: ${id}:`, part);
res.send(part);

const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');

res.send({...part._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching Part:", error);
res.status(500).send({ error: error.message });
@ -165,6 +172,16 @@ export const editPartRouteHandler = async (req, res) => {
const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body;

// Create audit log before updating
await newAuditLog(
part.toObject(),
updateData,
id,
'Part',
req.user._id,
'User'
);

const result = await partModel.updateOne(
{ _id: id },
{ $set: updateData },
@ -201,6 +218,19 @@ export const newPartRouteHandler = async (req, res) => {
logger.error("No parts created.");
return res.status(500).send({ error: "No parts created." });
}

// Create audit logs for each new part
for (const result of results) {
await newAuditLog(
{},
result.toObject(),
result._id,
'Part',
req.user._id,
'User'
);
}

return res.status(200).send(results);
} else {
// Handle single part
@ -212,6 +242,17 @@ export const newPartRouteHandler = async (req, res) => {
fileName: req.body?.fileName,
};
const result = await partModel.create(newPart);

// Create audit log for new part
await newAuditLog(
{},
newPart,
result._id,
'Part',
req.user._id,
'User'
);

return res.status(200).send(result);
}
} catch (error) {

@ -1,6 +1,9 @@
import dotenv from "dotenv";
import { printerModel } from "../../schemas/printer.schema.js";
import log4js from "log4js";
import { newAuditLog } from "../../util/index.js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

@ -47,7 +50,7 @@ export const getPrinterRouteHandler = async (req, res) => {
.populate({
path: "subJobs",
populate: {
path: "printJob",
path: "job",
},
})
.populate("vendor")
@ -62,7 +65,12 @@ export const getPrinterRouteHandler = async (req, res) => {
}

logger.trace(`Printer with id ${id}:`, printer);
res.send(printer);

const auditLogs = await auditLogModel.find({
target: new mongoose.Types.ObjectId(id)
}).populate('owner');

res.send({...printer._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching printer:", error);
res.status(500).send({ error: error.message });
@ -72,6 +80,14 @@ export const getPrinterRouteHandler = async (req, res) => {
export const editPrinterRouteHandler = async (req, res) => {
const id = req.params.id;
try {
// Fetch the printer first to get the old state
const printer = await printerModel.findOne({ _id: id });

if (!printer) {
logger.warn(`Printer not found with supplied id.`);
return res.status(404).send({ error: "Printer not found." });
}

try {
const updateData = {
updatedAt: new Date(),
@ -81,6 +97,16 @@ export const editPrinterRouteHandler = async (req, res) => {
vendor: req.body.vendor.id,
};

// Create audit log before updating
await newAuditLog(
printer.toObject(),
updateData,
id,
'Printer',
req.user._id,
'User'
);

const result = await printerModel.updateOne(
{ _id: id },
{ $set: updateData },
@ -139,6 +165,16 @@ export const createPrinterRouteHandler = async (req, res) => {
// Save the printer
const savedPrinter = await newPrinter.save();

// Create audit log for new printer
await newAuditLog(
{},
newPrinter.toObject(),
savedPrinter._id,
'Printer',
req.user._id,
'User'
);

logger.info(`Created new printer: ${name}`);
res.status(201).send(savedPrinter);
} catch (error) {

@ -3,6 +3,8 @@ import { productModel } from "../../schemas/product.schema.js";
import { partModel } from "../../schemas/part.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

@ -83,7 +85,12 @@ export const getProductRouteHandler = async (req, res) => {
}

logger.trace(`Product with ID: ${id}:`, product);
res.send(product);

const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');

res.send({...product._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching Product:", error);
res.status(500).send({ error: error.message });
@ -93,10 +100,11 @@ export const getProductRouteHandler = async (req, res) => {
export const editProductRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
var product = null;

try {
// Fetch the product with the given remote address
const product = await productModel.findOne({ _id: id });
product = await productModel.findOne({ _id: id });

if (!product) {
// Error handling
@ -123,7 +131,15 @@ export const editProductRouteHandler = async (req, res) => {
marginOrPrice: req.body.marginOrPrice,
};

console.log("ID:", id);
// Create audit log before updating
await newAuditLog(
product.toObject(),
updateData,
id,
'Product',
req.user._id,
'User'
);

const result = await productModel.updateOne(
{ _id: id },
@ -160,6 +176,16 @@ export const newProductRouteHandler = async (req, res) => {
res.status(500).send({ error: "No product created." });
}

// Create audit log for new product
await newAuditLog(
{},
newProduct,
newProductResult._id,
'Product',
req.user._id,
'User'
);

const parts = req.body.parts || [];
const productId = newProductResult._id;

@ -179,6 +205,16 @ export const newProductRouteHandler = async (req, res) => {
res.status(500).send({ error: "No parts created." });
}
partIds.push(newPartResult._id);

// Create audit log for each new part
await newAuditLog(
{},
newPart,
newPartResult._id,
'Part',
req.user._id,
'User'
);
}

const editProductResult = await productModel.updateOne(

@ -1,56 +1,98 @@
import dotenv from "dotenv";
import { printJobModel } from "../../schemas/printjob.schema.js";
import { printSubJobModel } from "../../schemas/printsubjob.schema.js";
import { jobModel } from "../../schemas/job.schema.js";
import { subJobModel } from "../../schemas/subjob.schema.js";
import log4js from "log4js";
import { printerModel } from "../../schemas/printer.schema.js";
import { filamentModel } from "../../schemas/filament.schema.js";
import { gcodeFileModel } from "../../schemas/gcodefile.schema.js";
import { partModel } from "../../schemas/part.schema.js";
import { productModel } from "../../schemas/product.schema.js";
import { vendorModel } from "../../schemas/vendor.schema.js";
import { filamentStockModel } from "../../schemas/filamentstock.schema.js";
import { stockEventModel } from "../../schemas/stockevent.schema.js";
import { stockAuditModel } from "../../schemas/stockaudit.schema.js";
import { partStockModel } from "../../schemas/partstock.schema.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";
import { userModel } from "../../schemas/user.schema.js";
import { noteTypeModel } from "../../schemas/notetype.schema.js";
import { noteModel } from "../../schemas/note.schema.js";
import mongoose from "mongoose";

dotenv.config();

const logger = log4js.getLogger("PrintJobs");
const logger = log4js.getLogger("Jobs");
logger.level = process.env.LOG_LEVEL;

const formatPrintersResponse = (printers) => {
return printers.map((printer) => ({
id: printer.id,
name: printer.name,
link: `/production/printers/info?printerId=${printer.id}`,
printer: printer,
}));
// Map prefixes to models and id fields
const PREFIX_MODEL_MAP = {
PRN: { model: printerModel, idField: '_id', type: 'printer' },
FIL: { model: filamentModel, idField: '_id', type: 'filament' },
SPL: { model: null, idField: '_id', type: 'spool' }, // No spool model found
GCF: { model: gcodeFileModel, idField: '_id', type: 'gcodefile' },
JOB: { model: jobModel, idField: '_id', type: 'job' },
PRT: { model: partModel, idField: '_id', type: 'part' },
PRD: { model: productModel, idField: '_id', type: 'product' },
VEN: { model: vendorModel, idField: '_id', type: 'vendor' },
SJB: { model: subJobModel, idField: '_id', type: 'subjob' },
FLS: { model: filamentStockModel, idField: '_id', type: 'filamentstock' },
SEV: { model: stockEventModel, idField: '_id', type: 'stockevent' },
SAU: { model: stockAuditModel, idField: '_id', type: 'stockaudit' },
PTS: { model: partStockModel, idField: '_id', type: 'partstock' },
PDS: { model: null, idField: '_id', type: 'productstock' }, // No productStockModel found
ADL: { model: auditLogModel, idField: '_id', type: 'auditlog' },
USR: { model: userModel, idField: '_id', type: 'user' },
NTY: { model: noteTypeModel, idField: '_id', type: 'notetype' },
NTE: { model: noteModel, idField: '_id', type: 'note' },
};
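
A sketch of how a prefixed spotlight query resolves through PREFIX_MODEL_MAP; the prefix/delimiter slicing is assumed from the handler below, and the id is hypothetical:

const query = "JOB:65f0c2a1b4e8d93f12345678";
const prefix = query.substring(0, 3);           // "JOB"
const suffix = query.substring(4);              // "65f0c2a1b4e8d93f12345678"
const prefixEntry = PREFIX_MODEL_MAP[prefix];   // { model: jobModel, idField: '_id', type: 'job' }
const doc = await prefixEntry.model
  .findOne({ [prefixEntry.idField]: suffix.toLowerCase() })
  .lean();
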
const formatJobsResponse = (jobs) => {
return jobs.map((job) => ({
id: job.id,
name: job.gcodeFile.name,
link: `/production/printjobs/info?printJobId=${job.id}`,
job: job,
}));
// Helper function to build search filter from query parameters
const buildSearchFilter = (params) => {
const filter = {};

for (const [key, value] of Object.entries(params)) {
// Skip pagination and limit parameters as they're not search filters
if (key === 'limit' || key === 'page') continue;

// Handle different field types
if (key === 'name') {
filter.name = { $regex: value, $options: 'i' }; // Case-insensitive search
} else if (key === 'id' || key === '_id') {
if (mongoose.Types.ObjectId.isValid(value)) {
filter._id = value;
}
} else if (key === 'tags') {
filter.tags = { $in: [new RegExp(value, 'i')] };
} else if (key === 'state') {
filter['state.type'] = value;
} else if (key.includes('.')) {
// Handle nested fields like 'state.type', 'address.city', etc.
filter[key] = { $regex: value, $options: 'i' };
} else {
// For all other fields, do a case-insensitive search
filter[key] = { $regex: value, $options: 'i' };
}
}

return filter;
};
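
A sketch of the filter buildSearchFilter produces; the query string is hypothetical:

// e.g. GET /spotlight/NTE?name=bracket&state=draft&limit=5
buildSearchFilter({ name: "bracket", state: "draft", limit: "5" });
// => {
//      name: { $regex: "bracket", $options: "i" },
//      "state.type": "draft"
//    }
// `limit` and `page` are skipped: they control pagination, not matching.
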
const formatFilamentsResponse = (filaments) => {
return filaments.map((filament) => ({
id: filament.id,
name: filament.name,
link: `/management/filaments/info?filamentId=${filament.id}`,
filament: filament,
}));
};

const formatGCodeFilesResponse = (gcodeFiles) => {
return gcodeFiles.map((gcodeFile) => ({
id: gcodeFile.id,
name: gcodeFile.name,
link: `/management/gcodefiles/info?gcodeFileId=${gcodeFile.id}`,
gcodeFile: gcodeFile,
}));
};
const trimSpotlightObject = (object) => {
return {
_id: object._id,
name: object.name || undefined,
state: object.state && object?.state.type? { type: object.state.type } : undefined,
tags: object.tags || undefined,
email: object.email || undefined,
color: object.color || undefined,
updatedAt: object.updatedAt || undefined,
};
}

export const getSpotlightRouteHandler = async (req, res) => {
try {
const query = req.params.query;
if (query.length <= 4) {
const queryParams = req.query;
if (query.length < 3) {
res.status(200).send([]);
return;
}
@ -59,55 +101,69 @@ export const getSpotlightRouteHandler = async (req, res) => {
const suffix = query.substring(4);

if (delimiter == ":") {
switch (prefix) {
case "PRN":
const printer = await printerModel.findOne({ id: suffix });
if (!printer) {
res.status(404).send({ error: "Job not found" });
} else {
res.status(200).send(formatPrintersResponse([printer]));
const prefixEntry = PREFIX_MODEL_MAP[prefix];
if (!prefixEntry || !prefixEntry.model) {
res.status(400).send({ error: "Invalid or unsupported prefix" });
return;
}
break;
case "JOB":
const job = await printJobModel
.findOne({ _id: suffix })
.populate("gcodeFile", "name");
if (!job) {
res.status(404).send({ error: "Job not found" });
} else {
res.status(200).send(formatJobsResponse([job]));
const { model, idField } = prefixEntry;

// Validate ObjectId if the idField is '_id'
if (idField === '_id' && !mongoose.Types.ObjectId.isValid(suffix)) {
res.status(404).send({ error: `${prefix} not found` });
return;
}
break;
case "FIL":
const filament = await filamentModel.findOne({ _id: suffix });
if (!filament) {
res.status(404).send({ error: "Filament not found" });
} else {
res.status(200).send(formatFilamentsResponse([filament]));

// Find the object by the correct field
const queryObj = {};
queryObj[idField] = suffix.toLowerCase();
let doc = await model.findOne(queryObj).lean();
if (!doc) {
res.status(404).send({ error: `${prefix} not found` });
return;
}
break;
case "GCF":
const gcodeFile = await gcodeFileModel.findOne({ _id: suffix });
if (!gcodeFile) {
res.status(404).send({ error: "Filament not found" });
} else {
res.status(200).send(formatGCodeFilesResponse([gcodeFile]));
// Build the response with only the required fields
const response = trimSpotlightObject(doc)
res.status(200).send(response);
return;
}
break;
case "SBJ":
const subJob = await printSubJobModel.findOne({ id: suffix });
if (!subJob) {
res.status(404).send({ error: "SubJob not found" });
} else {
res.status(200).send([subJob]);

console.log(queryParams)

if (Object.keys(queryParams).length > 0) {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
console.log(prefixEntry)
if (!prefixEntry || !prefixEntry.model) {
res.status(400).send({ error: "Invalid or unsupported prefix" });
return;
}
break;
default:
res.status(400).send({ error: "Invalid prefix" });
const { model } = prefixEntry;

// Use req.query for search parameters

if (Object.keys(queryParams).length === 0) {
res.status(400).send({ error: "No search parameters provided" });
return;
}

// Build search filter
const searchFilter = buildSearchFilter(queryParams);

// Perform search with limit
const limit = parseInt(req.query.limit) || 10;
const docs = await model.find(searchFilter)
.limit(limit)
.sort({ updatedAt: -1 })
.lean();

// Format response
const response = docs.map(doc => (trimSpotlightObject(doc)));

res.status(200).send(response);
return;
}
} catch (error) {
logger.error("Error listing print jobs:", error);
logger.error("Error in spotlight lookup:", error);
res.status(500).send({ error: error });
}
};

174
src/services/stockaudits/index.js
Normal file
@ -0,0 +1,174 @@
import dotenv from "dotenv";
import { stockAuditModel } from "../../schemas/stockaudit.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

const logger = log4js.getLogger("Stock Audits");
logger.level = process.env.LOG_LEVEL;

export const listStockAuditsRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = "",
filter = {},
) => {
try {
const skip = (page - 1) * limit;
let stockAudits;
let aggregateCommand = [];

// Lookup createdBy user
aggregateCommand.push({
$lookup: {
from: "users",
localField: "createdBy",
foreignField: "_id",
as: "createdBy",
},
});

aggregateCommand.push({ $unwind: "$createdBy" });

if (Object.keys(filter).length > 0) {
aggregateCommand.push({ $match: filter });
}

if (property != "") {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
}

aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });

stockAudits = await stockAuditModel.aggregate(aggregateCommand);

logger.trace(
`List of stock audits (Page ${page}, Limit ${limit}, Property ${property}):`,
stockAudits,
);
res.send(stockAudits);
} catch (error) {
logger.error("Error listing stock audits:", error);
res.status(500).send({ error: error });
}
};

export const getStockAuditRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const stockAudit = await stockAuditModel
.findOne({
_id: id,
})
.populate("createdBy")
.populate("items.filamentStock")
.populate("items.partStock");

if (!stockAudit) {
logger.warn(`Stock audit not found with supplied id.`);
return res.status(404).send({ error: "Stock audit not found." });
}

logger.trace(`Stock audit with ID: ${id}:`, stockAudit);

const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');

res.send({...stockAudit._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching stock audit:", error);
res.status(500).send({ error: error.message });
}
};

export const newStockAuditRouteHandler = async (req, res) => {
try {
const newStockAudit = {
type: req.body.type,
status: req.body.status || "pending",
notes: req.body.notes,
items: req.body.items.map(item => ({
type: item.type,
stock: item.type === "filament"
? new mongoose.Types.ObjectId(item.filamentStock)
: new mongoose.Types.ObjectId(item.partStock),
expectedQuantity: item.expectedQuantity,
actualQuantity: item.actualQuantity,
notes: item.notes
})),
createdBy: new mongoose.Types.ObjectId(req.body.createdBy),
completedAt: req.body.status === "completed" ? new Date() : null
};

const result = await stockAuditModel.create(newStockAudit);
if (!result) {
logger.error("No stock audit created.");
return res.status(500).send({ error: "No stock audit created." });
}
return res.send({ status: "ok", id: result._id });
} catch (error) {
logger.error("Error adding stock audit:", error);
return res.status(500).send({ error: error.message });
}
};
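
A sketch of a request body newStockAuditRouteHandler accepts (ids, type, and quantities hypothetical). `filamentStock` is read for filament items and `partStock` for part items; both end up in the item's single `stock` field:

{
  "type": "cycle-count",
  "status": "completed",
  "notes": "Monthly shelf count",
  "createdBy": "64ab12cd34ef56ab78cd90ef",
  "items": [
    { "type": "filament", "filamentStock": "65f0c2a1b4e8d93f12345678",
      "expectedQuantity": 10, "actualQuantity": 9, "notes": "one spool short" },
    { "type": "part", "partStock": "65f0c2a1b4e8d93f87654321",
      "expectedQuantity": 40, "actualQuantity": 40 }
  ]
}
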
export const updateStockAuditRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const updateData = {
...req.body,
items: req.body.items?.map(item => ({
type: item.type,
stock: item.type === "filament"
? new mongoose.Types.ObjectId(item.filamentStock)
: new mongoose.Types.ObjectId(item.partStock),
expectedQuantity: item.expectedQuantity,
actualQuantity: item.actualQuantity,
notes: item.notes
})),
completedAt: req.body.status === "completed" ? new Date() : null
};

const result = await stockAuditModel.findByIdAndUpdate(
id,
{ $set: updateData },
{ new: true }
);

if (!result) {
logger.warn(`Stock audit not found with supplied id.`);
return res.status(404).send({ error: "Stock audit not found." });
}

logger.trace(`Updated stock audit with ID: ${id}:`, result);
res.send(result);
} catch (error) {
logger.error("Error updating stock audit:", error);
res.status(500).send({ error: error.message });
}
};

export const deleteStockAuditRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const result = await stockAuditModel.findByIdAndDelete(id);

if (!result) {
logger.warn(`Stock audit not found with supplied id.`);
return res.status(404).send({ error: "Stock audit not found." });
}

logger.trace(`Deleted stock audit with ID: ${id}`);
res.send({ status: "ok" });
} catch (error) {
logger.error("Error deleting stock audit:", error);
res.status(500).send({ error: error.message });
}
};
139
src/services/stockevents/index.js
Normal file
@ -0,0 +1,139 @@
import dotenv from "dotenv";
import { stockEventModel } from "../../schemas/stockevent.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";

dotenv.config();

const logger = log4js.getLogger("Stock Events");
logger.level = process.env.LOG_LEVEL;

export const listStockEventsRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = "",
filter = {},
sort = "",
order = "ascend"
) => {
try {
const skip = (page - 1) * limit;
let stockEvents;
let aggregateCommand = [];

// Lookup filamentStock
aggregateCommand.push({
$lookup: {
from: "filamentstocks",
localField: "filamentStock",
foreignField: "_id",
as: "filamentStock",
},
});

aggregateCommand.push({ $unwind: "$filamentStock" });

// Conditionally lookup subJob only if it exists
aggregateCommand.push({
$lookup: {
from: "subjobs",
localField: "subJob",
foreignField: "_id",
as: "subJob",
},
});

aggregateCommand.push({
$addFields: {
subJob: {
$cond: {
if: { $eq: [{ $size: "$subJob" }, 0] },
then: null,
else: { $arrayElemAt: ["$subJob", 0] }
}
}
}
});

if (Object.keys(filter).length > 0) {
aggregateCommand.push({ $match: filter });
}

if (property != "") {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
}

// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === "descend" ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}

// Add pagination after sorting
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });

console.log('Aggregation pipeline:', JSON.stringify(aggregateCommand, null, 2));

stockEvents = await stockEventModel.aggregate(aggregateCommand);

logger.trace(
`List of stock events (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
stockEvents,
);
res.send(stockEvents);
} catch (error) {
logger.error("Error listing stock events:", error);
res.status(500).send({ error: error });
}
};
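
A note on the $addFields/$cond step above: a bare $unwind on subJob would drop any stock event whose lookup array came back empty, i.e. events not tied to a subjob. The $cond keeps those rows and normalizes [] to null. A near-equivalent sketch using $unwind's own option instead:

aggregateCommand.push({
  $unwind: { path: "$subJob", preserveNullAndEmptyArrays: true },
});
// [] -> document kept with subJob absent; [ {...} ] -> subJob: {...}
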
export const getStockEventRouteHandler = async (req, res) => {
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const stockEvent = await stockEventModel
.findOne({
_id: id,
})
.populate("filamentStock")
.populate("subJob")
.populate("job");

if (!stockEvent) {
logger.warn(`Stock event not found with supplied id.`);
return res.status(404).send({ error: "Stock event not found." });
}

logger.trace(`Stock event with ID: ${id}:`, stockEvent);
res.send(stockEvent);
} catch (error) {
logger.error("Error fetching stock event:", error);
res.status(500).send({ error: error.message });
}
};

export const newStockEventRouteHandler = async (req, res) => {
try {
const newStockEvent = {
type: req.body.type,
value: req.body.value,
subJob: req.body.subJob ? new mongoose.Types.ObjectId(req.body.subJob) : null,
job: req.body.job ? new mongoose.Types.ObjectId(req.body.job) : null,
filamentStock: new mongoose.Types.ObjectId(req.body.filamentStock),
timestamp: new Date()
};

const result = await stockEventModel.create(newStockEvent);
if (!result) {
logger.error("No stock event created.");
return res.status(500).send({ error: "No stock event created." });
}
return res.send({ status: "ok", id: result._id });
} catch (error) {
logger.error("Error adding stock event:", error);
return res.status(500).send({ error: error.message });
}
};
139
src/services/users/index.js
Normal file
@ -0,0 +1,139 @@
import dotenv from "dotenv";
import { userModel } from "../../schemas/user.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

const logger = log4js.getLogger("Users");
logger.level = process.env.LOG_LEVEL;

export const listUsersRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = "",
filter = {},
) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;

let user;
let aggregateCommand = [];

if (Object.keys(filter).length > 0) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}

if (property != "") {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
}

aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });

console.log(aggregateCommand);

user = await userModel.aggregate(aggregateCommand);

logger.trace(
`List of users (Page ${page}, Limit ${limit}, Property ${property}):`,
user,
);
res.send(user);
} catch (error) {
logger.error("Error listing users:", error);
res.status(500).send({ error: error });
}
};

export const getUserRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the user with the given ID
const user = await userModel.findOne({
_id: id,
});

if (!user) {
logger.warn(`User not found with supplied id.`);
return res.status(404).send({ error: "User not found." });
}

logger.trace(`User with ID: ${id}:`, user);

const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');

res.send({...user._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching User:", error);
res.status(500).send({ error: error.message });
}
};

export const editUserRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the user with the given ID
const user = await userModel.findOne({ _id: id });

if (!user) {
// Error handling
logger.warn(`User not found with supplied id.`);
return res.status(404).send({ error: "User not found." });
}

logger.trace(`User with ID: ${id}:`, user);

try {
const updateData = {
updatedAt: new Date(),
username: req.body.username,
name: req.body.name,
firstName: req.body.firstName,
lastName: req.body.lastName,
email: req.body.email,
};

console.log(req.user)

// Create audit log before updating
await newAuditLog(
user.toObject(),
updateData,
id,
'User',
req.user._id,
'User'
);

const result = await userModel.updateOne(
{ _id: id },
{ $set: updateData },
);
if (result.nModified === 0) {
logger.error("No User updated.");
res.status(500).send({ error: "No users updated." });
}
} catch (updateError) {
logger.error("Error updating user:", updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
} catch (fetchError) {
logger.error("Error fetching user:", fetchError);
res.status(500).send({ error: fetchError.message });
}
};
32
src/services/vendors/index.js
vendored
@ -3,6 +3,8 @@ import { vendorModel } from "../../schemas/vendor.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/auditlog.schema.js";

dotenv.config();

@ -69,7 +71,12 @@ export const getVendorRouteHandler = async (req, res) => {
}

logger.trace(`Vendor with ID: ${id}:`, vendor);
res.send(vendor);

const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');

res.send({...vendor._doc, auditLogs: auditLogs});
} catch (error) {
logger.error("Error fetching Vendor:", error);
res.status(500).send({ error: error.message });
@ -102,6 +109,18 @@ export const editVendorRouteHandler = async (req, res) => {
email: req.body.email,
};

console.log(req.user)

// Create audit log before updating
await newAuditLog(
vendor.toObject(),
updateData,
id,
'Vendor',
req.user._id,
'User'
);

const result = await vendorModel.updateOne(
{ _id: id },
{ $set: updateData },
@ -131,6 +150,17 @@ export const newVendorRouteHandler = async (req, res) => {
logger.error("No vendor created.");
res.status(500).send({ error: "No vendor created." });
}

// Create audit log for new vendor
await newAuditLog(
{},
newVendor,
result._id,
'Vendor',
req.user.id, // Assuming user ID is available in req.user
'User'
);

res.status(200).send({ status: "ok" });
} catch (updateError) {
logger.error("Error updating vendor:", updateError);

@ -1,8 +1,34 @@
function parseStringIfNumber(input) {
if (typeof input === "string" && !isNaN(input) && !isNaN(parseFloat(input))) {
return parseFloat(input);
import { ObjectId } from "mongodb"; // Only needed in Node.js with MongoDB driver

function parseFilter(property, value) {
if (typeof value === "string") {
const trimmed = value.trim();

// Handle booleans
if (trimmed.toLowerCase() === "true") return { [property]: true };
if (trimmed.toLowerCase() === "false") return { [property]: false };

// Handle ObjectId (24-char hex)
if (/^[a-f\d]{24}$/i.test(trimmed) && trimmed.length >= 24) {
return { [property]: new ObjectId(trimmed) };
}
return input;

// Handle numbers
if (!isNaN(trimmed)) {
return { [property]: parseFloat(trimmed) };
}

// Default to case-insensitive regex for non-numeric strings
return {
[property]: {
$regex: trimmed,
$options: "i"
}
};
}

// Handle actual booleans, numbers, objects, etc.
return { [property]: value };
}
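
A quick sketch of what parseFilter returns for each input class (values hypothetical):

parseFilter("active", "true");                      // { active: true }
parseFilter("price", "12.5");                       // { price: 12.5 }
parseFilter("vendor", "65f0c2a1b4e8d93f12345678");  // { vendor: new ObjectId("65f0c2a1...") }
parseFilter("name", "petg");                        // { name: { $regex: "petg", $options: "i" } }
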
function convertToCamelCase(obj) {
@ -248,4 +274,49 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
return useCamelCase ? convertToCamelCase(configObject) : configObject;
}

export { parseStringIfNumber, convertToCamelCase, extractConfigBlock };
function getChangedValues(oldObj, newObj) {
const changes = {};

// Check all keys in the new object
for (const key in newObj) {
// Skip if the key is _id or timestamps
if (key === '_id' || key === 'createdAt' || key === 'updatedAt') continue;

// If the old value is different from the new value, include it
if (JSON.stringify(oldObj[key]) !== JSON.stringify(newObj[key])) {
changes[key] = newObj[key];
}
}

return changes;
}
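
For example (timestamps hypothetical), only genuinely changed keys survive, and _id/createdAt/updatedAt are always skipped:

getChangedValues(
  { name: "PLA", color: "red", updatedAt: oldDate },
  { name: "PLA", color: "blue", updatedAt: newDate },
);
// => { color: "blue" }
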

async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
const { auditLogModel } = await import('../schemas/auditlog.schema.js');

// Get only the changed values
const changedValues = getChangedValues(oldValue, newValue);

// If no values changed, don't create an audit log
if (Object.keys(changedValues).length === 0) {
return;
}

const auditLog = new auditLogModel({
oldValue,
newValue: changedValues,
target: targetId,
targetModel,
owner: ownerId,
ownerModel,
});

await auditLog.save();
}
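
A typical call, mirroring the edit handlers above:

await newAuditLog(
  note.toObject(),  // old state
  updateData,       // proposed new state
  id,               // target document id
  'Note',           // targetModel
  req.user._id,     // acting user
  'User'            // ownerModel
);

One caveat: deleteNoteRouteHandler passes a seventh 'DELETE' argument, which this six-parameter signature silently ignores, so deletions are logged like any other change.
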
export {
parseFilter,
convertToCamelCase,
extractConfigBlock,
newAuditLog
};