Remove unused files and refactor imports: deleted obsolete files (including passport.js and others), while updating import paths in various services and routes for consistency. Added ESLint and Prettier configurations to enhance code quality.

This commit is contained in:
Tom Butcher 2025-06-29 21:53:18 +01:00
parent 65ccd0cd90
commit a5f3b75be8
45 changed files with 1912 additions and 2051 deletions

View File

@ -1,86 +0,0 @@
import bcrypt from "bcrypt";
import dotenv from "dotenv";
import { userModel } from "../../schemas/user.schema.js";
import { printerModel } from "../../schemas/printer.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
dotenv.config();
const logger = log4js.getLogger("Printers");
logger.level = process.env.LOG_LEVEL;
/**
 * GET handler: list printers with simple skip/limit pagination.
 *
 * @param {import("express").Request} req - incoming request (unused directly).
 * @param {import("express").Response} res - response; receives the printer array
 *   or a 500 with an error message.
 * @param {number|string} [page=1] - 1-based page number (query values arrive as strings).
 * @param {number|string} [limit=25] - page size.
 */
export const listPrintersRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25
) => {
  try {
    // Query-string values are strings ("2"); coerce and clamp to sane minimums.
    const pageNum = Math.max(Number.parseInt(page, 10) || 1, 1);
    const limitNum = Math.max(Number.parseInt(limit, 10) || 25, 1);
    // Calculate the skip value based on the page number and limit
    const skip = (pageNum - 1) * limitNum;
    // Fetch printers with pagination
    const printers = await printerModel.find().skip(skip).limit(limitNum);
    logger.trace(`List of printers (Page ${pageNum}, Limit ${limitNum}):`);
    res.send(printers);
  } catch (error) {
    // Fix: message previously said "users"; also send only the message —
    // a raw Error serializes to {} in JSON and can leak internals.
    logger.error("Error listing printers:", error);
    res.status(500).send({ error: error.message });
  }
};
/**
 * GET handler: fetch a single printer by its remote address.
 * Responds 404 when no printer matches, 500 on a database error.
 */
export const getPrinterRouteHandler = async (req, res) => {
  const { remoteAddress } = req.params;
  try {
    const found = await printerModel.findOne({ remoteAddress });
    if (found) {
      logger.trace(`Printer with remote address ${remoteAddress}:`, found);
      return res.send(found);
    }
    logger.warn(`Printer with remote address ${remoteAddress} not found.`);
    res.status(404).send({ error: "Printer not found" });
  } catch (err) {
    logger.error("Error fetching printer:", err);
    res.status(500).send({ error: err.message });
  }
};
/**
 * PUT handler: update a printer identified by its remote address with the
 * fields supplied in the request body ($set semantics).
 * Responds "OK" on success, 404 when the printer is unknown, 500 on failure.
 */
export const editPrinterRouteHandler = async (req, res) => {
  const remoteAddress = req.params.remoteAddress;
  try {
    // Fetch the printer with the given remote address
    const printer = await printerModel.findOne({ remoteAddress });
    if (!printer) {
      logger.warn(`Printer with remote address ${remoteAddress} not found.`);
      return res.status(404).send({ error: "Printer not found" });
    }
    logger.trace(`Editing printer with remote address ${remoteAddress}:`, printer);
    try {
      const result = await printerModel.updateOne(
        { remoteAddress: remoteAddress },
        { $set: req.body }
      );
      // Support both the current (modifiedCount) and legacy (nModified)
      // Mongoose result shapes — assumes one of the two is present.
      const modified = result.modifiedCount ?? result.nModified;
      if (modified === 0) {
        logger.error("No printers updated.");
        // Fix: must return here — previously fell through to res.send("OK"),
        // sending a second response after the 500.
        return res.status(500).send({ error: "No printers updated." });
      }
    } catch (updateError) {
      logger.error("Error updating printer:", updateError);
      // Fix: return to avoid a double-send via res.send("OK") below.
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching printer:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

1076
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -44,6 +44,10 @@
"@babel/plugin-proposal-object-rest-spread": "^7.18.0",
"@babel/preset-env": "^7.18.2",
"@babel/register": "^7.17.7",
"eslint": "^8.57.1",
"eslint-config-prettier": "^10.1.5",
"eslint-plugin-prettier": "^5.5.1",
"prettier": "^3.6.2",
"sequelize-cli": "^6.4.1",
"standard": "^17.1.0"
},

View File

@ -1,33 +1,33 @@
import mongoose from "mongoose";
import bcrypt from "bcrypt";
import { userModel } from "../schemas/management/user.schema.js";
import { dbConnect } from "./index.js";
import mongoose from 'mongoose';
import bcrypt from 'bcrypt';
import { userModel } from '../schemas/management/user.schema.js';
import { dbConnect } from './mongo.js';
const ReseedAction = () => {
async function clear() {
dbConnect();
await userModel.deleteMany({});
console.log("DB cleared");
console.log('DB cleared');
}
async function seedDB() {
await clear();
const salt = await bcrypt.genSalt(10);
const hashPassword = await bcrypt.hash("secret", salt);
const hashPassword = await bcrypt.hash('secret', salt);
const user = {
_id: mongoose.Types.ObjectId(1),
name: "Admin",
email: "admin@jsonapi.com",
name: 'Admin',
email: 'admin@jsonapi.com',
password: hashPassword,
createdAt: new Date(),
profile_image: "../../images/admin.jpg",
profile_image: '../../images/admin.jpg',
};
const admin = new userModel(user);
await admin.save();
console.log("DB seeded");
console.log('DB seeded');
}
seedDB();

View File

@ -1,8 +1,8 @@
import mongoose from "mongoose";
import dotenv from "dotenv";
import log4js from "log4js";
import mongoose from 'mongoose';
import dotenv from 'dotenv';
import log4js from 'log4js';
const logger = log4js.getLogger("MongoDB");
const logger = log4js.getLogger('MongoDB');
logger.level = process.env.LOG_LEVEL;
dotenv.config();
@ -11,7 +11,7 @@ dotenv.config();
mongoose.set('strictQuery', false);
function dbConnect() {
mongoose.connection.once("open", () => logger.info("Database connected."));
mongoose.connection.once('open', () => logger.info('Database connected.'));
return mongoose.connect(
`mongodb://${process.env.DB_LINK}/farmcontrol?retryWrites=true&w=majority`,
{}

View File

@ -1,9 +1,9 @@
import express from "express";
import bodyParser from "body-parser";
import cors from "cors";
import dotenv from "dotenv";
import { expressSession, keycloak } from "./keycloak.js";
import { dbConnect } from "./mongo/index.js";
import express from 'express';
import bodyParser from 'body-parser';
import cors from 'cors';
import dotenv from 'dotenv';
import { expressSession, keycloak } from './keycloak.js';
import { dbConnect } from './database/mongo.js';
import {
authRoutes,
userRoutes,
@ -22,24 +22,24 @@ import {
stockEventRoutes,
auditLogRoutes,
noteTypeRoutes,
noteRoutes
} from "./routes/index.js";
import path from "path";
import * as fs from "fs";
import cron from "node-cron";
import ReseedAction from "./mongo/ReseedAction.js";
import log4js from "log4js";
import { populateUserMiddleware } from "./services/misc/auth.js";
noteRoutes,
} from './routes/index.js';
import path from 'path';
import * as fs from 'fs';
import cron from 'node-cron';
import ReseedAction from './database/ReseedAction.js';
import log4js from 'log4js';
import { populateUserMiddleware } from './services/misc/auth.js';
dotenv.config();
const PORT = process.env.PORT || 8080;
const app = express();
const logger = log4js.getLogger("App");
const logger = log4js.getLogger('App');
logger.level = process.env.LOG_LEVEL;
app.use(log4js.connectLogger(logger, { level: "trace" }));
app.use(log4js.connectLogger(logger, { level: 'trace' }));
const whitelist = [process.env.APP_URL_CLIENT];
const corsOptions = {
@ -47,7 +47,7 @@ const corsOptions = {
if (!origin || whitelist.indexOf(origin) !== -1) {
callback(null, true);
} else {
callback(new Error("Not allowed by CORS"));
callback(new Error('Not allowed by CORS'));
}
},
credentials: true,
@ -56,37 +56,35 @@ const corsOptions = {
dbConnect();
app.use(cors(corsOptions));
app.use(
bodyParser.json({ type: "application/json", strict: false, limit: "50mb" }),
);
app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' }));
app.use(express.json());
app.use(expressSession);
app.use(keycloak.middleware());
app.use(populateUserMiddleware);
app.get("/", function (req, res) {
const __dirname = fs.realpathSync(".");
res.sendFile(path.join(__dirname, "/src/landing/index.html"));
app.get('/', function (req, res) {
const __dirname = fs.realpathSync('.');
res.sendFile(path.join(__dirname, '/src/landing/index.html'));
});
app.use("/auth", authRoutes);
app.use("/users", userRoutes)
app.use("/spotlight", spotlightRoutes);
app.use("/printers", printerRoutes);
app.use("/jobs", jobRoutes);
app.use("/gcodefiles", gcodeFileRoutes);
app.use("/filaments", filamentRoutes);
app.use("/parts", partRoutes);
app.use("/products", productRoutes);
app.use("/vendors", vendorRoutes);
app.use("/materials", materialRoutes);
app.use("/partstocks", partStockRoutes);
app.use("/filamentstocks", filamentStockRoutes);
app.use("/stockevents", stockEventRoutes);
app.use("/stockaudits", stockAuditRoutes);
app.use("/auditlogs", auditLogRoutes);
app.use("/notetypes", noteTypeRoutes);
app.use("/notes", noteRoutes)
app.use('/auth', authRoutes);
app.use('/users', userRoutes);
app.use('/spotlight', spotlightRoutes);
app.use('/printers', printerRoutes);
app.use('/jobs', jobRoutes);
app.use('/gcodefiles', gcodeFileRoutes);
app.use('/filaments', filamentRoutes);
app.use('/parts', partRoutes);
app.use('/products', productRoutes);
app.use('/vendors', vendorRoutes);
app.use('/materials', materialRoutes);
app.use('/partstocks', partStockRoutes);
app.use('/filamentstocks', filamentStockRoutes);
app.use('/stockevents', stockEventRoutes);
app.use('/stockaudits', stockAuditRoutes);
app.use('/auditlogs', auditLogRoutes);
app.use('/notetypes', noteTypeRoutes);
app.use('/notes', noteRoutes);
if (process.env.SCHEDULE_HOUR) {
cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {

View File

@ -1,26 +1,25 @@
import Keycloak from "keycloak-connect";
import session from "express-session";
import dotenv from "dotenv";
import axios from "axios";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import Keycloak from 'keycloak-connect';
import session from 'express-session';
import dotenv from 'dotenv';
import axios from 'axios';
import jwt from 'jsonwebtoken';
import log4js from 'log4js';
dotenv.config();
const logger = log4js.getLogger("Keycloak");
logger.level = process.env.LOG_LEVEL || "info";
const logger = log4js.getLogger('Keycloak');
logger.level = process.env.LOG_LEVEL || 'info';
// Initialize Keycloak
const keycloakConfig = {
realm: process.env.KEYCLOAK_REALM || "farm-control",
"auth-server-url": process.env.KEYCLOAK_URL || "http://localhost:8080/auth",
"ssl-required": process.env.NODE_ENV === "production" ? "external" : "none",
resource: process.env.KEYCLOAK_CLIENT_ID || "farmcontrol-client",
"confidential-port": 0,
"bearer-only": true,
"public-client": false,
"use-resource-role-mappings": true,
"verify-token-audience": true,
realm: process.env.KEYCLOAK_REALM || 'farm-control',
'auth-server-url': process.env.KEYCLOAK_URL || 'http://localhost:8080/auth',
'ssl-required': process.env.NODE_ENV === 'production' ? 'external' : 'none',
resource: process.env.KEYCLOAK_CLIENT_ID || 'farmcontrol-client',
'confidential-port': 0,
'bearer-only': true,
'public-client': false,
'use-resource-role-mappings': true,
'verify-token-audience': true,
credentials: {
secret: process.env.KEYCLOAK_CLIENT_SECRET,
},
@ -29,7 +28,7 @@ const keycloakConfig = {
const memoryStore = new session.MemoryStore();
var expressSession = session({
secret: process.env.SESSION_SECRET || "n00Dl3s23!",
secret: process.env.SESSION_SECRET || 'n00Dl3s23!',
resave: false,
saveUninitialized: true, // Set this to true to ensure session is initialized
store: memoryStore,
@ -60,15 +59,15 @@ const isAuthenticated = async (req, res, next) => {
}),
{
headers: {
"Content-Type": "application/x-www-form-urlencoded",
'Content-Type': 'application/x-www-form-urlencoded',
},
}
);
const introspection = response.data;
if (!introspection.active) {
logger.info("Token is not active");
return res.status(401).json({ error: "Not authenticated" });
logger.info('Token is not active');
return res.status(401).json({ error: 'Not authenticated' });
}
// Parse token to extract user info
@ -83,20 +82,20 @@ const isAuthenticated = async (req, res, next) => {
return next();
} catch (error) {
logger.error("Token verification error:", error.message);
return res.status(401).json({ error: "Not authenticated" });
logger.error('Token verification error:', error.message);
return res.status(401).json({ error: 'Not authenticated' });
}
}
// Fallback to session-based authentication
if (req.session && req.session["keycloak-token"]) {
const sessionToken = req.session["keycloak-token"];
if (req.session && req.session['keycloak-token']) {
const sessionToken = req.session['keycloak-token'];
if (sessionToken.expires_at > new Date().getTime()) {
return next();
}
}
return res.status(401).json({ error: "Not authenticated" });
return res.status(401).json({ error: 'Not authenticated' });
};
// Helper function to extract roles from token
@ -112,11 +111,7 @@ function extractRoles(token) {
if (token.resource_access) {
for (const client in token.resource_access) {
if (token.resource_access[client].roles) {
roles.push(
...token.resource_access[client].roles.map(
(role) => `${client}:${role}`
)
);
roles.push(...token.resource_access[client].roles.map((role) => `${client}:${role}`));
}
}
}

View File

@ -1,77 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<!-- NOTE(review): title still carries the upstream boilerplate branding while the
     body heading says "Headless CMS with ExpressJS" — confirm which is intended. -->
<title>Node.js API FREE by Creative Tim & UPDIVISION</title>
<link href="https://fonts.googleapis.com/css?family=Nunito:200,600" rel="stylesheet"
/>
<style>
html,
body {
background-color: #fff;
color: #636b6f;
font-family: "Nunito", sans-serif;
font-weight: 200;
height: 100vh;
margin: 0;
}
/* Utility classes used by the centered hero layout below. */
.full-height {
height: 100vh;
}
.flex-center {
align-items: center;
display: flex;
justify-content: center;
}
.position-ref {
position: relative;
}
.top-right {
position: absolute;
right: 10px;
top: 18px;
}
.content {
text-align: center;
}
.title {
font-size: 84px;
}
.links > a {
color: #636b6f;
padding: 0 25px;
font-size: 13px;
font-weight: 600;
letter-spacing: 0.1rem;
text-decoration: none;
text-transform: uppercase;
}
.m-b-md {
margin-bottom: 30px;
}
</style>
</head>
<body>
<!-- Static landing page served at "/" with links to the stack's docs. -->
<div class="flex-center position-ref full-height">
<div class="content">
<div class="title m-b-md">Headless CMS with ExpressJS API:FREE</div>
<div class="links">
<a href="https://expressjs.com/" target="_blank">Express.js</a>
<a href="https://www.mongodb.com/" target="_blank">MongoDB</a>
<a href="https://documenter.getpostman.com/view/8138626/Uze1virp" target="_blank">Documentation</a>
</div>
</div>
</div>
</body>
</html>

View File

@ -1,40 +0,0 @@
import { ExtractJwt } from "passport-jwt";
import passportJWT from "passport-jwt";
import dotenv from "dotenv";
import passport from "passport";
import { userModel } from "./schemas/user.schema.js";
import { hostModel } from "./schemas/host.schema.js";
const JWTStrategy = passportJWT.Strategy;

dotenv.config();

// Register a bearer-token JWT strategy. Tokens carrying a hostId claim
// authenticate a host document; all other tokens authenticate a user by id.
passport.use(
  new JWTStrategy(
    {
      jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
      secretOrKey: process.env.JWT_SECRET,
    },
    (jwtPayload, done) => {
      const lookup = jwtPayload.hostId
        ? hostModel.findOne({ hostId: jwtPayload.hostId })
        : userModel.findOne({ _id: jwtPayload.id });
      return lookup
        .then((principal) => done(null, principal))
        .catch((err) => done(err));
    }
  )
);

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -8,20 +8,20 @@ import {
getFilamentStockRouteHandler,
editFilamentStockRouteHandler,
newFilamentStockRouteHandler,
} from "../../services/inventory/filamentstocks.js";
} from '../../services/inventory/filamentstocks.js';
// list of filamentStocks
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, sort, order } = req.query;
const allowedFilters = ["country"];
const allowedFilters = ['country'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listFilamentStocksRouteHandler(req, res, page, limit, property, filter, sort, order);
});
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newFilamentStockRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getFilamentStockRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editFilamentStockRouteHandler(req, res);
});

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -8,20 +8,20 @@ import {
getPartStockRouteHandler,
editPartStockRouteHandler,
newPartStockRouteHandler,
} from "../../services/inventory/partstocks.js";
} from '../../services/inventory/partstocks.js';
// list of partStocks
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ["country"];
const allowedFilters = ['country'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listPartStocksRouteHandler(req, res, page, limit, property, filter);
});
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newPartStockRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getPartStockRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editPartStockRouteHandler(req, res);
});

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -9,20 +9,20 @@ import {
newStockAuditRouteHandler,
updateStockAuditRouteHandler,
deleteStockAuditRouteHandler,
} from "../../services/inventory/stockaudits.js";
} from '../../services/inventory/stockaudits.js';
// List stock audits
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ["status", "type", "createdBy"];
const allowedFilters = ['status', 'type', 'createdBy'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -32,22 +32,22 @@ router.get("/", isAuthenticated, (req, res) => {
});
// Create new stock audit
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newStockAuditRouteHandler(req, res);
});
// Get specific stock audit
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getStockAuditRouteHandler(req, res);
});
// Update stock audit
router.put("/:id", isAuthenticated, (req, res) => {
router.put('/:id', isAuthenticated, (req, res) => {
updateStockAuditRouteHandler(req, res);
});
// Delete stock audit
router.delete("/:id", isAuthenticated, (req, res) => {
router.delete('/:id', isAuthenticated, (req, res) => {
deleteStockAuditRouteHandler(req, res);
});

View File

@ -1,26 +1,26 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
listStockEventsRouteHandler,
getStockEventRouteHandler,
newStockEventRouteHandler,
} from "../../services/inventory/stockevents.js";
} from '../../services/inventory/stockevents.js';
// List stock events
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, sort, order } = req.query;
const allowedFilters = ["type", "filamentStock"];
const allowedFilters = ['type', 'filamentStock'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -29,12 +29,12 @@ router.get("/", isAuthenticated, (req, res) => {
});
// Create new stock event
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newStockEventRouteHandler(req, res);
});
// Get specific stock event
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getStockEventRouteHandler(req, res);
});

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -8,20 +8,20 @@ import {
getMaterialRouteHandler,
editMaterialRouteHandler,
newMaterialRouteHandler,
} from "../../services/management/materials.js";
} from '../../services/management/materials.js';
// list of materials
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ["type", "brand", "diameter", "color"];
const allowedFilters = ['type', 'brand', 'diameter', 'color'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listMaterialsRouteHandler(req, res, page, limit, property, filter);
});
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newMaterialRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getMaterialRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editMaterialRouteHandler(req, res);
});

View File

@ -1,55 +1,42 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import {
listNoteTypesRouteHandler,
getNoteTypeRouteHandler,
editNoteTypeRouteHandler,
newNoteTypeRouteHandler,
} from "../../services/management/notetypes.js";
import { parseFilter } from "../../util/index.js";
} from '../../services/management/notetypes.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
// List note types
router.get("/", isAuthenticated, async (req, res) => {
router.get('/', isAuthenticated, async (req, res) => {
const { page, limit, property, sort, order } = req.query;
const allowedFilters = ["name", "active"];
const allowedFilters = ['name', 'active'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listNoteTypesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
}
);
listNoteTypesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
});
// Get single note type
router.get(
"/:id",
isAuthenticated,
getNoteTypeRouteHandler
);
router.get('/:id', isAuthenticated, getNoteTypeRouteHandler);
// Edit note type
router.put(
"/:id",
isAuthenticated,
editNoteTypeRouteHandler
);
router.put('/:id', isAuthenticated, editNoteTypeRouteHandler);
// Create new note type
router.post(
"/",
isAuthenticated,
newNoteTypeRouteHandler
);
router.post('/', isAuthenticated, newNoteTypeRouteHandler);
export default router;

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -10,46 +10,46 @@ import {
newPartRouteHandler,
uploadPartFileContentRouteHandler,
getPartFileContentRouteHandler,
} from "../../services/management/parts.js";
} from '../../services/management/parts.js';
// list of parts
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, sort, order } = req.query;
const allowedFilters = ["products", "name"];
const allowedFilters = ['products', 'name'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listPartsRouteHandler(req, res, page, limit, property, filter, "", sort, order);
listPartsRouteHandler(req, res, page, limit, property, filter, '', sort, order);
});
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newPartRouteHandler(req, res);
});
router.post("/:id/content", isAuthenticated, (req, res) => {
router.post('/:id/content', isAuthenticated, (req, res) => {
uploadPartFileContentRouteHandler(req, res);
});
router.get("/:id/content", isAuthenticated, (req, res) => {
router.get('/:id/content', isAuthenticated, (req, res) => {
getPartFileContentRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getPartRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editPartRouteHandler(req, res);
});

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -8,20 +8,20 @@ import {
getProductRouteHandler,
editProductRouteHandler,
newProductRouteHandler,
} from "../../services/management/products.js";
} from '../../services/management/products.js';
// list of products
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ["type", "brand", "diameter", "color"];
const allowedFilters = ['type', 'brand', 'diameter', 'color'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listProductsRouteHandler(req, res, page, limit, property, filter);
});
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newProductRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getProductRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editProductRouteHandler(req, res);
});

View File

@ -1,26 +1,26 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
listUsersRouteHandler,
getUserRouteHandler,
editUserRouteHandler,
} from "../../services/management/users.js";
} from '../../services/management/users.js';
// list of users
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ["username", "name", "firstName", "lastName"];
const allowedFilters = ['username', 'name', 'firstName', 'lastName'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -29,12 +29,12 @@ router.get("/", isAuthenticated, (req, res) => {
listUsersRouteHandler(req, res, page, limit, property, filter);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getUserRouteHandler(req, res);
});
// update user info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editUserRouteHandler(req, res);
});

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -8,20 +8,20 @@ import {
getVendorRouteHandler,
editVendorRouteHandler,
newVendorRouteHandler,
} from "../../services/management/vendors.js";
} from '../../services/management/vendors.js';
// list of vendors
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ["country"];
const allowedFilters = ['country'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listVendorsRouteHandler(req, res, page, limit, property, filter);
});
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newVendorRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getVendorRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editVendorRouteHandler(req, res);
});

View File

@ -1,21 +1,21 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import {
listNotesRouteHandler,
getNoteRouteHandler,
editNoteRouteHandler,
newNoteRouteHandler,
deleteNoteRouteHandler
} from "../../services/misc/notes.js";
import { parseFilter } from "../../util/index.js";
deleteNoteRouteHandler,
} from '../../services/misc/notes.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
// List notes
router.get("/", isAuthenticated, async (req, res) => {
router.get('/', isAuthenticated, async (req, res) => {
const { page, limit, property, sort, order } = req.query;
const allowedFilters = ["parent", "user._id"];
const allowedFilters = ['parent', 'user._id'];
var filter = {};
@ -23,41 +23,24 @@ router.get("/", isAuthenticated, async (req, res) => {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const filterObject = parseFilter(key, value);
filter = {...filter, ...filterObject}
filter = { ...filter, ...filterObject };
}
}
}
listNotesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
}
);
listNotesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
});
// Get single note
router.get(
"/:id",
isAuthenticated,
getNoteRouteHandler
);
router.get('/:id', isAuthenticated, getNoteRouteHandler);
// Edit note
router.put(
"/:id",
isAuthenticated,
editNoteRouteHandler
);
router.put('/:id', isAuthenticated, editNoteRouteHandler);
// Delete note
router.delete(
"/:id",
isAuthenticated,
deleteNoteRouteHandler
);
router.delete('/:id', isAuthenticated, deleteNoteRouteHandler);
// Create new note
router.post(
"/",
isAuthenticated,
newNoteRouteHandler
);
router.post('/', isAuthenticated, newNoteRouteHandler);
export default router;

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -8,13 +8,13 @@ import {
getFilamentRouteHandler,
editFilamentRouteHandler,
newFilamentRouteHandler,
} from "../../services/management/filaments.js";
} from '../../services/management/filaments.js';
// list of filaments
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = ["type", "vendor.name", "diameter", "color"];
const allowedFilters = ['type', 'vendor.name', 'diameter', 'color'];
var filter = {};
@ -29,16 +29,16 @@ router.get("/", isAuthenticated, (req, res) => {
listFilamentsRouteHandler(req, res, page, limit, property, filter);
});
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newFilamentRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getFilamentRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editFilamentRouteHandler(req, res);
});

View File

@ -1,6 +1,6 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseFilter } from "../../util/index.js";
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@ -11,17 +11,17 @@ import {
parseGCodeFileHandler,
uploadGCodeFileContentRouteHandler,
getGCodeFileContentRouteHandler,
} from "../../services/production/gcodefiles.js";
} from '../../services/production/gcodefiles.js';
// list of printers
router.get("/", isAuthenticated, (req, res) => {
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = [
"filament.type",
"filament.vendor.name",
"filament.diameter",
"filament.color",
'filament.type',
'filament.vendor.name',
'filament.diameter',
'filament.color',
];
var filter = {};
@ -29,7 +29,7 @@ router.get("/", isAuthenticated, (req, res) => {
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value)
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
@ -39,28 +39,28 @@ router.get("/", isAuthenticated, (req, res) => {
});
// new pritner
router.post("/", isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, (req, res) => {
newGCodeFileRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, (req, res) => {
getGCodeFileRouteHandler(req, res);
});
// update printer info
router.put("/:id", isAuthenticated, async (req, res) => {
router.put('/:id', isAuthenticated, async (req, res) => {
editGCodeFileRouteHandler(req, res);
});
router.post("/:id/content", isAuthenticated, (req, res) => {
router.post('/:id/content', isAuthenticated, (req, res) => {
uploadGCodeFileContentRouteHandler(req, res);
});
router.post("/content", isAuthenticated, (req, res) => {
router.post('/content', isAuthenticated, (req, res) => {
parseGCodeFileHandler(req, res);
});
router.get("/:id/content", isAuthenticated, (req, res) => {
router.get('/:id/content', isAuthenticated, (req, res) => {
getGCodeFileContentRouteHandler(req, res);
});

View File

@ -1,4 +1,4 @@
import mongoose from "mongoose";
import mongoose from 'mongoose';
const materialSchema = new mongoose.Schema({
name: { required: true, type: String },
@ -7,10 +7,10 @@ const materialSchema = new mongoose.Schema({
tags: [{ type: String }],
});
materialSchema.virtual("id").get(function () {
materialSchema.virtual('id').get(function () {
return this._id.toHexString();
});
materialSchema.set("toJSON", { virtuals: true });
materialSchema.set('toJSON', { virtuals: true });
export const materialModel = mongoose.model("Material", materialSchema);
export const materialModel = mongoose.model('Material', materialSchema);

View File

@ -1,15 +1,14 @@
import dotenv from "dotenv";
import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
import { filamentModel } from "../../schemas/management/filament.schema.js";
import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Filament Stocks");
const logger = log4js.getLogger('Filament Stocks');
logger.level = process.env.LOG_LEVEL;
export const listFilamentStocksRouteHandler = async (
@ -17,10 +16,10 @@ export const listFilamentStocksRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
property = '',
filter = {},
sort = "",
order = "ascend"
sort = '',
order = 'ascend'
) => {
try {
// Calculate the skip value based on the page number and limit
@ -31,30 +30,30 @@ export const listFilamentStocksRouteHandler = async (
aggregateCommand.push({
$lookup: {
from: "filaments", // The collection name (usually lowercase plural)
localField: "filament", // The field in your current model
foreignField: "_id", // The field in the products collection
as: "filament", // The output field name
from: 'filaments', // The collection name (usually lowercase plural)
localField: 'filament', // The field in your current model
foreignField: '_id', // The field in the products collection
as: 'filament', // The output field name
},
});
aggregateCommand.push({ $unwind: "$filament" });
aggregateCommand.push({ $unwind: '$filament' });
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === "descend" ? -1 : 1;
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@ -67,11 +66,11 @@ export const listFilamentStocksRouteHandler = async (
logger.trace(
`List of filamentStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
filamentStock,
filamentStock
);
res.send(filamentStock);
} catch (error) {
logger.error("Error listing filament stocks:", error);
logger.error('Error listing filament stocks:', error);
res.status(500).send({ error: error });
}
};
@ -85,35 +84,37 @@ export const getFilamentStockRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("filament")
.populate('filament')
.populate({
path: 'stockEvents',
populate: [
{
path: 'subJob',
select: 'number'
select: 'number',
},
{
path: 'job',
select: 'startedAt'
}
]
select: 'startedAt',
},
],
});
if (!filamentStock) {
logger.warn(`Filament stock not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...filamentStock._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching filament stock:", error);
logger.error('Error fetching filament stock:', error);
res.status(500).send({ error: error.message });
}
};
@ -128,7 +129,7 @@ export const editFilamentStockRouteHandler = async (req, res) => {
if (!filamentStock) {
// Error handling
logger.warn(`Filament stock not found with supplied id.`);
return res.status(404).send({ error: "Filament stock not found." });
return res.status(404).send({ error: 'Filament stock not found.' });
}
logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
@ -144,21 +145,18 @@ export const editFilamentStockRouteHandler = async (req, res) => {
email: req.body.email,
};
const result = await filamentStockModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await filamentStockModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No filament stock updated.");
res.status(500).send({ error: "No filament stocks updated." });
logger.error('No filament stock updated.');
res.status(500).send({ error: 'No filament stocks updated.' });
}
} catch (updateError) {
logger.error("Error updating filament stock:", updateError);
logger.error('Error updating filament stock:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching filament stock:", fetchError);
logger.error('Error fetching filament stock:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -176,12 +174,12 @@ export const newFilamentStockRouteHandler = async (req, res) => {
if (!filament) {
logger.warn(`Filament not found with supplied id.`);
return res.status(404).send({ error: "Filament not found." });
return res.status(404).send({ error: 'Filament not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
} catch (error) {
logger.error("Error fetching filament:", error);
logger.error('Error fetching filament:', error);
return res.status(500).send({ error: error.message });
}
@ -195,22 +193,22 @@ export const newFilamentStockRouteHandler = async (req, res) => {
currentNetWeight: startingGrossWeight - filament.emptySpoolWeight,
filament: req.body.filament._id,
state: {
type: "unconsumed",
type: 'unconsumed',
percent: 0,
},
};
const result = await filamentStockModel.create(newFilamentStock);
if (result.nCreated === 0) {
logger.error("No filament stock created.");
return res.status(500).send({ error: "No filament stock created." });
logger.error('No filament stock created.');
return res.status(500).send({ error: 'No filament stock created.' });
}
// Create initial stock event
const stockEvent = {
type: "initial",
value: startingNetWeight,
unit: "g",
type: 'initial',
value: startingGrossWeight - filament.emptySpoolWeight,
unit: 'g',
filamentStock: result._id,
createdAt: new Date(),
updatedAt: new Date(),
@ -218,8 +216,8 @@ export const newFilamentStockRouteHandler = async (req, res) => {
const eventResult = await stockEventModel.create(stockEvent);
if (!eventResult) {
logger.error("Failed to create initial stock event.");
return res.status(500).send({ error: "Failed to create initial stock event." });
logger.error('Failed to create initial stock event.');
return res.status(500).send({ error: 'Failed to create initial stock event.' });
}
// Update the filament stock with the stock event reference
@ -228,9 +226,9 @@ export const newFilamentStockRouteHandler = async (req, res) => {
{ $push: { stockEvents: eventResult._id } }
);
return res.send({ status: "ok" });
return res.send({ status: 'ok' });
} catch (updateError) {
logger.error("Error adding filament stock:", updateError);
logger.error('Error adding filament stock:', updateError);
return res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,11 +1,11 @@
import dotenv from "dotenv";
import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import dotenv from 'dotenv';
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger("PartStocks");
const logger = log4js.getLogger('PartStocks');
logger.level = process.env.LOG_LEVEL;
export const listPartStocksRouteHandler = async (
@ -13,8 +13,8 @@ export const listPartStocksRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@ -28,9 +28,9 @@ export const listPartStocksRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@ -44,11 +44,11 @@ export const listPartStocksRouteHandler = async (
logger.trace(
`List of partStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
partStock,
partStock
);
res.send(partStock);
} catch (error) {
logger.error("Error listing partStocks:", error);
logger.error('Error listing partStocks:', error);
res.status(500).send({ error: error });
}
};
@ -64,13 +64,13 @@ export const getPartStockRouteHandler = async (req, res) => {
if (!partStock) {
logger.warn(`PartStock not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`PartStock with ID: ${id}:`, partStock);
res.send(partStock);
} catch (error) {
logger.error("Error fetching PartStock:", error);
logger.error('Error fetching PartStock:', error);
res.status(500).send({ error: error.message });
}
};
@ -85,7 +85,7 @@ export const editPartStockRouteHandler = async (req, res) => {
if (!partStock) {
// Error handling
logger.warn(`PartStock not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`PartStock with ID: ${id}:`, partStock);
@ -101,21 +101,18 @@ export const editPartStockRouteHandler = async (req, res) => {
email: req.body.email,
};
const result = await partStockModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await partStockModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No PartStock updated.");
res.status(500).send({ error: "No partStocks updated." });
logger.error('No PartStock updated.');
res.status(500).send({ error: 'No partStocks updated.' });
}
} catch (updateError) {
logger.error("Error updating partStock:", updateError);
logger.error('Error updating partStock:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching partStock:", fetchError);
logger.error('Error fetching partStock:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -131,12 +128,12 @@ export const newPartStockRouteHandler = async (req, res) => {
const result = await partStockModel.create(newPartStock);
if (result.nCreated === 0) {
logger.error("No partStock created.");
res.status(500).send({ error: "No partStock created." });
logger.error('No partStock created.');
res.status(500).send({ error: 'No partStock created.' });
}
res.status(200).send({ status: "ok" });
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error("Error updating partStock:", updateError);
logger.error('Error updating partStock:', updateError);
res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,12 +1,12 @@
import dotenv from "dotenv";
import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Stock Audits");
const logger = log4js.getLogger('Stock Audits');
logger.level = process.env.LOG_LEVEL;
export const listStockAuditsRouteHandler = async (
@ -14,8 +14,8 @@ export const listStockAuditsRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
const skip = (page - 1) * limit;
@ -25,22 +25,22 @@ export const listStockAuditsRouteHandler = async (
// Lookup createdBy user
aggregateCommand.push({
$lookup: {
from: "users",
localField: "createdBy",
foreignField: "_id",
as: "createdBy",
from: 'users',
localField: 'createdBy',
foreignField: '_id',
as: 'createdBy',
},
});
aggregateCommand.push({ $unwind: "$createdBy" });
aggregateCommand.push({ $unwind: '$createdBy' });
if (filter != {}) {
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
aggregateCommand.push({ $skip: skip });
@ -50,11 +50,11 @@ export const listStockAuditsRouteHandler = async (
logger.trace(
`List of stock audits (Page ${page}, Limit ${limit}, Property ${property}):`,
stockAudits,
stockAudits
);
res.send(stockAudits);
} catch (error) {
logger.error("Error listing stock audits:", error);
logger.error('Error listing stock audits:', error);
res.status(500).send({ error: error });
}
};
@ -66,24 +66,26 @@ export const getStockAuditRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("createdBy")
.populate("items.filamentStock")
.populate("items.partStock");
.populate('createdBy')
.populate('items.filamentStock')
.populate('items.partStock');
if (!stockAudit) {
logger.warn(`Stock audit not found with supplied id.`);
return res.status(404).send({ error: "Stock audit not found." });
return res.status(404).send({ error: 'Stock audit not found.' });
}
logger.trace(`Stock audit with ID: ${id}:`, stockAudit);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...stockAudit._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching stock audit:", error);
logger.error('Error fetching stock audit:', error);
res.status(500).send({ error: error.message });
}
};
@ -92,29 +94,30 @@ export const newStockAuditRouteHandler = async (req, res) => {
try {
const newStockAudit = {
type: req.body.type,
status: req.body.status || "pending",
status: req.body.status || 'pending',
notes: req.body.notes,
items: req.body.items.map(item => ({
items: req.body.items.map((item) => ({
type: item.type,
stock: item.type === "filament"
stock:
item.type === 'filament'
? new mongoose.Types.ObjectId(item.filamentStock)
: new mongoose.Types.ObjectId(item.partStock),
expectedQuantity: item.expectedQuantity,
actualQuantity: item.actualQuantity,
notes: item.notes
notes: item.notes,
})),
createdBy: new mongoose.Types.ObjectId(req.body.createdBy),
completedAt: req.body.status === "completed" ? new Date() : null
completedAt: req.body.status === 'completed' ? new Date() : null,
};
const result = await stockAuditModel.create(newStockAudit);
if (!result) {
logger.error("No stock audit created.");
return res.status(500).send({ error: "No stock audit created." });
logger.error('No stock audit created.');
return res.status(500).send({ error: 'No stock audit created.' });
}
return res.send({ status: "ok", id: result._id });
return res.send({ status: 'ok', id: result._id });
} catch (error) {
logger.error("Error adding stock audit:", error);
logger.error('Error adding stock audit:', error);
return res.status(500).send({ error: error.message });
}
};
@ -124,33 +127,30 @@ export const updateStockAuditRouteHandler = async (req, res) => {
const id = new mongoose.Types.ObjectId(req.params.id);
const updateData = {
...req.body,
items: req.body.items?.map(item => ({
items: req.body.items?.map((item) => ({
type: item.type,
stock: item.type === "filament"
stock:
item.type === 'filament'
? new mongoose.Types.ObjectId(item.filamentStock)
: new mongoose.Types.ObjectId(item.partStock),
expectedQuantity: item.expectedQuantity,
actualQuantity: item.actualQuantity,
notes: item.notes
notes: item.notes,
})),
completedAt: req.body.status === "completed" ? new Date() : null
completedAt: req.body.status === 'completed' ? new Date() : null,
};
const result = await stockAuditModel.findByIdAndUpdate(
id,
{ $set: updateData },
{ new: true }
);
const result = await stockAuditModel.findByIdAndUpdate(id, { $set: updateData }, { new: true });
if (!result) {
logger.warn(`Stock audit not found with supplied id.`);
return res.status(404).send({ error: "Stock audit not found." });
return res.status(404).send({ error: 'Stock audit not found.' });
}
logger.trace(`Updated stock audit with ID: ${id}:`, result);
res.send(result);
} catch (error) {
logger.error("Error updating stock audit:", error);
logger.error('Error updating stock audit:', error);
res.status(500).send({ error: error.message });
}
};
@ -162,13 +162,13 @@ export const deleteStockAuditRouteHandler = async (req, res) => {
if (!result) {
logger.warn(`Stock audit not found with supplied id.`);
return res.status(404).send({ error: "Stock audit not found." });
return res.status(404).send({ error: 'Stock audit not found.' });
}
logger.trace(`Deleted stock audit with ID: ${id}`);
res.send({ status: "ok" });
res.send({ status: 'ok' });
} catch (error) {
logger.error("Error deleting stock audit:", error);
logger.error('Error deleting stock audit:', error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,11 +1,10 @@
import dotenv from "dotenv";
import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import dotenv from 'dotenv';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger("Stock Events");
const logger = log4js.getLogger('Stock Events');
logger.level = process.env.LOG_LEVEL;
export const listStockEventsRouteHandler = async (
@ -13,10 +12,10 @@ export const listStockEventsRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
property = '',
filter = {},
sort = "",
order = "ascend"
sort = '',
order = 'ascend'
) => {
try {
const skip = (page - 1) * limit;
@ -26,22 +25,22 @@ export const listStockEventsRouteHandler = async (
// Lookup filamentStock
aggregateCommand.push({
$lookup: {
from: "filamentstocks",
localField: "filamentStock",
foreignField: "_id",
as: "filamentStock",
from: 'filamentstocks',
localField: 'filamentStock',
foreignField: '_id',
as: 'filamentStock',
},
});
aggregateCommand.push({ $unwind: "$filamentStock" });
aggregateCommand.push({ $unwind: '$filamentStock' });
// Conditionally lookup subJob only if it exists
aggregateCommand.push({
$lookup: {
from: "subjobs",
localField: "subJob",
foreignField: "_id",
as: "subJob",
from: 'subjobs',
localField: 'subJob',
foreignField: '_id',
as: 'subJob',
},
});
@ -49,26 +48,26 @@ export const listStockEventsRouteHandler = async (
$addFields: {
subJob: {
$cond: {
if: { $eq: [{ $size: "$subJob" }, 0] },
if: { $eq: [{ $size: '$subJob' }, 0] },
then: null,
else: { $arrayElemAt: ["$subJob", 0] }
}
}
}
else: { $arrayElemAt: ['$subJob', 0] },
},
},
},
});
if (filter != {}) {
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === "descend" ? -1 : 1;
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@ -82,11 +81,11 @@ export const listStockEventsRouteHandler = async (
logger.trace(
`List of stock events (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
stockEvents,
stockEvents
);
res.send(stockEvents);
} catch (error) {
logger.error("Error listing stock events:", error);
logger.error('Error listing stock events:', error);
res.status(500).send({ error: error });
}
};
@ -98,19 +97,19 @@ export const getStockEventRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("filamentStock")
.populate("subJob")
.populate("job");
.populate('filamentStock')
.populate('subJob')
.populate('job');
if (!stockEvent) {
logger.warn(`Stock event not found with supplied id.`);
return res.status(404).send({ error: "Stock event not found." });
return res.status(404).send({ error: 'Stock event not found.' });
}
logger.trace(`Stock event with ID: ${id}:`, stockEvent);
res.send(stockEvent);
} catch (error) {
logger.error("Error fetching stock event:", error);
logger.error('Error fetching stock event:', error);
res.status(500).send({ error: error.message });
}
};
@ -123,17 +122,17 @@ export const newStockEventRouteHandler = async (req, res) => {
subJob: req.body.subJob ? new mongoose.Types.ObjectId(req.body.subJob) : null,
job: req.body.job ? new mongoose.Types.ObjectId(req.body.job) : null,
filamentStock: new mongoose.Types.ObjectId(req.body.filamentStock),
timestamp: new Date()
timestamp: new Date(),
};
const result = await stockEventModel.create(newStockEvent);
if (!result) {
logger.error("No stock event created.");
return res.status(500).send({ error: "No stock event created." });
logger.error('No stock event created.');
return res.status(500).send({ error: 'No stock event created.' });
}
return res.send({ status: "ok", id: result._id });
return res.send({ status: 'ok', id: result._id });
} catch (error) {
logger.error("Error adding stock event:", error);
logger.error('Error adding stock event:', error);
return res.status(500).send({ error: error.message });
}
};

View File

@ -1,21 +1,13 @@
import dotenv from "dotenv";
import dotenv from 'dotenv';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import log4js from "log4js";
import mongoose from "mongoose";
import log4js from 'log4js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger("AuditLogs");
const logger = log4js.getLogger('AuditLogs');
logger.level = process.env.LOG_LEVEL;
export const listAuditLogsRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = "",
filter = {},
) => {
export const listAuditLogsRouteHandler = async (req, res, page = 1, limit = 25, filter = {}) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
@ -26,15 +18,12 @@ export const listAuditLogsRouteHandler = async (
.skip(skip)
.limit(Number(limit))
.sort({ createdAt: -1 })
.populate('owner', 'name _id')
.populate('owner', 'name _id');
logger.trace(
`List of audit logs (Page ${page}, Limit ${limit}):`,
auditLogs,
);
logger.trace(`List of audit logs (Page ${page}, Limit ${limit}):`, auditLogs);
res.send(auditLogs);
} catch (error) {
logger.error("Error listing audit logs:", error);
logger.error('Error listing audit logs:', error);
res.status(500).send({ error: error });
}
};
@ -44,19 +33,23 @@ export const getAuditLogRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the audit log with the given ID
const auditLog = await auditLogModel.findOne({
const auditLog = await auditLogModel
.findOne({
_id: id,
}).populate('printer').populate('owner').populate('target');
})
.populate('printer')
.populate('owner')
.populate('target');
if (!auditLog) {
logger.warn(`Audit log not found with supplied id.`);
return res.status(404).send({ error: "Audit log not found." });
return res.status(404).send({ error: 'Audit log not found.' });
}
logger.trace(`Audit log with ID: ${id}:`, auditLog);
res.send(auditLog);
} catch (error) {
logger.error("Error fetching audit log:", error);
logger.error('Error fetching audit log:', error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,14 +1,12 @@
import dotenv from "dotenv";
import { filamentModel } from "../../schemas/management/filament.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Filaments");
const logger = log4js.getLogger('Filaments');
logger.level = process.env.LOG_LEVEL;
export const listFilamentsRouteHandler = async (
@ -16,8 +14,8 @@ export const listFilamentsRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@ -28,23 +26,23 @@ export const listFilamentsRouteHandler = async (
aggregateCommand.push({
$lookup: {
from: "vendors", // The collection name (usually lowercase plural)
localField: "vendor", // The field in your current model
foreignField: "_id", // The field in the products collection
as: "vendor", // The output field name
from: 'vendors', // The collection name (usually lowercase plural)
localField: 'vendor', // The field in your current model
foreignField: '_id', // The field in the products collection
as: 'vendor', // The output field name
},
});
aggregateCommand.push({ $unwind: "$vendor" });
aggregateCommand.push({ $unwind: '$vendor' });
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@ -58,11 +56,11 @@ export const listFilamentsRouteHandler = async (
logger.trace(
`List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`,
filament,
filament
);
res.send(filament);
} catch (error) {
logger.error("Error listing filaments:", error);
logger.error('Error listing filaments:', error);
res.status(500).send({ error: error });
}
};
@ -76,22 +74,24 @@ export const getFilamentRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("vendor");
.populate('vendor');
if (!filament) {
logger.warn(`Filament not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...filament._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching Filament:", error);
logger.error('Error fetching Filament:', error);
res.status(500).send({ error: error.message });
}
};
@ -106,7 +106,7 @@ export const editFilamentRouteHandler = async (req, res) => {
if (!filament) {
// Error handling
logger.warn(`Filament not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
@ -128,30 +128,20 @@ export const editFilamentRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(
filament.toObject(),
updateData,
id,
'Filament',
req.user._id,
'User'
);
await newAuditLog(filament.toObject(), updateData, id, 'Filament', req.user._id, 'User');
const result = await filamentModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await filamentModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No Filament updated.");
return res.status(500).send({ error: "No filaments updated." });
logger.error('No Filament updated.');
return res.status(500).send({ error: 'No filaments updated.' });
}
} catch (updateError) {
logger.error("Error updating filament:", updateError);
logger.error('Error updating filament:', updateError);
return res.status(500).send({ error: updateError.message });
}
return res.send("OK");
return res.send('OK');
} catch (fetchError) {
logger.error("Error fetching filament:", fetchError);
logger.error('Error fetching filament:', fetchError);
return res.status(500).send({ error: fetchError.message });
}
};
@ -177,23 +167,16 @@ export const newFilamentRouteHandler = async (req, res) => {
const result = await filamentModel.create(newFilament);
if (result.nCreated === 0) {
logger.error("No filament created.");
res.status(500).send({ error: "No filament created." });
logger.error('No filament created.');
res.status(500).send({ error: 'No filament created.' });
}
// Create audit log for new filament
await newAuditLog(
{},
newFilament,
result._id,
'Filament',
req.user._id,
'User'
);
await newAuditLog({}, newFilament, result._id, 'Filament', req.user._id, 'User');
res.status(200).send({ status: "ok" });
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error("Error updating filament:", updateError);
logger.error('Error updating filament:', updateError);
res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,12 +1,10 @@
import dotenv from "dotenv";
import { materialModel } from "../../schemas/management/material.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import dotenv from 'dotenv';
import { materialModel } from '../../schemas/management/material.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger("Materials");
const logger = log4js.getLogger('Materials');
logger.level = process.env.LOG_LEVEL;
export const listMaterialsRouteHandler = async (
@ -14,8 +12,8 @@ export const listMaterialsRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@ -29,9 +27,9 @@ export const listMaterialsRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@ -45,11 +43,11 @@ export const listMaterialsRouteHandler = async (
logger.trace(
`List of materials (Page ${page}, Limit ${limit}, Property ${property}):`,
material,
material
);
res.send(material);
} catch (error) {
logger.error("Error listing materials:", error);
logger.error('Error listing materials:', error);
res.status(500).send({ error: error });
}
};
@ -65,13 +63,13 @@ export const getMaterialRouteHandler = async (req, res) => {
if (!material) {
logger.warn(`Material not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Material with ID: ${id}:`, material);
res.send(material);
} catch (error) {
logger.error("Error fetching Material:", error);
logger.error('Error fetching Material:', error);
res.status(500).send({ error: error.message });
}
};
@ -86,30 +84,26 @@ export const editMaterialRouteHandler = async (req, res) => {
if (!material) {
// Error handling
logger.warn(`Material not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Material with ID: ${id}:`, material);
try {
const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body;
const updateData = req.body;
const result = await materialModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await materialModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No Material updated.");
res.status(500).send({ error: "No materials updated." });
logger.error('No Material updated.');
res.status(500).send({ error: 'No materials updated.' });
}
} catch (updateError) {
logger.error("Error updating material:", updateError);
logger.error('Error updating material:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching material:", fetchError);
logger.error('Error fetching material:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -125,12 +119,12 @@ export const newMaterialRouteHandler = async (req, res) => {
const result = await materialModel.create(newMaterial);
if (result.nCreated === 0) {
logger.error("No material created.");
res.status(500).send({ error: "No material created." });
logger.error('No material created.');
res.status(500).send({ error: 'No material created.' });
}
res.status(200).send({ status: "ok" });
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error("Error updating material:", updateError);
logger.error('Error updating material:', updateError);
res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,13 +1,13 @@
import dotenv from "dotenv";
import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("NoteTypes");
const logger = log4js.getLogger('NoteTypes');
logger.level = process.env.LOG_LEVEL;
export const listNoteTypesRouteHandler = async (
@ -15,8 +15,8 @@ export const listNoteTypesRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
const skip = (page - 1) * limit;
@ -27,26 +27,25 @@ export const listNoteTypesRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
console.log(aggregateCommand)
console.log(aggregateCommand);
noteTypes = await noteTypeModel.aggregate(aggregateCommand);
logger.trace(
`List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
noteTypes,
noteTypes
);
res.send(noteTypes);
} catch (error) {
logger.error("Error listing note types:", error);
logger.error('Error listing note types:', error);
res.status(500).send({ error: error });
}
};
@ -60,18 +59,20 @@ export const getNoteTypeRouteHandler = async (req, res) => {
if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
return res.status(404).send({ error: "Note type not found." });
return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...noteType._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching note type:", error);
logger.error('Error fetching note type:', error);
res.status(500).send({ error: error.message });
}
};
@ -83,7 +84,7 @@ export const editNoteTypeRouteHandler = async (req, res) => {
if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
return res.status(404).send({ error: "Note type not found." });
return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType);
@ -97,30 +98,20 @@ export const editNoteTypeRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(
noteType.toObject(),
updateData,
id,
'NoteType',
req.user._id,
'User'
);
await newAuditLog(noteType.toObject(), updateData, id, 'NoteType', req.user._id, 'User');
const result = await noteTypeModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await noteTypeModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No note type updated.");
res.status(500).send({ error: "No note types updated." });
logger.error('No note type updated.');
res.status(500).send({ error: 'No note types updated.' });
}
} catch (updateError) {
logger.error("Error updating note type:", updateError);
logger.error('Error updating note type:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching note type:", fetchError);
logger.error('Error fetching note type:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -132,23 +123,16 @@ export const newNoteTypeRouteHandler = async (req, res) => {
const result = await noteTypeModel.create(newNoteType);
if (result.nCreated === 0) {
logger.error("No note type created.");
res.status(500).send({ error: "No note type created." });
logger.error('No note type created.');
res.status(500).send({ error: 'No note type created.' });
}
// Create audit log for new note type
await newAuditLog(
{},
newNoteType,
result._id,
'NoteType',
req.user._id,
'User'
);
await newAuditLog({}, newNoteType, result._id, 'NoteType', req.user._id, 'User');
res.status(200).send({ status: "ok" });
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error("Error creating note type:", updateError);
logger.error('Error creating note type:', updateError);
res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,16 +1,15 @@
import dotenv from "dotenv";
import { partModel } from "../../schemas/management/part.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import multer from "multer";
import fs from "fs";
import path from "path";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { partModel } from '../../schemas/management/part.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import multer from 'multer';
import fs from 'fs';
import path from 'path';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Parts");
const logger = log4js.getLogger('Parts');
logger.level = process.env.LOG_LEVEL;
// Set storage engine
@ -18,7 +17,7 @@ const partsStorage = multer.diskStorage({
destination: process.env.PART_STORAGE,
filename: async function (req, file, cb) {
// Retrieve custom file name from request body
const customFileName = req.params.id || "default"; // Default to 'default' if not provided
const customFileName = req.params.id || 'default'; // Default to 'default' if not provided
// Create the final filename ensuring it ends with .g
const finalFilename = `${customFileName}.stl`;
@ -34,7 +33,7 @@ const partUpload = multer({
fileFilter: function (req, file, cb) {
checkFileType(file, cb);
},
}).single("partFile"); // The name attribute of the file input in the HTML form
}).single('partFile'); // The name attribute of the file input in the HTML form
// Check file type
function checkFileType(file, cb) {
@ -47,7 +46,7 @@ function checkFileType(file, cb) {
console.log(file);
return cb(null, true);
} else {
cb("Error: .stl files only!");
cb('Error: .stl files only!');
}
}
@ -56,11 +55,11 @@ export const listPartsRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
property = '',
filter = {},
search = "",
sort = "",
order = "ascend"
search = '',
sort = '',
order = 'ascend'
) => {
try {
// Calculate the skip value based on the page number and limit
@ -69,40 +68,51 @@ export const listPartsRouteHandler = async (
let part;
let aggregateCommand = [];
if (search) {
// Add a text search match stage for name and brand fields
aggregateCommand.push({
$match: {
$text: {
$search: search,
},
},
});
}
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
logger.error(property);
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({
$lookup: {
from: "products", // The collection name (usually lowercase plural)
localField: "product", // The field in your current model
foreignField: "_id", // The field in the products collection
as: "product", // The output field name
from: 'products', // The collection name (usually lowercase plural)
localField: 'product', // The field in your current model
foreignField: '_id', // The field in the products collection
as: 'product', // The output field name
},
});
aggregateCommand.push({ $unwind: "$product" });
aggregateCommand.push({ $unwind: '$product' });
aggregateCommand.push({
$project: {
name: 1,
_id: 1,
createdAt: 1,
updatedAt: 1,
"product._id": 1,
"product.name": 1,
'product._id': 1,
'product.name': 1,
},
});
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === "descend" ? -1 : 1;
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@ -115,11 +125,11 @@ export const listPartsRouteHandler = async (
logger.trace(
`List of parts (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
part,
part
);
res.send(part);
} catch (error) {
logger.error("Error listing parts:", error);
logger.error('Error listing parts:', error);
res.status(500).send({ error: error });
}
};
@ -133,22 +143,24 @@ export const getPartRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("product");
.populate('product');
if (!part) {
logger.warn(`Part not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Part with ID: ${id}:`, part);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...part._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching Part:", error);
logger.error('Error fetching Part:', error);
res.status(500).send({ error: error.message });
}
};
@ -163,40 +175,29 @@ export const editPartRouteHandler = async (req, res) => {
if (!part) {
// Error handling
logger.warn(`Part not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Part with ID: ${id}:`, part);
try {
const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body;
const updateData = req.body;
// Create audit log before updating
await newAuditLog(
part.toObject(),
updateData,
id,
'Part',
req.user._id,
'User'
);
await newAuditLog(part.toObject(), updateData, id, 'Part', req.user._id, 'User');
const result = await partModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await partModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No Part updated.");
res.status(500).send({ error: "No parts updated." });
logger.error('No Part updated.');
res.status(500).send({ error: 'No parts updated.' });
}
} catch (updateError) {
logger.error("Error updating part:", updateError);
logger.error('Error updating part:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching part:", fetchError);
logger.error('Error fetching part:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -215,20 +216,13 @@ export const newPartRouteHandler = async (req, res) => {
const results = await partModel.insertMany(partsToCreate);
if (!results.length) {
logger.error("No parts created.");
return res.status(500).send({ error: "No parts created." });
logger.error('No parts created.');
return res.status(500).send({ error: 'No parts created.' });
}
// Create audit logs for each new part
for (const result of results) {
await newAuditLog(
{},
result.toObject(),
result._id,
'Part',
req.user._id,
'User'
);
await newAuditLog({}, result.toObject(), result._id, 'Part', req.user._id, 'User');
}
return res.status(200).send(results);
@ -244,19 +238,12 @@ export const newPartRouteHandler = async (req, res) => {
const result = await partModel.create(newPart);
// Create audit log for new part
await newAuditLog(
{},
newPart,
result._id,
'Part',
req.user._id,
'User'
);
await newAuditLog({}, newPart, result._id, 'Part', req.user._id, 'User');
return res.status(200).send(result);
}
} catch (error) {
logger.error("Error creating part(s):", error);
logger.error('Error creating part(s):', error);
return res.status(500).send({ error: error.message });
}
};
@ -270,7 +257,7 @@ export const uploadPartFileContentRouteHandler = async (req, res) => {
if (!part) {
// Error handling
logger.warn(`Part not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Part with ID: ${id}`);
try {
@ -282,22 +269,22 @@ export const uploadPartFileContentRouteHandler = async (req, res) => {
} else {
if (req.file == undefined) {
res.send({
message: "No file selected!",
message: 'No file selected!',
});
} else {
res.send({
status: "OK",
status: 'OK',
file: `${req.file.filename}`,
});
}
}
});
} catch (updateError) {
logger.error("Error updating part:", updateError);
logger.error('Error updating part:', updateError);
res.status(500).send({ error: updateError.message });
}
} catch (fetchError) {
logger.error("Error fetching part:", fetchError);
logger.error('Error fetching part:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -313,22 +300,22 @@ export const getPartFileContentRouteHandler = async (req, res) => {
if (!part) {
logger.warn(`Part not found with supplied id.`);
return res.status(404).send({ error: "Part not found." });
return res.status(404).send({ error: 'Part not found.' });
}
logger.trace(`Returning part file contents with ID: ${id}:`);
const filePath = path.join(process.env.PART_STORAGE, id + ".stl");
const filePath = path.join(process.env.PART_STORAGE, id + '.stl');
// Read the file
fs.readFile(filePath, "utf8", (err, data) => {
fs.readFile(filePath, 'utf8', (err, data) => {
if (err) {
if (err.code === "ENOENT") {
if (err.code === 'ENOENT') {
// File not found
return res.status(404).send({ error: "File not found!" });
return res.status(404).send({ error: 'File not found!' });
} else {
// Other errors
return res.status(500).send({ error: "Error reading file." });
return res.status(500).send({ error: 'Error reading file.' });
}
}
@ -336,7 +323,7 @@ export const getPartFileContentRouteHandler = async (req, res) => {
res.send(data);
});
} catch (error) {
logger.error("Error fetching Part:", error);
logger.error('Error fetching Part:', error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,14 +1,13 @@
import dotenv from "dotenv";
import { productModel } from "../../schemas/management/product.schema.js";
import { partModel } from "../../schemas/management/part.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { productModel } from '../../schemas/management/product.schema.js';
import { partModel } from '../../schemas/management/part.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Products");
const logger = log4js.getLogger('Products');
logger.level = process.env.LOG_LEVEL;
export const listProductsRouteHandler = async (
@ -16,8 +15,8 @@ export const listProductsRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@ -31,13 +30,13 @@ export const listProductsRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
// Match documents where the specified property is either null, undefined, empty string, empty array or empty object
aggregateCommand.push({
$match: {
$or: [
{ [property]: null },
{ [property]: "" },
{ [property]: '' },
{ [property]: [] },
{ [property]: {} },
{ [property]: { $exists: false } },
@ -56,13 +55,10 @@ export const listProductsRouteHandler = async (
product = await productModel.aggregate(aggregateCommand);
logger.trace(
`List of products (Page ${page}, Limit ${limit}, Property ${property}):`,
product,
);
logger.trace(`List of products (Page ${page}, Limit ${limit}, Property ${property}):`, product);
res.send(product);
} catch (error) {
logger.error("Error listing products:", error);
logger.error('Error listing products:', error);
res.status(500).send({ error: error });
}
};
@ -76,23 +72,25 @@ export const getProductRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("vendor")
.populate("parts");
.populate('vendor')
.populate('parts');
if (!product) {
logger.warn(`Product not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Product with ID: ${id}:`, product);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...product._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching Product:", error);
logger.error('Error fetching Product:', error);
res.status(500).send({ error: error.message });
}
};
@ -109,12 +107,12 @@ export const editProductRouteHandler = async (req, res) => {
if (!product) {
// Error handling
logger.warn(`Product not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Product with ID: ${id}:`, product);
} catch (fetchError) {
logger.error("Error fetching product:", fetchError);
logger.error('Error fetching product:', fetchError);
res.status(500).send({ error: fetchError.message });
}
@ -132,28 +130,18 @@ export const editProductRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(
product.toObject(),
updateData,
id,
'Product',
req.user._id,
'User'
);
await newAuditLog(product.toObject(), updateData, id, 'Product', req.user._id, 'User');
const result = await productModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await productModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No Product updated.");
res.status(500).send({ error: "No products updated." });
logger.error('No Product updated.');
res.status(500).send({ error: 'No products updated.' });
}
} catch (updateError) {
logger.error("Error updating product:", updateError);
logger.error('Error updating product:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
};
export const newProductRouteHandler = async (req, res) => {
@ -172,19 +160,12 @@ export const newProductRouteHandler = async (req, res) => {
const newProductResult = await productModel.create(newProduct);
if (newProductResult.nCreated === 0) {
logger.error("No product created.");
res.status(500).send({ error: "No product created." });
logger.error('No product created.');
res.status(500).send({ error: 'No product created.' });
}
// Create audit log for new product
await newAuditLog(
{},
newProduct,
newProductResult._id,
'Product',
req.user._id,
'User'
);
await newAuditLog({}, newProduct, newProductResult._id, 'Product', req.user._id, 'User');
const parts = req.body.parts || [];
const productId = newProductResult._id;
@ -201,35 +182,28 @@ export const newProductRouteHandler = async (req, res) => {
const newPartResult = await partModel.create(newPart);
if (newPartResult.nCreated === 0) {
logger.error("No parts created.");
res.status(500).send({ error: "No parts created." });
logger.error('No parts created.');
res.status(500).send({ error: 'No parts created.' });
}
partIds.push(newPartResult._id);
// Create audit log for each new part
await newAuditLog(
{},
newPart,
newPartResult._id,
'Part',
req.user._id,
'User'
);
await newAuditLog({}, newPart, newPartResult._id, 'Part', req.user._id, 'User');
}
const editProductResult = await productModel.updateOne(
{ _id: productId },
{ $set: { parts: partIds } },
{ $set: { parts: partIds } }
);
if (editProductResult.nModified === 0) {
logger.error("No product updated.");
res.status(500).send({ error: "No products updated." });
logger.error('No product updated.');
res.status(500).send({ error: 'No products updated.' });
}
res.status(200).send({ ...newProductResult, parts: partIds });
} catch (updateError) {
logger.error("Error updating product:", updateError);
logger.error('Error updating product:', updateError);
res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,26 +1,25 @@
import dotenv from "dotenv";
import { jobModel } from "../../schemas/production/job.schema.js";
import { subJobModel } from "../../schemas/production/subjob.schema.js";
import log4js from "log4js";
import { printerModel } from "../../schemas/production/printer.schema.js";
import { filamentModel } from "../../schemas/management/filament.schema.js";
import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
import { partModel } from "../../schemas/management/part.schema.js";
import { productModel } from "../../schemas/management/product.schema.js";
import { vendorModel } from "../../schemas/management/vendor.schema.js";
import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import { userModel } from "../../schemas/management/user.schema.js";
import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
import { noteModel } from "../../schemas/misc/note.schema.js";
import mongoose from "mongoose";
import dotenv from 'dotenv';
import { jobModel } from '../../schemas/production/job.schema.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import log4js from 'log4js';
import { printerModel } from '../../schemas/production/printer.schema.js';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import { partModel } from '../../schemas/management/part.schema.js';
import { productModel } from '../../schemas/management/product.schema.js';
import { vendorModel } from '../../schemas/management/vendor.schema.js';
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { userModel } from '../../schemas/management/user.schema.js';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import { noteModel } from '../../schemas/misc/note.schema.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger("Jobs");
const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
// Map prefixes to models and id fields
@ -86,7 +85,7 @@ return {
color: object.color || undefined,
updatedAt: object.updatedAt || undefined,
};
}
};
export const getSpotlightRouteHandler = async (req, res) => {
try {
@ -100,10 +99,10 @@ export const getSpotlightRouteHandler = async (req, res) => {
const delimiter = query.substring(3, 4);
const suffix = query.substring(4);
if (delimiter == ":") {
if (delimiter == ':') {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
if (!prefixEntry || !prefixEntry.model) {
res.status(400).send({ error: "Invalid or unsupported prefix" });
res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model, idField } = prefixEntry;
@ -123,18 +122,18 @@ export const getSpotlightRouteHandler = async (req, res) => {
return;
}
// Build the response with only the required fields
const response = trimSpotlightObject(doc)
const response = trimSpotlightObject(doc);
res.status(200).send(response);
return;
}
console.log(queryParams)
console.log(queryParams);
if (Object.keys(queryParams).length > 0) {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
console.log(prefixEntry)
console.log(prefixEntry);
if (!prefixEntry || !prefixEntry.model) {
res.status(400).send({ error: "Invalid or unsupported prefix" });
res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model } = prefixEntry;
@ -142,7 +141,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
// Use req.query for search parameters
if (Object.keys(queryParams).length === 0) {
res.status(400).send({ error: "No search parameters provided" });
res.status(400).send({ error: 'No search parameters provided' });
return;
}
@ -151,19 +150,16 @@ export const getSpotlightRouteHandler = async (req, res) => {
// Perform search with limit
const limit = parseInt(req.query.limit) || 10;
const docs = await model.find(searchFilter)
.limit(limit)
.sort({ updatedAt: -1 })
.lean();
const docs = await model.find(searchFilter).limit(limit).sort({ updatedAt: -1 }).lean();
// Format response
const response = docs.map(doc => (trimSpotlightObject(doc)));
const response = docs.map((doc) => trimSpotlightObject(doc));
res.status(200).send(response);
return;
}
} catch (error) {
logger.error("Error in spotlight lookup:", error);
logger.error('Error in spotlight lookup:', error);
res.status(500).send({ error: error });
}
};

View File

@ -1,14 +1,12 @@
import dotenv from "dotenv";
import { userModel } from "../../schemas/management/user.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { userModel } from '../../schemas/management/user.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Users");
const logger = log4js.getLogger('Users');
logger.level = process.env.LOG_LEVEL;
export const listUsersRouteHandler = async (
@ -16,8 +14,8 @@ export const listUsersRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@ -31,10 +29,9 @@ export const listUsersRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
}
aggregateCommand.push({ $skip: skip });
@ -44,13 +41,10 @@ export const listUsersRouteHandler = async (
user = await userModel.aggregate(aggregateCommand);
logger.trace(
`List of users (Page ${page}, Limit ${limit}, Property ${property}):`,
user,
);
logger.trace(`List of users (Page ${page}, Limit ${limit}, Property ${property}):`, user);
res.send(user);
} catch (error) {
logger.error("Error listing users:", error);
logger.error('Error listing users:', error);
res.status(500).send({ error: error });
}
};
@ -66,18 +60,20 @@ export const getUserRouteHandler = async (req, res) => {
if (!user) {
logger.warn(`User not found with supplied id.`);
return res.status(404).send({ error: "User not found." });
return res.status(404).send({ error: 'User not found.' });
}
logger.trace(`User with ID: ${id}:`, user);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...user._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching User:", error);
logger.error('Error fetching User:', error);
res.status(500).send({ error: error.message });
}
};
@ -92,7 +88,7 @@ export const editUserRouteHandler = async (req, res) => {
if (!user) {
// Error handling
logger.warn(`User not found with supplied id.`);
return res.status(404).send({ error: "User not found." });
return res.status(404).send({ error: 'User not found.' });
}
logger.trace(`User with ID: ${id}:`, user);
@ -107,33 +103,23 @@ export const editUserRouteHandler = async (req, res) => {
email: req.body.email,
};
console.log(req.user)
console.log(req.user);
// Create audit log before updating
await newAuditLog(
user.toObject(),
updateData,
id,
'User',
req.user._id,
'User'
);
await newAuditLog(user.toObject(), updateData, id, 'User', req.user._id, 'User');
const result = await userModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await userModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No User updated.");
res.status(500).send({ error: "No users updated." });
logger.error('No User updated.');
res.status(500).send({ error: 'No users updated.' });
}
} catch (updateError) {
logger.error("Error updating user:", updateError);
logger.error('Error updating user:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching user:", fetchError);
logger.error('Error fetching user:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};

View File

@ -1,14 +1,12 @@
import dotenv from "dotenv";
import { vendorModel } from "../../schemas/management/vendor.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { vendorModel } from '../../schemas/management/vendor.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Vendors");
const logger = log4js.getLogger('Vendors');
logger.level = process.env.LOG_LEVEL;
export const listVendorsRouteHandler = async (
@ -16,8 +14,8 @@ export const listVendorsRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@ -31,9 +29,9 @@ export const listVendorsRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@ -45,13 +43,10 @@ export const listVendorsRouteHandler = async (
vendor = await vendorModel.aggregate(aggregateCommand);
logger.trace(
`List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`,
vendor,
);
logger.trace(`List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`, vendor);
res.send(vendor);
} catch (error) {
logger.error("Error listing vendors:", error);
logger.error('Error listing vendors:', error);
res.status(500).send({ error: error });
}
};
@ -67,18 +62,20 @@ export const getVendorRouteHandler = async (req, res) => {
if (!vendor) {
logger.warn(`Vendor not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Vendor with ID: ${id}:`, vendor);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...vendor._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching Vendor:", error);
logger.error('Error fetching Vendor:', error);
res.status(500).send({ error: error.message });
}
};
@ -93,7 +90,7 @@ export const editVendorRouteHandler = async (req, res) => {
if (!vendor) {
// Error handling
logger.warn(`Vendor not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Vendor with ID: ${id}:`, vendor);
@ -109,33 +106,23 @@ export const editVendorRouteHandler = async (req, res) => {
email: req.body.email,
};
console.log(req.user)
console.log(req.user);
// Create audit log before updating
await newAuditLog(
vendor.toObject(),
updateData,
id,
'Vendor',
req.user._id,
'User'
);
await newAuditLog(vendor.toObject(), updateData, id, 'Vendor', req.user._id, 'User');
const result = await vendorModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await vendorModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No Vendor updated.");
res.status(500).send({ error: "No vendors updated." });
logger.error('No Vendor updated.');
res.status(500).send({ error: 'No vendors updated.' });
}
} catch (updateError) {
logger.error("Error updating vendor:", updateError);
logger.error('Error updating vendor:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching vendor:", fetchError);
logger.error('Error fetching vendor:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -147,8 +134,8 @@ export const newVendorRouteHandler = async (req, res) => {
const result = await vendorModel.create(newVendor);
if (result.nCreated === 0) {
logger.error("No vendor created.");
res.status(500).send({ error: "No vendor created." });
logger.error('No vendor created.');
res.status(500).send({ error: 'No vendor created.' });
}
// Create audit log for new vendor
@ -161,9 +148,9 @@ export const newVendorRouteHandler = async (req, res) => {
'User'
);
res.status(200).send({ status: "ok" });
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error("Error updating vendor:", updateError);
logger.error('Error updating vendor:', updateError);
res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,30 +1,27 @@
import dotenv from "dotenv";
import { keycloak } from "../../keycloak.js";
import log4js from "log4js";
import axios from "axios";
import { userModel } from "../../schemas/management/user.schema.js";
import dotenv from 'dotenv';
import { keycloak } from '../../keycloak.js';
import log4js from 'log4js';
import axios from 'axios';
import { userModel } from '../../schemas/management/user.schema.js';
dotenv.config();
const logger = log4js.getLogger("Auth");
const logger = log4js.getLogger('Auth');
logger.level = process.env.LOG_LEVEL;
// Login handler
export const loginRouteHandler = (req, res) => {
// Get the redirect URL from form data or default to production overview
const redirectUrl = req.query.redirect_uri || "/production/overview";
const redirectUrl = req.query.redirect_uri || '/production/overview';
// Store the original URL to redirect after login
const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
const callbackUrl = encodeURIComponent(
`${process.env.APP_URL_API}/auth/callback`,
);
const callbackUrl = encodeURIComponent(`${process.env.APP_URL_API}/auth/callback`);
const state = encodeURIComponent(redirectUrl);
logger.warn(req.query.redirect_uri);
res.redirect(
`${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`,
`${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
);
};
@ -66,7 +63,7 @@ const fetchAndStoreUser = async (req, token) => {
return fullUserInfo;
} catch (error) {
logger.error("Error fetching and storing user:", error);
logger.error('Error fetching and storing user:', error);
throw error;
}
};
@ -77,22 +74,22 @@ export const loginCallbackRouteHandler = (req, res) => {
// Extract the code and state from the query parameters
const code = req.query.code;
const state = req.query.state || "/production/overview";
const state = req.query.state || '/production/overview';
if (!code) {
return res.status(400).send("Authorization code missing");
return res.status(400).send('Authorization code missing');
}
// Exchange the code for tokens manually
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
const redirectUri = `${process.env.APP_URL_API || "http://localhost:8080"}/auth/callback`;
const redirectUri = `${process.env.APP_URL_API || 'http://localhost:8080'}/auth/callback`;
// Make a POST request to exchange the code for tokens
axios
.post(
tokenUrl,
new URLSearchParams({
grant_type: "authorization_code",
grant_type: 'authorization_code',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
code: code,
@ -100,9 +97,9 @@ export const loginCallbackRouteHandler = (req, res) => {
}).toString(),
{
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
'Content-Type': 'application/x-www-form-urlencoded',
},
}
)
.then(async (response) => {
// Store tokens in session
@ -112,7 +109,7 @@ export const loginCallbackRouteHandler = (req, res) => {
id_token: response.data.id_token,
expires_at: new Date().getTime() + response.data.expires_in * 1000,
};
req.session["keycloak-token"] = tokenData;
req.session['keycloak-token'] = tokenData;
try {
// Fetch and store user data
@ -120,21 +117,16 @@ export const loginCallbackRouteHandler = (req, res) => {
// Save session and redirect to the original URL
req.session.save(() => {
res.redirect(
(process.env.APP_URL_CLIENT || "http://localhost:3000") + state,
);
res.redirect((process.env.APP_URL_CLIENT || 'http://localhost:3000') + state);
});
} catch (error) {
logger.error("Error during user setup:", error);
res.status(500).send("Error setting up user session");
logger.error('Error during user setup:', error);
res.status(500).send('Error setting up user session');
}
})
.catch((error) => {
console.error(
"Token exchange error:",
error.response?.data || error.message,
);
res.status(500).send("Authentication failed");
console.error('Token exchange error:', error.response?.data || error.message);
res.status(500).send('Authentication failed');
});
};
@ -161,13 +153,10 @@ const createOrUpdateUser = async (userInfo) => {
name,
firstName,
lastName,
updatedAt: new Date()
updatedAt: new Date(),
};
await userModel.updateOne(
{ username },
{ $set: updateData }
);
await userModel.updateOne({ username }, { $set: updateData });
// Fetch the updated user to return
return await userModel.findOne({ username });
@ -181,14 +170,14 @@ const createOrUpdateUser = async (userInfo) => {
email,
name,
firstName,
lastName
lastName,
});
await newUser.save();
return newUser;
}
} catch (error) {
logger.error("Error creating/updating user:", error);
logger.error('Error creating/updating user:', error);
throw error;
}
};
@ -197,31 +186,29 @@ export const userRouteHandler = (req, res) => {
if (req.session && req.session.user) {
res.json(req.session.user);
} else {
res.status(401).json({ error: "Not authenticated" });
res.status(401).json({ error: 'Not authenticated' });
}
};
// Logout handler
export const logoutRouteHandler = (req, res) => {
// Get the redirect URL from query or default to login page
const redirectUrl = req.query.redirect_uri || "/login";
const redirectUrl = req.query.redirect_uri || '/login';
// Destroy the session
req.session.destroy((err) => {
if (err) {
logger.error("Error destroying session:", err);
return res.status(500).json({ error: "Failed to logout" });
logger.error('Error destroying session:', err);
return res.status(500).json({ error: 'Failed to logout' });
}
// Construct the Keycloak logout URL with the redirect URI
const logoutUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/logout`;
const encodedRedirectUri = encodeURIComponent(
`${process.env.APP_URL_CLIENT}${redirectUrl}`,
);
const encodedRedirectUri = encodeURIComponent(`${process.env.APP_URL_CLIENT}${redirectUrl}`);
// Redirect to Keycloak logout with the redirect URI
res.redirect(
`${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`,
`${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`
);
});
};
@ -245,36 +232,32 @@ export const getUserInfoHandler = (req, res) => {
email: token.content.email,
name:
token.content.name ||
`${token.content.given_name || ""} ${token.content.family_name || ""}`.trim(),
`${token.content.given_name || ''} ${token.content.family_name || ''}`.trim(),
roles: token.content.realm_access?.roles || [],
};
return res.json(userInfo);
}
return res.status(401).json({ error: "Not authenticated" });
return res.status(401).json({ error: 'Not authenticated' });
};
// Register route - Since we're using Keycloak, registration should be handled there
// This endpoint will redirect to Keycloak's registration page
export const registerRouteHandler = (req, res) => {
const registrationUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/registrations`;
const redirectUri = encodeURIComponent(
process.env.APP_URL_CLIENT + "/auth/login",
);
const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
res.redirect(
`${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
`${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
);
};
// Forgot password handler - redirect to Keycloak's reset password page
export const forgotPasswordRouteHandler = (req, res) => {
const resetUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/login-actions/reset-credentials`;
const redirectUri = encodeURIComponent(
process.env.APP_URL_CLIENT + "/auth/login",
);
const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
res.redirect(
`${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
`${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
);
};
@ -282,34 +265,34 @@ export const forgotPasswordRouteHandler = (req, res) => {
export const refreshTokenRouteHandler = (req, res) => {
if (
!req.session ||
!req.session["keycloak-token"] ||
!req.session["keycloak-token"].refresh_token
!req.session['keycloak-token'] ||
!req.session['keycloak-token'].refresh_token
) {
return res.status(401).json({ error: "No refresh token available" });
return res.status(401).json({ error: 'No refresh token available' });
}
const refreshToken = req.session["keycloak-token"].refresh_token;
const refreshToken = req.session['keycloak-token'].refresh_token;
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
axios
.post(
tokenUrl,
new URLSearchParams({
grant_type: "refresh_token",
grant_type: 'refresh_token',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
refresh_token: refreshToken,
}).toString(),
{
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
'Content-Type': 'application/x-www-form-urlencoded',
},
}
)
.then((response) => {
// Update session with new tokens
req.session["keycloak-token"] = {
...req.session["keycloak-token"],
req.session['keycloak-token'] = {
...req.session['keycloak-token'],
access_token: response.data.access_token,
refresh_token: response.data.refresh_token,
expires_at: new Date().getTime() + response.data.expires_in * 1000,
@ -319,22 +302,19 @@ export const refreshTokenRouteHandler = (req, res) => {
req.session.save(() => {
res.json({
access_token: response.data.access_token,
expires_at: req.session["keycloak-token"].expires_at,
expires_at: req.session['keycloak-token'].expires_at,
});
});
})
.catch((error) => {
logger.error(
"Token refresh error:",
error.response?.data || error.message,
);
logger.error('Token refresh error:', error.response?.data || error.message);
// If refresh token is invalid, clear the session
if (error.response?.status === 400) {
req.session.destroy();
}
res.status(500).json({ error: "Failed to refresh token" });
res.status(500).json({ error: 'Failed to refresh token' });
});
};

View File

@ -1,13 +1,12 @@
import dotenv from "dotenv";
import { noteModel } from "../../schemas/misc/note.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { noteModel } from '../../schemas/misc/note.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Notes");
const logger = log4js.getLogger('Notes');
logger.level = process.env.LOG_LEVEL;
export const listNotesRouteHandler = async (
@ -15,8 +14,8 @@ export const listNotesRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
filter = {},
property = '',
filter = {}
) => {
try {
const skip = (page - 1) * limit;
@ -29,35 +28,35 @@ export const listNotesRouteHandler = async (
aggregateCommand.push({
$lookup: {
from: "users", // The collection name (usually lowercase plural)
localField: "user", // The field in your current model
foreignField: "_id", // The field in the users collection
as: "user", // The output field name
from: 'users', // The collection name (usually lowercase plural)
localField: 'user', // The field in your current model
foreignField: '_id', // The field in the users collection
as: 'user', // The output field name
},
});
aggregateCommand.push({ $unwind: "$user" });
aggregateCommand.push({ $unwind: '$user' });
aggregateCommand.push({
$lookup: {
from: "notetypes", // The collection name (usually lowercase plural)
localField: "noteType", // The field in your current model
foreignField: "_id", // The field in the users collection
as: "noteType", // The output field name
from: 'notetypes', // The collection name (usually lowercase plural)
localField: 'noteType', // The field in your current model
foreignField: '_id', // The field in the users collection
as: 'noteType', // The output field name
},
});
aggregateCommand.push({ $unwind: "$noteType" });
aggregateCommand.push({ $unwind: '$noteType' });
aggregateCommand.push({
$project: {
name: 1,
_id: 1,
createdAt: 1,
updatedAt: 1,
"noteType._id": 1,
"noteType.name": 1,
"noteType.color": 1,
"user._id": 1,
"user.name": 1,
'noteType._id': 1,
'noteType.name': 1,
'noteType.color': 1,
'user._id': 1,
'user.name': 1,
content: 1,
parent: 1
parent: 1,
},
});
@ -66,14 +65,10 @@ export const listNotesRouteHandler = async (
notes = await noteModel.aggregate(aggregateCommand);
logger.trace(
`List of notes (Page ${page}, Limit ${limit}, Property ${property}):`,
notes,
);
logger.trace(`List of notes (Page ${page}, Limit ${limit}, Property ${property}):`, notes);
res.send(notes);
} catch (error) {
logger.error("Error listing notes:", error);
logger.error('Error listing notes:', error);
res.status(500).send({ error: error });
}
};
@ -87,18 +82,20 @@ export const getNoteRouteHandler = async (req, res) => {
if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: "Note not found." });
return res.status(404).send({ error: 'Note not found.' });
}
logger.trace(`Note with ID: ${id}:`, note);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...note._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching note:", error);
logger.error('Error fetching note:', error);
res.status(500).send({ error: error.message });
}
};
@ -110,7 +107,7 @@ export const editNoteRouteHandler = async (req, res) => {
if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: "Note not found." });
return res.status(404).send({ error: 'Note not found.' });
}
logger.trace(`Note with ID: ${id}:`, note);
@ -124,30 +121,20 @@ export const editNoteRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(
note.toObject(),
updateData,
id,
'Note',
req.user._id,
'User'
);
await newAuditLog(note.toObject(), updateData, id, 'Note', req.user._id, 'User');
const result = await noteModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await noteModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No note updated.");
res.status(500).send({ error: "No notes updated." });
logger.error('No note updated.');
res.status(500).send({ error: 'No notes updated.' });
}
} catch (updateError) {
logger.error("Error updating note:", updateError);
logger.error('Error updating note:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching note:", fetchError);
logger.error('Error fetching note:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -159,23 +146,16 @@ export const newNoteRouteHandler = async (req, res) => {
const result = await noteModel.create(newNote);
if (result.nCreated === 0) {
logger.error("No note created.");
res.status(500).send({ error: "No note created." });
logger.error('No note created.');
res.status(500).send({ error: 'No note created.' });
}
// Create audit log for new note
await newAuditLog(
{},
newNote,
result._id,
'Note',
req.user._id,
'User'
);
await newAuditLog({}, newNote, result._id, 'Note', req.user._id, 'User');
res.status(200).send({ status: "ok" });
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error("Error creating note:", updateError);
logger.error('Error creating note:', updateError);
res.status(500).send({ error: updateError.message });
}
};
@ -187,13 +167,13 @@ export const deleteNoteRouteHandler = async (req, res) => {
if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: "Note not found." });
return res.status(404).send({ error: 'Note not found.' });
}
// Check if the current user owns this note
if (note.user.toString() !== req.user._id.toString()) {
logger.warn(`User ${req.user._id} attempted to delete note ${id} owned by user ${note.user}`);
return res.status(403).send({ error: "You can only delete your own notes." });
return res.status(403).send({ error: 'You can only delete your own notes.' });
}
logger.trace(`Deleting note with ID: ${id} and all its children`);
@ -202,25 +182,16 @@ export const deleteNoteRouteHandler = async (req, res) => {
const deletedNoteIds = await recursivelyDeleteNotes(id);
// Create audit log for the deletion
await newAuditLog(
note.toObject(),
{},
id,
'Note',
req.user._id,
'User',
'DELETE'
);
await newAuditLog(note.toObject(), {}, id, 'Note', req.user._id, 'User', 'DELETE');
logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
res.send({
status: "ok",
status: 'ok',
deletedNoteIds: deletedNoteIds,
message: `Deleted ${deletedNoteIds.length} notes`
message: `Deleted ${deletedNoteIds.length} notes`,
});
} catch (error) {
logger.error("Error deleting note:", error);
logger.error('Error deleting note:', error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,26 +1,25 @@
import dotenv from "dotenv";
import log4js from "log4js";
import mongoose from "mongoose";
import { jobModel } from "../../schemas/production/job.schema.js";
import { subJobModel } from "../../schemas/production/subjob.schema.js";
import { printerModel } from "../../schemas/production/printer.schema.js";
import { filamentModel } from "../../schemas/management/filament.schema.js";
import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
import { partModel } from "../../schemas/management/part.schema.js";
import { productModel } from "../../schemas/management/product.schema.js";
import { vendorModel } from "../../schemas/management/vendor.schema.js";
import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import { userModel } from "../../schemas/management/user.schema.js";
import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
import { noteModel } from "../../schemas/misc/note.schema.js";
import dotenv from 'dotenv';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { jobModel } from '../../schemas/production/job.schema.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import { printerModel } from '../../schemas/production/printer.schema.js';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import { partModel } from '../../schemas/management/part.schema.js';
import { productModel } from '../../schemas/management/product.schema.js';
import { vendorModel } from '../../schemas/management/vendor.schema.js';
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { userModel } from '../../schemas/management/user.schema.js';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import { noteModel } from '../../schemas/misc/note.schema.js';
dotenv.config();
const logger = log4js.getLogger("Jobs");
const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
// Map prefixes to models and id fields
@ -86,7 +85,7 @@ return {
color: object.color || undefined,
updatedAt: object.updatedAt || undefined,
};
}
};
export const getSpotlightRouteHandler = async (req, res) => {
try {
@ -100,10 +99,10 @@ export const getSpotlightRouteHandler = async (req, res) => {
const delimiter = query.substring(3, 4);
const suffix = query.substring(4);
if (delimiter == ":") {
if (delimiter == ':') {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
if (!prefixEntry || !prefixEntry.model) {
res.status(400).send({ error: "Invalid or unsupported prefix" });
res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model, idField } = prefixEntry;
@ -123,18 +122,18 @@ export const getSpotlightRouteHandler = async (req, res) => {
return;
}
// Build the response with only the required fields
const response = trimSpotlightObject(doc)
const response = trimSpotlightObject(doc);
res.status(200).send(response);
return;
}
console.log(queryParams)
console.log(queryParams);
if (Object.keys(queryParams).length > 0) {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
console.log(prefixEntry)
console.log(prefixEntry);
if (!prefixEntry || !prefixEntry.model) {
res.status(400).send({ error: "Invalid or unsupported prefix" });
res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model } = prefixEntry;
@ -142,7 +141,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
// Use req.query for search parameters
if (Object.keys(queryParams).length === 0) {
res.status(400).send({ error: "No search parameters provided" });
res.status(400).send({ error: 'No search parameters provided' });
return;
}
@ -151,19 +150,16 @@ export const getSpotlightRouteHandler = async (req, res) => {
// Perform search with limit
const limit = parseInt(req.query.limit) || 10;
const docs = await model.find(searchFilter)
.limit(limit)
.sort({ updatedAt: -1 })
.lean();
const docs = await model.find(searchFilter).limit(limit).sort({ updatedAt: -1 }).lean();
// Format response
const response = docs.map(doc => (trimSpotlightObject(doc)));
const response = docs.map((doc) => trimSpotlightObject(doc));
res.status(200).send(response);
return;
}
} catch (error) {
logger.error("Error in spotlight lookup:", error);
logger.error('Error in spotlight lookup:', error);
res.status(500).send({ error: error });
}
};

View File

@ -1,21 +1,18 @@
import dotenv from "dotenv";
import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
import { filamentModel } from "../../schemas/management/filament.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import multer from "multer";
import crypto from "crypto";
import path from "path";
import fs from "fs";
import mongoose from "mongoose";
import { newAuditLog } from "../../util/index.js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import { extractConfigBlock } from "../../util/index.js";
import dotenv from 'dotenv';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import log4js from 'log4js';
import multer from 'multer';
import path from 'path';
import fs from 'fs';
import mongoose from 'mongoose';
import { newAuditLog } from '../../utils.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { extractConfigBlock } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger("GCodeFiles");
const logger = log4js.getLogger('GCodeFiles');
logger.level = process.env.LOG_LEVEL;
// Set storage engine
@ -23,7 +20,7 @@ const gcodeStorage = multer.diskStorage({
destination: process.env.GCODE_STORAGE,
filename: async function (req, file, cb) {
// Retrieve custom file name from request body
const customFileName = req.params.id || "default"; // Default to 'default' if not provided
const customFileName = req.params.id || 'default'; // Default to 'default' if not provided
// Create the final filename ensuring it ends with .gcode
const finalFilename = `${customFileName}.gcode`;
@ -40,7 +37,7 @@ const gcodeUpload = multer({
fileFilter: function (req, file, cb) {
checkFileType(file, cb);
},
}).single("gcodeFile"); // The name attribute of the file input in the HTML form
}).single('gcodeFile'); // The name attribute of the file input in the HTML form
// Check file type
function checkFileType(file, cb) {
@ -53,7 +50,7 @@ function checkFileType(file, cb) {
console.log(file);
return cb(null, true);
} else {
cb("Error: .g, .gco, and .gcode files only!");
cb('Error: .g, .gco, and .gcode files only!');
}
}
@ -62,11 +59,11 @@ export const listGCodeFilesRouteHandler = async (
res,
page = 1,
limit = 25,
property = "",
property = '',
filter = {},
search = "",
sort = "",
order = "ascend"
search = '',
sort = '',
order = 'ascend'
) => {
try {
// Calculate the skip value based on the page number and limit
@ -88,60 +85,60 @@ export const listGCodeFilesRouteHandler = async (
aggregateCommand.push({
$lookup: {
from: "filaments", // The name of the Filament collection
localField: "filament",
foreignField: "_id",
as: "filament",
from: 'filaments', // The name of the Filament collection
localField: 'filament',
foreignField: '_id',
as: 'filament',
},
});
aggregateCommand.push({
$unwind: {
path: "$filament",
path: '$filament',
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
},
});
aggregateCommand.push({
$addFields: {
filament: "$filament",
filament: '$filament',
},
});
aggregateCommand.push({
$lookup: {
from: "vendors", // The collection name (usually lowercase plural)
localField: "filament.vendor", // The field in your current model
foreignField: "_id", // The field in the products collection
as: "filament.vendor", // The output field name
from: 'vendors', // The collection name (usually lowercase plural)
localField: 'filament.vendor', // The field in your current model
foreignField: '_id', // The field in the products collection
as: 'filament.vendor', // The output field name
},
});
aggregateCommand.push({ $unwind: "$filament.vendor" });
aggregateCommand.push({ $unwind: '$filament.vendor' });
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != "") {
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({
$project: {
"filament.gcodeFileInfo.estimatedPrintingTimeNormalMode": 0,
'filament.gcodeFileInfo.estimatedPrintingTimeNormalMode': 0,
url: 0,
"filament.image": 0,
"filament.createdAt": 0,
"filament.updatedAt": 0,
'filament.image': 0,
'filament.createdAt': 0,
'filament.updatedAt': 0,
},
});
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === "descend" ? -1 : 1;
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@ -154,11 +151,11 @@ export const listGCodeFilesRouteHandler = async (
logger.trace(
`List of gcode files (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
gcodeFile,
gcodeFile
);
res.send(gcodeFile);
} catch (error) {
logger.error("Error listing gcode files:", error);
logger.error('Error listing gcode files:', error);
res.status(500).send({ error: error });
}
};
@ -174,25 +171,22 @@ export const getGCodeFileContentRouteHandler = async (req, res) => {
if (!gcodeFile) {
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Returning GCode File contents with ID: ${id}:`);
const filePath = path.join(
process.env.GCODE_STORAGE,
gcodeFile.gcodeFileName,
);
const filePath = path.join(process.env.GCODE_STORAGE, gcodeFile.gcodeFileName);
// Read the file
fs.readFile(filePath, "utf8", (err, data) => {
fs.readFile(filePath, 'utf8', (err, data) => {
if (err) {
if (err.code === "ENOENT") {
if (err.code === 'ENOENT') {
// File not found
return res.status(404).send({ error: "File not found!" });
return res.status(404).send({ error: 'File not found!' });
} else {
// Other errors
return res.status(500).send({ error: "Error reading file." });
return res.status(500).send({ error: 'Error reading file.' });
}
}
@ -200,7 +194,7 @@ export const getGCodeFileContentRouteHandler = async (req, res) => {
res.send(data);
});
} catch (error) {
logger.error("Error fetching GCodeFile:", error);
logger.error('Error fetching GCodeFile:', error);
res.status(500).send({ error: error.message });
}
};
@ -215,7 +209,7 @@ export const editGCodeFileRouteHandler = async (req, res) => {
if (!gcodeFile) {
// Error handling
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
@ -228,30 +222,20 @@ export const editGCodeFileRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(
gcodeFile.toObject(),
updateData,
id,
'GCodeFile',
req.user._id,
'User'
);
await newAuditLog(gcodeFile.toObject(), updateData, id, 'GCodeFile', req.user._id, 'User');
const result = await gcodeFileModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await gcodeFileModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No gcodeFile updated.");
res.status(500).send({ error: "No gcodeFiles updated." });
logger.error('No gcodeFile updated.');
res.status(500).send({ error: 'No gcodeFiles updated.' });
}
} catch (updateError) {
logger.error("Error updating gcodeFile:", updateError);
logger.error('Error updating gcodeFile:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching gcodeFile:", fetchError);
logger.error('Error fetching gcodeFile:', fetchError);
//res.status(500).send({ error: fetchError.message });
}
};
@ -269,11 +253,11 @@ export const newGCodeFileRouteHandler = async (req, res) => {
if (!filament) {
logger.warn(`Filament not found with supplied id.`);
return res.status(404).send({ error: "Filament not found." });
return res.status(404).send({ error: 'Filament not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
} catch (error) {
logger.error("Error fetching filament:", error);
logger.error('Error fetching filament:', error);
return res.status(500).send({ error: error.message });
}
@ -289,23 +273,16 @@ export const newGCodeFileRouteHandler = async (req, res) => {
const result = await gcodeFileModel.create(newGCodeFile);
if (result.nCreated === 0) {
logger.error("No gcode file created.");
res.status(500).send({ error: "No gcode file created." });
logger.error('No gcode file created.');
res.status(500).send({ error: 'No gcode file created.' });
}
// Create audit log for new gcodefile
await newAuditLog(
{},
newGCodeFile,
result._id,
'GCodeFile',
req.user._id,
'User'
);
await newAuditLog({}, newGCodeFile, result._id, 'GCodeFile', req.user._id, 'User');
res.status(200).send({ status: "ok" });
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error("Error creating gcode file:", updateError);
logger.error('Error creating gcode file:', updateError);
res.status(500).send({ error: updateError.message });
}
};
@ -322,7 +299,7 @@ export const parseGCodeFileHandler = async (req, res) => {
if (req.file == undefined) {
return res.send({
message: "No file selected!",
message: 'No file selected!',
});
}
@ -331,7 +308,7 @@ export const parseGCodeFileHandler = async (req, res) => {
const filePath = path.join(req.file.destination, req.file.filename);
// Read the file content
const fileContent = fs.readFileSync(filePath, "utf8");
const fileContent = fs.readFileSync(filePath, 'utf8');
// Extract the config block
const configInfo = extractConfigBlock(fileContent);
@ -342,12 +319,12 @@ export const parseGCodeFileHandler = async (req, res) => {
// Optionally clean up the file after processing if it's not needed
fs.unlinkSync(filePath);
} catch (parseError) {
logger.error("Error parsing GCode file:", parseError);
logger.error('Error parsing GCode file:', parseError);
res.status(500).send({ error: parseError.message });
}
});
} catch (error) {
logger.error("Error in parseGCodeFileHandler:", error);
logger.error('Error in parseGCodeFileHandler:', error);
res.status(500).send({ error: error.message });
}
};
@ -361,7 +338,7 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
if (!gcodeFile) {
// Error handling
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`GCodeFile with ID: ${id}`);
try {
@ -373,15 +350,9 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
} else {
if (req.file == undefined) {
res.send({
message: "No file selected!",
message: 'No file selected!',
});
} else {
// Get the path to the uploaded file
const filePath = path.join(req.file.destination, req.file.filename);
// Read the file content
const fileContent = fs.readFileSync(filePath, "utf8");
// Update the gcodeFile document with the filename and the extracted config
const result = await gcodeFileModel.updateOne(
{ _id: id },
@ -389,27 +360,27 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
$set: {
gcodeFileName: req.file.filename,
},
},
}
);
if (result.nModified === 0) {
logger.error("No gcodeFile updated.");
res.status(500).send({ error: "No gcodeFiles updated." });
logger.error('No gcodeFile updated.');
res.status(500).send({ error: 'No gcodeFiles updated.' });
}
res.send({
status: "OK",
status: 'OK',
file: `${req.file.filename}`,
});
}
}
});
} catch (updateError) {
logger.error("Error updating gcodeFile:", updateError);
logger.error('Error updating gcodeFile:', updateError);
res.status(500).send({ error: updateError.message });
}
} catch (fetchError) {
logger.error("Error fetching gcodeFile:", fetchError);
logger.error('Error fetching gcodeFile:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@ -423,22 +394,24 @@ export const getGCodeFileRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("filament");
.populate('filament');
if (!gcodeFile) {
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
const auditLogs = await auditLogModel.find({
target: id
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: id,
})
.populate('owner');
res.send({ ...gcodeFile._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching GCodeFile:", error);
logger.error('Error fetching GCodeFile:', error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,23 +1,16 @@
import dotenv from "dotenv";
import mongoose from "mongoose";
import { jobModel } from "../../schemas/production/job.schema.js";
import { subJobModel } from "../../schemas/production/subjob.schema.js";
import { noteModel } from "../../schemas/misc/note.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import mongoose from 'mongoose';
import { jobModel } from '../../schemas/production/job.schema.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import log4js from 'log4js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Jobs");
const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
export const listJobsRouteHandler = async (
req,
res,
page = 1,
limit = 25,
) => {
export const listJobsRouteHandler = async (req, res, page = 1, limit = 25) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
@ -28,13 +21,13 @@ export const listJobsRouteHandler = async (
.sort({ createdAt: -1 })
.skip(skip)
.limit(limit)
.populate("subJobs", "state")
.populate("gcodeFile", "name");
.populate('subJobs', 'state')
.populate('gcodeFile', 'name');
logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
res.send(jobs);
} catch (error) {
logger.error("Error listing print jobs:", error);
logger.error('Error listing print jobs:', error);
res.status(500).send({ error: error });
}
};
@ -48,26 +41,28 @@ export const getJobRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
.populate("printers", "name state")
.populate("gcodeFile")
.populate("subJobs")
.populate("notes");
.populate('printers', 'name state')
.populate('gcodeFile')
.populate('subJobs')
.populate('notes');
if (!job) {
logger.warn(`Job not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Job with ID: ${id}:`, job);
const targetIds = [id, ...job.subJobs.map(subJob => subJob._id)];
const auditLogs = await auditLogModel.find({
target: { $in: targetIds.map(id => new mongoose.Types.ObjectId(id)) }
}).populate('owner');
const targetIds = [id, ...job.subJobs.map((subJob) => subJob._id)];
const auditLogs = await auditLogModel
.find({
target: { $in: targetIds.map((id) => new mongoose.Types.ObjectId(id)) },
})
.populate('owner');
res.send({ ...job._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching job:", error);
logger.error('Error fetching job:', error);
res.status(500).send({ error: error.message });
}
};
@ -82,27 +77,23 @@ export const editJobRouteHandler = async (req, res) => {
if (!job) {
logger.warn(`Job not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Job with ID: ${id}:`, job);
const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body;
const updateData = req.body;
const result = await jobModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await jobModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.warn("No jobs updated.");
return res.status(400).send({ error: "No jobs updated." });
logger.warn('No jobs updated.');
return res.status(400).send({ error: 'No jobs updated.' });
}
res.send({ message: "Print job updated successfully" });
res.send({ message: 'Print job updated successfully' });
} catch (error) {
logger.error("Error updating job:", error);
logger.error('Error updating job:', error);
res.status(500).send({ error: error.message });
}
};
@ -112,9 +103,7 @@ export const createJobRouteHandler = async (req, res) => {
const { gcodeFile, printers, quantity = 1 } = req.body;
if (!printers || printers.length === 0) {
return res
.status(400)
.send({ error: "At least one printer must be specified" });
return res.status(400).send({ error: 'At least one printer must be specified' });
}
// Convert printer IDs to ObjectIds
@ -122,14 +111,14 @@ export const createJobRouteHandler = async (req, res) => {
// Create new print job
const newJob = new jobModel({
state: { type: "draft" },
state: { type: 'draft' },
printers: printerIds,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
quantity,
subJobs: [], // Initialize empty array for subjob references
createdAt: new Date(),
updatedAt: new Date(),
startedAt: null
startedAt: null,
});
// Save the print job first to get its ID
@ -143,25 +132,23 @@ export const createJobRouteHandler = async (req, res) => {
job: savedJob._id,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
subJobId: `subjob-${index + 1}`,
state: { type: "draft" },
state: { type: 'draft' },
number: index + 1,
createdAt: new Date(),
updatedAt: new Date(),
});
return subJob.save();
}),
})
);
// Update the print job with the subjob references
savedJob.subJobs = subJobs.map((subJob) => subJob._id);
await savedJob.save();
logger.trace(
`Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`,
);
logger.trace(`Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`);
res.status(201).send({ job: savedJob, subJobs });
} catch (error) {
logger.error("Error creating print job:", error);
logger.error('Error creating print job:', error);
res.status(500).send({ error: error.message });
}
};
@ -171,10 +158,10 @@ export const getJobStatsRouteHandler = async (req, res) => {
const stats = await jobModel.aggregate([
{
$group: {
_id: "$state.type",
count: { $sum: 1 }
}
}
_id: '$state.type',
count: { $sum: 1 },
},
},
]);
// Transform the results into a more readable format
@ -183,11 +170,10 @@ export const getJobStatsRouteHandler = async (req, res) => {
return acc;
}, {});
logger.trace("Print job stats by state:", formattedStats);
logger.trace('Print job stats by state:', formattedStats);
res.send(formattedStats);
} catch (error) {
logger.error("Error fetching print job stats:", error);
logger.error('Error fetching print job stats:', error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,21 +1,16 @@
import dotenv from "dotenv";
import { printerModel } from "../../schemas/production/printer.schema.js";
import log4js from "log4js";
import { newAuditLog } from "../../util/index.js";
import mongoose from "mongoose";
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
import dotenv from 'dotenv';
import { printerModel } from '../../schemas/production/printer.schema.js';
import log4js from 'log4js';
import { newAuditLog } from '../../utils.js';
import mongoose from 'mongoose';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger("Printers");
const logger = log4js.getLogger('Printers');
logger.level = process.env.LOG_LEVEL;
export const listPrintersRouteHandler = async (
req,
res,
page = 1,
limit = 25,
) => {
export const listPrintersRouteHandler = async (req, res, page = 1, limit = 25) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
@ -26,7 +21,7 @@ export const listPrintersRouteHandler = async (
logger.trace(`List of printers (Page ${page}, Limit ${limit}):`);
res.send(printers);
} catch (error) {
logger.error("Error listing users:", error);
logger.error('Error listing users:', error);
res.status(500).send({ error: error });
}
};
@ -38,41 +33,45 @@ export const getPrinterRouteHandler = async (req, res) => {
// Fetch the printer with the given remote address
const printer = await printerModel
.findOne({ _id: id })
.populate("subJobs")
.populate("currentJob")
.populate('subJobs')
.populate('currentJob')
.populate({
path: "currentJob",
path: 'currentJob',
populate: {
path: "gcodeFile",
path: 'gcodeFile',
},
})
.populate("currentSubJob")
.populate('currentSubJob')
.populate({
path: "subJobs",
path: 'subJobs',
populate: {
path: "job",
path: 'job',
},
})
.populate("vendor")
.populate({ path: "currentFilamentStock",
.populate('vendor')
.populate({
path: 'currentFilamentStock',
populate: {
path: "filament",
},})
path: 'filament',
},
});
if (!printer) {
logger.warn(`Printer with id ${id} not found.`);
return res.status(404).send({ error: "Printer not found" });
return res.status(404).send({ error: 'Printer not found' });
}
logger.trace(`Printer with id ${id}:`, printer);
const auditLogs = await auditLogModel.find({
target: new mongoose.Types.ObjectId(id)
}).populate('owner');
const auditLogs = await auditLogModel
.find({
target: new mongoose.Types.ObjectId(id),
})
.populate('owner');
res.send({ ...printer._doc, auditLogs: auditLogs });
} catch (error) {
logger.error("Error fetching printer:", error);
logger.error('Error fetching printer:', error);
res.status(500).send({ error: error.message });
}
};
@ -85,7 +84,7 @@ export const editPrinterRouteHandler = async (req, res) => {
if (!printer) {
logger.warn(`Printer not found with supplied id.`);
return res.status(404).send({ error: "Printer not found." });
return res.status(404).send({ error: 'Printer not found.' });
}
try {
@ -98,55 +97,41 @@ export const editPrinterRouteHandler = async (req, res) => {
};
// Create audit log before updating
await newAuditLog(
printer.toObject(),
updateData,
id,
'Printer',
req.user._id,
'User'
);
await newAuditLog(printer.toObject(), updateData, id, 'Printer', req.user._id, 'User');
const result = await printerModel.updateOne(
{ _id: id },
{ $set: updateData },
);
const result = await printerModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error("No printers updated.");
res.status(500).send({ error: "No printers updated." });
logger.error('No printers updated.');
res.status(500).send({ error: 'No printers updated.' });
}
} catch (updateError) {
logger.error("Error updating printer:", updateError);
logger.error('Error updating printer:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
res.send('OK');
} catch (fetchError) {
logger.error("Error fetching printer:", fetchError);
logger.error('Error fetching printer:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
export const createPrinterRouteHandler = async (req, res) => {
try {
const { name, moonraker, tags = [], firmware = "n/a" } = req.body;
const { name, moonraker, tags = [], firmware = 'n/a' } = req.body;
// Validate required fields
if (!name || !moonraker) {
logger.warn("Missing required fields in printer creation request");
logger.warn('Missing required fields in printer creation request');
return res.status(400).send({
error:
"Missing required fields. name and moonraker configuration are required.",
error: 'Missing required fields. name and moonraker configuration are required.',
});
}
// Validate moonraker configuration
if (!moonraker.host || !moonraker.port || !moonraker.protocol) {
logger.warn(
"Invalid moonraker configuration in printer creation request",
);
logger.warn('Invalid moonraker configuration in printer creation request');
return res.status(400).send({
error:
"Invalid moonraker configuration. host, port, protocol are required.",
error: 'Invalid moonraker configuration. host, port, protocol are required.',
});
}
@ -158,7 +143,7 @@ export const createPrinterRouteHandler = async (req, res) => {
firmware,
online: false,
state: {
type: "offline",
type: 'offline',
},
});
@ -166,19 +151,12 @@ export const createPrinterRouteHandler = async (req, res) => {
const savedPrinter = await newPrinter.save();
// Create audit log for new printer
await newAuditLog(
{},
newPrinter.toObject(),
savedPrinter._id,
'Printer',
req.user._id,
'User'
);
await newAuditLog({}, newPrinter.toObject(), savedPrinter._id, 'Printer', req.user._id, 'User');
logger.info(`Created new printer: ${name}`);
res.status(201).send(savedPrinter);
} catch (error) {
logger.error("Error creating printer:", error);
logger.error('Error creating printer:', error);
res.status(500).send({ error: error.message });
}
};
@ -188,10 +166,10 @@ export const getPrinterStatsRouteHandler = async (req, res) => {
const stats = await printerModel.aggregate([
{
$group: {
_id: "$state.type",
count: { $sum: 1 }
}
}
_id: '$state.type',
count: { $sum: 1 },
},
},
]);
// Transform the results into a more readable format
@ -200,10 +178,10 @@ export const getPrinterStatsRouteHandler = async (req, res) => {
return acc;
}, {});
logger.trace("Printer stats by state:", formattedStats);
logger.trace('Printer stats by state:', formattedStats);
res.send(formattedStats);
} catch (error) {
logger.error("Error fetching printer stats:", error);
logger.error('Error fetching printer stats:', error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,12 +1,12 @@
import { ObjectId } from "mongodb"; // Only needed in Node.js with MongoDB driver
import { ObjectId } from 'mongodb'; // Only needed in Node.js with MongoDB driver
function parseFilter(property, value) {
if (typeof value === "string") {
if (typeof value === 'string') {
const trimmed = value.trim();
// Handle booleans
if (trimmed.toLowerCase() === "true") return { [property]: true };
if (trimmed.toLowerCase() === "false") return { [property]: false };
if (trimmed.toLowerCase() === 'true') return { [property]: true };
if (trimmed.toLowerCase() === 'false') return { [property]: false };
// Handle ObjectId (24-char hex)
if (/^[a-f\d]{24}$/i.test(trimmed) && trimmed.length >= 24) {
@ -22,8 +22,8 @@ function parseFilter(property, value) {
return {
[property]: {
$regex: trimmed,
$options: "i"
}
$options: 'i',
},
};
}
@ -41,29 +41,25 @@ function convertToCamelCase(obj) {
// Convert the key to camelCase
let camelKey = key
// First handle special cases with spaces, brackets and other characters
.replace(/\s*\[.*?\]\s*/g, "") // Remove brackets and their contents
.replace(/\s+/g, " ") // Normalize spaces
.replace(/\s*\[.*?\]\s*/g, '') // Remove brackets and their contents
.replace(/\s+/g, ' ') // Normalize spaces
.trim()
// Split by common separators (space, underscore, hyphen)
.split(/[\s_-]/)
// Convert to camelCase
.map((word, index) => {
// Remove any non-alphanumeric characters
word = word.replace(/[^a-zA-Z0-9]/g, "");
word = word.replace(/[^a-zA-Z0-9]/g, '');
// Lowercase first word, uppercase others
return index === 0
? word.toLowerCase()
: word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
})
.join("");
.join('');
// Handle values that are objects recursively
if (
value !== null &&
typeof value === "object" &&
!Array.isArray(value)
) {
if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
result[camelKey] = convertToCamelCase(value);
} else {
result[camelKey] = value;
@ -78,14 +74,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
const configObject = {};
// Extract header information
const headerBlockRegex =
/; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
const headerBlockRegex = /; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
const headerBlockMatch = fileContent.match(headerBlockRegex);
if (headerBlockMatch && headerBlockMatch[1]) {
const headerLines = headerBlockMatch[1].split("\n");
const headerLines = headerBlockMatch[1].split('\n');
headerLines.forEach((line) => {
// Match lines with info after semicolon
const headerLineRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
const keyValueRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
const simpleValueRegex = /^\s*;\s*(.*?)\s*$/;
@ -96,24 +89,22 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
let value = match[2].trim();
// Try to convert value to appropriate type
if (!isNaN(value) && value !== "") {
if (!isNaN(value) && value !== '') {
value = Number(value);
}
configObject[key] = value;
} else {
// Try the simple format like "; generated by OrcaSlicer 2.1.1 on 2025-04-28 at 13:30:11"
match = line.match(simpleValueRegex);
if (match && match[1] && !match[1].includes("HEADER_BLOCK")) {
if (match && match[1] && !match[1].includes('HEADER_BLOCK')) {
const text = match[1].trim();
// Extract slicer info
const slicerMatch = text.match(
/generated by (.*?) on (.*?) at (.*?)$/,
);
const slicerMatch = text.match(/generated by (.*?) on (.*?) at (.*?)$/);
if (slicerMatch) {
configObject["slicer"] = slicerMatch[1].trim();
configObject["date"] = slicerMatch[2].trim();
configObject["time"] = slicerMatch[3].trim();
configObject['slicer'] = slicerMatch[1].trim();
configObject['date'] = slicerMatch[2].trim();
configObject['time'] = slicerMatch[3].trim();
} else {
// Just add as a general header entry if it doesn't match any specific pattern
const key = `header_${Object.keys(configObject).length}`;
@ -125,12 +116,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
}
// Extract thumbnail data
const thumbnailBlockRegex =
/; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
const thumbnailBlockRegex = /; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
const thumbnailBlockMatch = fileContent.match(thumbnailBlockRegex);
if (thumbnailBlockMatch && thumbnailBlockMatch[1]) {
const thumbnailLines = thumbnailBlockMatch[1].split("\n");
let base64Data = "";
const thumbnailLines = thumbnailBlockMatch[1].split('\n');
let base64Data = '';
let thumbnailInfo = {};
thumbnailLines.forEach((line) => {
@ -142,13 +132,10 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
thumbnailInfo.width = parseInt(match[1], 10);
thumbnailInfo.height = parseInt(match[2], 10);
thumbnailInfo.size = parseInt(match[3], 10);
} else if (
line.trim().startsWith("; ") &&
!line.includes("THUMBNAIL_BLOCK")
) {
} else if (line.trim().startsWith('; ') && !line.includes('THUMBNAIL_BLOCK')) {
// Collect base64 data (remove the leading semicolon and space and thumbnail end)
const dataLine = line.trim().substring(2);
if (dataLine && dataLine != "thumbnail end") {
if (dataLine && dataLine != 'thumbnail end') {
base64Data += dataLine;
}
}
@ -164,12 +151,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
}
// Extract CONFIG_BLOCK
const configBlockRegex =
/; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
const configBlockRegex = /; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
const configBlockMatch = fileContent.match(configBlockRegex);
if (configBlockMatch && configBlockMatch[1]) {
// Extract each config line
const configLines = configBlockMatch[1].split("\n");
const configLines = configBlockMatch[1].split('\n');
// Process each line
configLines.forEach((line) => {
// Check if the line starts with a semicolon and has an equals sign
@ -179,11 +165,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
if (value === "true" || value === "false") {
value = value === "true";
} else if (!isNaN(value) && value !== "") {
if (value === 'true' || value === 'false') {
value = value === 'true';
} else if (!isNaN(value) && value !== '') {
// Check if it's a number (but not a percentage)
if (!value.includes("%")) {
if (!value.includes('%')) {
value = Number(value);
}
}
@ -197,31 +183,31 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
/; EXECUTABLE_BLOCK_(?:START|END)([\s\S]*?)(?:; CONFIG_BLOCK_START|$)/i;
const additionalVarsMatch = fileContent.match(additionalVarsRegex);
if (additionalVarsMatch && additionalVarsMatch[1]) {
const additionalLines = additionalVarsMatch[1].split("\n");
const additionalLines = additionalVarsMatch[1].split('\n');
additionalLines.forEach((line) => {
// Match both standard format and the special case for "total filament cost"
const varRegex =
/^\s*;\s*((?:filament used|filament cost|total filament used|total filament cost|total layers count|estimated printing time)[^=]*?)\s*=\s*(.*?)\s*$/;
const match = line.match(varRegex);
if (match) {
const key = match[1].replace(/\[([^\]]+)\]/g, "$1").trim();
const key = match[1].replace(/\[([^\]]+)\]/g, '$1').trim();
let value = match[2].trim();
// Clean up values - remove units in brackets and handle special cases
if (key.includes("filament used")) {
if (key.includes('filament used')) {
// Extract just the numeric value, ignoring units in brackets
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
} else if (key.includes("filament cost")) {
} else if (key.includes('filament cost')) {
// Extract just the numeric value
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
} else if (key.includes("total layers count")) {
} else if (key.includes('total layers count')) {
value = parseInt(value, 10);
} else if (key.includes("estimated printing time")) {
} else if (key.includes('estimated printing time')) {
// Keep as string but trim any additional whitespace
value = value.trim();
}
@ -243,7 +229,7 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
const postConfigParams = /; CONFIG_BLOCK_END\s*\n([\s\S]*?)$/;
const postConfigMatch = fileContent.match(postConfigParams);
if (postConfigMatch && postConfigMatch[1]) {
const postConfigLines = postConfigMatch[1].split("\n");
const postConfigLines = postConfigMatch[1].split('\n');
postConfigLines.forEach((line) => {
// Match lines with format "; parameter_name = value"
const paramRegex = /^\s*;\s*([^=]+?)\s*=\s*(.*?)\s*$/;
@ -253,11 +239,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
let value = match[2].trim();
// Try to convert value to appropriate type
if (value === "true" || value === "false") {
value = value === "true";
} else if (!isNaN(value) && value !== "") {
if (value === 'true' || value === 'false') {
value = value === 'true';
} else if (!isNaN(value) && value !== '') {
// Check if it's a number (but not a percentage)
if (!value.includes("%")) {
if (!value.includes('%')) {
value = Number(value);
}
}
@ -292,7 +278,7 @@ function getChangedValues(oldObj, newObj) {
}
async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
const { auditLogModel } = await import('../schemas/management/auditlog.schema.js');
const { auditLogModel } = await import('./schemas/management/auditlog.schema.js');
// Get only the changed values
const changedValues = getChangedValues(oldValue, newValue);
@ -314,9 +300,4 @@ async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, o
await auditLog.save();
}
export {
parseFilter,
convertToCamelCase,
extractConfigBlock,
newAuditLog
};
export { parseFilter, convertToCamelCase, extractConfigBlock, newAuditLog };