Remove unused files and refactor imports: delete obsolete files (including passport.js and the unused static landing page), update import paths across services and routes for consistency, and add ESLint and Prettier configurations to improve code quality.
parent 65ccd0cd90
commit a5f3b75be8
index.js (86 lines changed)
@@ -1,86 +0,0 @@
-import bcrypt from "bcrypt";
-import dotenv from "dotenv";
-import { userModel } from "../../schemas/user.schema.js";
-import { printerModel } from "../../schemas/printer.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-
-dotenv.config();
-
-const logger = log4js.getLogger("Printers");
-logger.level = process.env.LOG_LEVEL;
-
-export const listPrintersRouteHandler = async (
-  req,
-  res,
-  page = 1,
-  limit = 25
-) => {
-  try {
-    // Calculate the skip value based on the page number and limit
-    const skip = (page - 1) * limit;
-
-    // Fetch users with pagination
-    const printers = await printerModel.find().skip(skip).limit(limit);
-
-    logger.trace(`List of printers (Page ${page}, Limit ${limit}):`);
-    res.send(printers);
-  } catch (error) {
-    logger.error("Error listing users:", error);
-    res.status(500).send({ error: error });
-  }
-};
-
-export const getPrinterRouteHandler = async (req, res) => {
-  const remoteAddress = req.params.remoteAddress;
-
-  try {
-    // Fetch the printer with the given remote address
-    const printer = await printerModel.findOne({ remoteAddress });
-
-    if (!printer) {
-      logger.warn(`Printer with remote address ${remoteAddress} not found.`);
-      return res.status(404).send({ error: "Printer not found" });
-    }
-
-    logger.trace(`Printer with remote address ${remoteAddress}:`, printer);
-    res.send(printer);
-  } catch (error) {
-    logger.error("Error fetching printer:", error);
-    res.status(500).send({ error: error.message });
-  }
-};
-
-export const editPrinterRouteHandler = async (req, res) => {
-  const remoteAddress = req.params.remoteAddress;
-  const { friendlyName } = req.body;
-
-  try {
-    // Fetch the printer with the given remote address
-    const printer = await printerModel.findOne({ remoteAddress });
-
-    if (!printer) {
-      logger.warn(`Printer with remote address ${remoteAddress} not found.`);
-      return res.status(404).send({ error: "Printer not found" });
-    }
-
-    logger.trace(`Editing printer with remote address ${remoteAddress}:`, printer);
-    try {
-      const result = await printerModel.updateOne(
-        { remoteAddress: remoteAddress },
-        { $set: req.body }
-      );
-      if (result.nModified === 0) {
-        logger.error("No printers updated.");
-        res.status(500).send({ error: "No printers updated." });
-      }
-    } catch (updateError) {
-      logger.error("Error updating printer:", updateError);
-      res.status(500).send({ error: updateError.message });
-    }
-    res.send("OK");
-  } catch (fetchError) {
-    logger.error("Error fetching printer:", fetchError);
-    res.status(500).send({ error: fetchError.message });
-  }
-};
package-lock.json (1076 lines changed, generated): diff suppressed because it is too large.
@@ -44,6 +44,10 @@
    "@babel/plugin-proposal-object-rest-spread": "^7.18.0",
    "@babel/preset-env": "^7.18.2",
    "@babel/register": "^7.17.7",
+    "eslint": "^8.57.1",
+    "eslint-config-prettier": "^10.1.5",
+    "eslint-plugin-prettier": "^5.5.1",
+    "prettier": "^3.6.2",
    "sequelize-cli": "^6.4.1",
    "standard": "^17.1.0"
  },
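The ESLint and Prettier configuration files mentioned in the commit message are not included in this excerpt, so the following is only a minimal sketch of a setup consistent with the devDependencies added above; the file name .eslintrc.cjs and the specific rule choices are assumptions, not the repository's actual config.

// .eslintrc.cjs (hypothetical example only; the project's real config is not shown in this diff)
module.exports = {
  env: { node: true, es2022: true },
  parserOptions: { ecmaVersion: 2022, sourceType: 'module' },
  extends: [
    'eslint:recommended',
    // eslint-plugin-prettier runs Prettier as an ESLint rule;
    // eslint-config-prettier disables stylistic rules that would conflict with it.
    'plugin:prettier/recommended',
  ],
  rules: {
    // Matches the single-quote style applied throughout this commit.
    'prettier/prettier': ['error', { singleQuote: true, printWidth: 100 }],
  },
};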
@@ -1,33 +1,33 @@
-import mongoose from "mongoose";
-import bcrypt from "bcrypt";
-import { userModel } from "../schemas/management/user.schema.js";
-import { dbConnect } from "./index.js";
+import mongoose from 'mongoose';
+import bcrypt from 'bcrypt';
+import { userModel } from '../schemas/management/user.schema.js';
+import { dbConnect } from './mongo.js';

const ReseedAction = () => {
  async function clear() {
    dbConnect();
    await userModel.deleteMany({});
-    console.log("DB cleared");
+    console.log('DB cleared');
  }

  async function seedDB() {
    await clear();
    const salt = await bcrypt.genSalt(10);
-    const hashPassword = await bcrypt.hash("secret", salt);
+    const hashPassword = await bcrypt.hash('secret', salt);

    const user = {
      _id: mongoose.Types.ObjectId(1),
-      name: "Admin",
-      email: "admin@jsonapi.com",
+      name: 'Admin',
+      email: 'admin@jsonapi.com',
      password: hashPassword,
      createdAt: new Date(),
-      profile_image: "../../images/admin.jpg",
+      profile_image: '../../images/admin.jpg',
    };

    const admin = new userModel(user);
    await admin.save();

-    console.log("DB seeded");
+    console.log('DB seeded');
  }

  seedDB();
@@ -1,8 +1,8 @@
-import mongoose from "mongoose";
-import dotenv from "dotenv";
-import log4js from "log4js";
+import mongoose from 'mongoose';
+import dotenv from 'dotenv';
+import log4js from 'log4js';

-const logger = log4js.getLogger("MongoDB");
+const logger = log4js.getLogger('MongoDB');
logger.level = process.env.LOG_LEVEL;

dotenv.config();
@@ -11,10 +11,10 @@ dotenv.config();
mongoose.set('strictQuery', false);

function dbConnect() {
-  mongoose.connection.once("open", () => logger.info("Database connected."));
+  mongoose.connection.once('open', () => logger.info('Database connected.'));
  return mongoose.connect(
    `mongodb://${process.env.DB_LINK}/farmcontrol?retryWrites=true&w=majority`,
-    { }
+    {}
  );
}
src/index.js (80 lines changed)
@@ -1,9 +1,9 @@
-import express from "express";
-import bodyParser from "body-parser";
-import cors from "cors";
-import dotenv from "dotenv";
-import { expressSession, keycloak } from "./keycloak.js";
-import { dbConnect } from "./mongo/index.js";
+import express from 'express';
+import bodyParser from 'body-parser';
+import cors from 'cors';
+import dotenv from 'dotenv';
+import { expressSession, keycloak } from './keycloak.js';
+import { dbConnect } from './database/mongo.js';
import {
  authRoutes,
  userRoutes,
@@ -22,24 +22,24 @@ import {
  stockEventRoutes,
  auditLogRoutes,
  noteTypeRoutes,
-  noteRoutes
-} from "./routes/index.js";
-import path from "path";
-import * as fs from "fs";
-import cron from "node-cron";
-import ReseedAction from "./mongo/ReseedAction.js";
-import log4js from "log4js";
-import { populateUserMiddleware } from "./services/misc/auth.js";
+  noteRoutes,
+} from './routes/index.js';
+import path from 'path';
+import * as fs from 'fs';
+import cron from 'node-cron';
+import ReseedAction from './database/ReseedAction.js';
+import log4js from 'log4js';
+import { populateUserMiddleware } from './services/misc/auth.js';

dotenv.config();

const PORT = process.env.PORT || 8080;
const app = express();

-const logger = log4js.getLogger("App");
+const logger = log4js.getLogger('App');
logger.level = process.env.LOG_LEVEL;

-app.use(log4js.connectLogger(logger, { level: "trace" }));
+app.use(log4js.connectLogger(logger, { level: 'trace' }));

const whitelist = [process.env.APP_URL_CLIENT];
const corsOptions = {
@@ -47,7 +47,7 @@ const corsOptions = {
    if (!origin || whitelist.indexOf(origin) !== -1) {
      callback(null, true);
    } else {
-      callback(new Error("Not allowed by CORS"));
+      callback(new Error('Not allowed by CORS'));
    }
  },
  credentials: true,
@@ -56,37 +56,35 @@ const corsOptions = {
dbConnect();

app.use(cors(corsOptions));
-app.use(
-  bodyParser.json({ type: "application/json", strict: false, limit: "50mb" }),
-);
+app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' }));
app.use(express.json());
app.use(expressSession);
app.use(keycloak.middleware());
app.use(populateUserMiddleware);

-app.get("/", function (req, res) {
-  const __dirname = fs.realpathSync(".");
-  res.sendFile(path.join(__dirname, "/src/landing/index.html"));
+app.get('/', function (req, res) {
+  const __dirname = fs.realpathSync('.');
+  res.sendFile(path.join(__dirname, '/src/landing/index.html'));
});

-app.use("/auth", authRoutes);
-app.use("/users", userRoutes)
-app.use("/spotlight", spotlightRoutes);
-app.use("/printers", printerRoutes);
-app.use("/jobs", jobRoutes);
-app.use("/gcodefiles", gcodeFileRoutes);
-app.use("/filaments", filamentRoutes);
-app.use("/parts", partRoutes);
-app.use("/products", productRoutes);
-app.use("/vendors", vendorRoutes);
-app.use("/materials", materialRoutes);
-app.use("/partstocks", partStockRoutes);
-app.use("/filamentstocks", filamentStockRoutes);
-app.use("/stockevents", stockEventRoutes);
-app.use("/stockaudits", stockAuditRoutes);
-app.use("/auditlogs", auditLogRoutes);
-app.use("/notetypes", noteTypeRoutes);
-app.use("/notes", noteRoutes)
+app.use('/auth', authRoutes);
+app.use('/users', userRoutes);
+app.use('/spotlight', spotlightRoutes);
+app.use('/printers', printerRoutes);
+app.use('/jobs', jobRoutes);
+app.use('/gcodefiles', gcodeFileRoutes);
+app.use('/filaments', filamentRoutes);
+app.use('/parts', partRoutes);
+app.use('/products', productRoutes);
+app.use('/vendors', vendorRoutes);
+app.use('/materials', materialRoutes);
+app.use('/partstocks', partStockRoutes);
+app.use('/filamentstocks', filamentStockRoutes);
+app.use('/stockevents', stockEventRoutes);
+app.use('/stockaudits', stockAuditRoutes);
+app.use('/auditlogs', auditLogRoutes);
+app.use('/notetypes', noteTypeRoutes);
+app.use('/notes', noteRoutes);

if (process.env.SCHEDULE_HOUR) {
  cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {
@@ -1,26 +1,25 @@
-import Keycloak from "keycloak-connect";
-import session from "express-session";
-import dotenv from "dotenv";
-import axios from "axios";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
+import Keycloak from 'keycloak-connect';
+import session from 'express-session';
+import dotenv from 'dotenv';
+import axios from 'axios';
+import jwt from 'jsonwebtoken';
+import log4js from 'log4js';

dotenv.config();

-const logger = log4js.getLogger("Keycloak");
-logger.level = process.env.LOG_LEVEL || "info";
+const logger = log4js.getLogger('Keycloak');
+logger.level = process.env.LOG_LEVEL || 'info';

// Initialize Keycloak
const keycloakConfig = {
-  realm: process.env.KEYCLOAK_REALM || "farm-control",
-  "auth-server-url": process.env.KEYCLOAK_URL || "http://localhost:8080/auth",
-  "ssl-required": process.env.NODE_ENV === "production" ? "external" : "none",
-  resource: process.env.KEYCLOAK_CLIENT_ID || "farmcontrol-client",
-  "confidential-port": 0,
-  "bearer-only": true,
-  "public-client": false,
-  "use-resource-role-mappings": true,
-  "verify-token-audience": true,
+  realm: process.env.KEYCLOAK_REALM || 'farm-control',
+  'auth-server-url': process.env.KEYCLOAK_URL || 'http://localhost:8080/auth',
+  'ssl-required': process.env.NODE_ENV === 'production' ? 'external' : 'none',
+  resource: process.env.KEYCLOAK_CLIENT_ID || 'farmcontrol-client',
+  'confidential-port': 0,
+  'bearer-only': true,
+  'public-client': false,
+  'use-resource-role-mappings': true,
+  'verify-token-audience': true,
  credentials: {
    secret: process.env.KEYCLOAK_CLIENT_SECRET,
  },
@@ -29,7 +28,7 @@ const keycloakConfig = {
const memoryStore = new session.MemoryStore();

var expressSession = session({
-  secret: process.env.SESSION_SECRET || "n00Dl3s23!",
+  secret: process.env.SESSION_SECRET || 'n00Dl3s23!',
  resave: false,
  saveUninitialized: true, // Set this to true to ensure session is initialized
  store: memoryStore,
@@ -60,15 +59,15 @@ const isAuthenticated = async (req, res, next) => {
      }),
      {
        headers: {
-          "Content-Type": "application/x-www-form-urlencoded",
+          'Content-Type': 'application/x-www-form-urlencoded',
        },
      }
    );

    const introspection = response.data;
    if (!introspection.active) {
-      logger.info("Token is not active");
-      return res.status(401).json({ error: "Not authenticated" });
+      logger.info('Token is not active');
+      return res.status(401).json({ error: 'Not authenticated' });
    }

    // Parse token to extract user info
@@ -83,20 +82,20 @@ const isAuthenticated = async (req, res, next) => {

      return next();
    } catch (error) {
-      logger.error("Token verification error:", error.message);
-      return res.status(401).json({ error: "Not authenticated" });
+      logger.error('Token verification error:', error.message);
+      return res.status(401).json({ error: 'Not authenticated' });
    }
  }

  // Fallback to session-based authentication
-  if (req.session && req.session["keycloak-token"]) {
-    const sessionToken = req.session["keycloak-token"];
+  if (req.session && req.session['keycloak-token']) {
+    const sessionToken = req.session['keycloak-token'];
    if (sessionToken.expires_at > new Date().getTime()) {
      return next();
    }
  }

-  return res.status(401).json({ error: "Not authenticated" });
+  return res.status(401).json({ error: 'Not authenticated' });
};

// Helper function to extract roles from token
@@ -112,11 +111,7 @@ function extractRoles(token) {
  if (token.resource_access) {
    for (const client in token.resource_access) {
      if (token.resource_access[client].roles) {
-        roles.push(
-          ...token.resource_access[client].roles.map(
-            (role) => `${client}:${role}`
-          )
-        );
+        roles.push(...token.resource_access[client].roles.map((role) => `${client}:${role}`));
      }
    }
  }
@@ -1,77 +0,0 @@
-<!DOCTYPE html>
-<html>
-  <head>
-    <meta charset="utf-8" />
-    <meta name="viewport" content="width=device-width, initial-scale=1" />
-
-    <title>Node.js API FREE by Creative Tim & UPDIVISION</title>
-    <link href="https://fonts.googleapis.com/css?family=Nunito:200,600" rel="stylesheet"
-    />
-    <style>
-      html,
-      body {
-        background-color: #fff;
-        color: #636b6f;
-        font-family: "Nunito", sans-serif;
-        font-weight: 200;
-        height: 100vh;
-        margin: 0;
-      }
-
-      .full-height {
-        height: 100vh;
-      }
-
-      .flex-center {
-        align-items: center;
-        display: flex;
-        justify-content: center;
-      }
-
-      .position-ref {
-        position: relative;
-      }
-
-      .top-right {
-        position: absolute;
-        right: 10px;
-        top: 18px;
-      }
-
-      .content {
-        text-align: center;
-      }
-
-      .title {
-        font-size: 84px;
-      }
-
-      .links > a {
-        color: #636b6f;
-        padding: 0 25px;
-        font-size: 13px;
-        font-weight: 600;
-        letter-spacing: 0.1rem;
-        text-decoration: none;
-        text-transform: uppercase;
-      }
-
-      .m-b-md {
-        margin-bottom: 30px;
-      }
-    </style>
-  </head>
-  <body>
-    <div class="flex-center position-ref full-height">
-      <div class="content">
-        <div class="title m-b-md">Headless CMS with ExpressJS API:FREE</div>
-
-        <div class="links">
-          <a href="https://expressjs.com/" target="_blank">Express.js</a>
-          <a href="https://www.mongodb.com/" target="_blank">MongoDB</a>
-          <a href="https://documenter.getpostman.com/view/8138626/Uze1virp" target="_blank">Documentation</a>
-        </div>
-      </div>
-    </div>
-  </body>
-</html>
@@ -1,40 +0,0 @@
-import { ExtractJwt } from "passport-jwt";
-import passportJWT from "passport-jwt";
-import dotenv from "dotenv";
-import passport from "passport";
-
-import { userModel } from "./schemas/user.schema.js";
-import { hostModel } from "./schemas/host.schema.js";
-
-const JWTStrategy = passportJWT.Strategy;
-dotenv.config();
-
-passport.use(
-  new JWTStrategy(
-    {
-      jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
-      secretOrKey: process.env.JWT_SECRET,
-    },
-    function (jwtPayload, done) {
-      if (jwtPayload.hostId) {
-        return hostModel
-          .findOne({ hostId: jwtPayload.hostId })
-          .then((host) => {
-            return done(null, host);
-          })
-          .catch((err) => {
-            return done(err);
-          });
-      } else {
-        return userModel
-          .findOne({ _id: jwtPayload.id })
-          .then((user) => {
-            return done(null, user);
-          })
-          .catch((err) => {
-            return done(err);
-          });
-      }
-    }
-  )
-);
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
  getFilamentStockRouteHandler,
  editFilamentStockRouteHandler,
  newFilamentStockRouteHandler,
-} from "../../services/inventory/filamentstocks.js";
+} from '../../services/inventory/filamentstocks.js';

// list of filamentStocks
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, sort, order } = req.query;

-  const allowedFilters = ["country"];
+  const allowedFilters = ['country'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
  listFilamentStocksRouteHandler(req, res, page, limit, property, filter, sort, order);
});

-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newFilamentStockRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getFilamentStockRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editFilamentStockRouteHandler(req, res);
});
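The parseFilter helper that these routes now import from utils.js is not part of this excerpt; the sketch below is a hypothetical illustration of a helper with the same call shape (a query key and raw value in, a small Mongo-style filter object out), not the repository's actual implementation.

// Hypothetical sketch only; the real parseFilter in utils.js is not shown in this diff.
// The routes call it as parseFilter(key, value) and spread the result into a query filter.
export function parseFilter(key, value) {
  // Treat comma-separated values as an $in query, e.g. ?country=US,DE
  if (typeof value === 'string' && value.includes(',')) {
    return { [key]: { $in: value.split(',') } };
  }
  return { [key]: value };
}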
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
  getPartStockRouteHandler,
  editPartStockRouteHandler,
  newPartStockRouteHandler,
-} from "../../services/inventory/partstocks.js";
+} from '../../services/inventory/partstocks.js';

// list of partStocks
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

-  const allowedFilters = ["country"];
+  const allowedFilters = ['country'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
  listPartStocksRouteHandler(req, res, page, limit, property, filter);
});

-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newPartStockRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getPartStockRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editPartStockRouteHandler(req, res);
});
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -9,21 +9,21 @@ import {
  newStockAuditRouteHandler,
  updateStockAuditRouteHandler,
  deleteStockAuditRouteHandler,
-} from "../../services/inventory/stockaudits.js";
+} from '../../services/inventory/stockaudits.js';

// List stock audits
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

-  const allowedFilters = ["status", "type", "createdBy"];
+  const allowedFilters = ['status', 'type', 'createdBy'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -32,22 +32,22 @@ router.get("/", isAuthenticated, (req, res) => {
});

// Create new stock audit
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newStockAuditRouteHandler(req, res);
});

// Get specific stock audit
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getStockAuditRouteHandler(req, res);
});

// Update stock audit
-router.put("/:id", isAuthenticated, (req, res) => {
+router.put('/:id', isAuthenticated, (req, res) => {
  updateStockAuditRouteHandler(req, res);
});

// Delete stock audit
-router.delete("/:id", isAuthenticated, (req, res) => {
+router.delete('/:id', isAuthenticated, (req, res) => {
  deleteStockAuditRouteHandler(req, res);
});
@@ -1,27 +1,27 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
  listStockEventsRouteHandler,
  getStockEventRouteHandler,
  newStockEventRouteHandler,
-} from "../../services/inventory/stockevents.js";
+} from '../../services/inventory/stockevents.js';

// List stock events
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, sort, order } = req.query;

-  const allowedFilters = ["type", "filamentStock"];
+  const allowedFilters = ['type', 'filamentStock'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -29,12 +29,12 @@ router.get("/", isAuthenticated, (req, res) => {
});

// Create new stock event
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newStockEventRouteHandler(req, res);
});

// Get specific stock event
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getStockEventRouteHandler(req, res);
});
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
  getMaterialRouteHandler,
  editMaterialRouteHandler,
  newMaterialRouteHandler,
-} from "../../services/management/materials.js";
+} from '../../services/management/materials.js';

// list of materials
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ['type', 'brand', 'diameter', 'color'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
  listMaterialsRouteHandler(req, res, page, limit, property, filter);
});

-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newMaterialRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getMaterialRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editMaterialRouteHandler(req, res);
});
@@ -1,55 +1,42 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
import {
  listNoteTypesRouteHandler,
  getNoteTypeRouteHandler,
  editNoteTypeRouteHandler,
  newNoteTypeRouteHandler,
-} from "../../services/management/notetypes.js";
-import { parseFilter } from "../../util/index.js";
+} from '../../services/management/notetypes.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();

// List note types
-router.get("/", isAuthenticated, async (req, res) => {
+router.get('/', isAuthenticated, async (req, res) => {
  const { page, limit, property, sort, order } = req.query;

-  const allowedFilters = ["name", "active"];
+  const allowedFilters = ['name', 'active'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }

-  listNoteTypesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
-  }
-);
+  listNoteTypesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
+});

// Get single note type
-router.get(
-  "/:id",
-  isAuthenticated,
-  getNoteTypeRouteHandler
-);
+router.get('/:id', isAuthenticated, getNoteTypeRouteHandler);

// Edit note type
-router.put(
-  "/:id",
-  isAuthenticated,
-  editNoteTypeRouteHandler
-);
+router.put('/:id', isAuthenticated, editNoteTypeRouteHandler);

// Create new note type
-router.post(
-  "/",
-  isAuthenticated,
-  newNoteTypeRouteHandler
-);
+router.post('/', isAuthenticated, newNoteTypeRouteHandler);

export default router;
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -10,46 +10,46 @@ import {
  newPartRouteHandler,
  uploadPartFileContentRouteHandler,
  getPartFileContentRouteHandler,
-} from "../../services/management/parts.js";
+} from '../../services/management/parts.js';

// list of parts
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, sort, order } = req.query;

-  const allowedFilters = ["products", "name"];
+  const allowedFilters = ['products', 'name'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }

-  listPartsRouteHandler(req, res, page, limit, property, filter, "", sort, order);
+  listPartsRouteHandler(req, res, page, limit, property, filter, '', sort, order);
});

-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newPartRouteHandler(req, res);
});

-router.post("/:id/content", isAuthenticated, (req, res) => {
+router.post('/:id/content', isAuthenticated, (req, res) => {
  uploadPartFileContentRouteHandler(req, res);
});

-router.get("/:id/content", isAuthenticated, (req, res) => {
+router.get('/:id/content', isAuthenticated, (req, res) => {
  getPartFileContentRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getPartRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editPartRouteHandler(req, res);
});
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
  getProductRouteHandler,
  editProductRouteHandler,
  newProductRouteHandler,
-} from "../../services/management/products.js";
+} from '../../services/management/products.js';

// list of products
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ['type', 'brand', 'diameter', 'color'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
  listProductsRouteHandler(req, res, page, limit, property, filter);
});

-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newProductRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getProductRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editProductRouteHandler(req, res);
});
@@ -1,27 +1,27 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
  listUsersRouteHandler,
  getUserRouteHandler,
  editUserRouteHandler,
-} from "../../services/management/users.js";
+} from '../../services/management/users.js';

// list of users
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

-  const allowedFilters = ["username", "name", "firstName", "lastName"];
+  const allowedFilters = ['username', 'name', 'firstName', 'lastName'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -29,12 +29,12 @@ router.get("/", isAuthenticated, (req, res) => {
  listUsersRouteHandler(req, res, page, limit, property, filter);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getUserRouteHandler(req, res);
});

// update user info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editUserRouteHandler(req, res);
});
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
  getVendorRouteHandler,
  editVendorRouteHandler,
  newVendorRouteHandler,
-} from "../../services/management/vendors.js";
+} from '../../services/management/vendors.js';

// list of vendors
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

-  const allowedFilters = ["country"];
+  const allowedFilters = ['country'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
  listVendorsRouteHandler(req, res, page, limit, property, filter);
});

-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newVendorRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getVendorRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editVendorRouteHandler(req, res);
});
@@ -1,21 +1,21 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
import {
  listNotesRouteHandler,
  getNoteRouteHandler,
  editNoteRouteHandler,
  newNoteRouteHandler,
-  deleteNoteRouteHandler
-} from "../../services/misc/notes.js";
-import { parseFilter } from "../../util/index.js";
+  deleteNoteRouteHandler,
+} from '../../services/misc/notes.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();

// List notes
-router.get("/", isAuthenticated, async (req, res) => {
+router.get('/', isAuthenticated, async (req, res) => {
  const { page, limit, property, sort, order } = req.query;

-  const allowedFilters = ["parent", "user._id"];
+  const allowedFilters = ['parent', 'user._id'];

  var filter = {};

@@ -23,41 +23,24 @@ router.get("/", isAuthenticated, async (req, res) => {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const filterObject = parseFilter(key, value);
-        filter = {...filter, ...filterObject}
+        filter = { ...filter, ...filterObject };
      }
    }
  }

-  listNotesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
-  }
-);
+  listNotesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
+});

// Get single note
-router.get(
-  "/:id",
-  isAuthenticated,
-  getNoteRouteHandler
-);
+router.get('/:id', isAuthenticated, getNoteRouteHandler);

// Edit note
-router.put(
-  "/:id",
-  isAuthenticated,
-  editNoteRouteHandler
-);
+router.put('/:id', isAuthenticated, editNoteRouteHandler);

// Delete note
-router.delete(
-  "/:id",
-  isAuthenticated,
-  deleteNoteRouteHandler
-);
+router.delete('/:id', isAuthenticated, deleteNoteRouteHandler);

// Create new note
-router.post(
-  "/",
-  isAuthenticated,
-  newNoteRouteHandler
-);
+router.post('/', isAuthenticated, newNoteRouteHandler);

export default router;
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -8,20 +8,20 @@ import {
  getFilamentRouteHandler,
  editFilamentRouteHandler,
  newFilamentRouteHandler,
-} from "../../services/management/filaments.js";
+} from '../../services/management/filaments.js';

// list of filaments
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

-  const allowedFilters = ["type", "vendor.name", "diameter", "color"];
+  const allowedFilters = ['type', 'vendor.name', 'diameter', 'color'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        filter = {...filter, ...parseFilter(key, value)};
+        filter = { ...filter, ...parseFilter(key, value) };
      }
    }
  }
@@ -29,16 +29,16 @@ router.get("/", isAuthenticated, (req, res) => {
  listFilamentsRouteHandler(req, res, page, limit, property, filter);
});

-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newFilamentRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getFilamentRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editFilamentRouteHandler(req, res);
});
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';

const router = express.Router();
import {
@@ -11,17 +11,17 @@ import {
  parseGCodeFileHandler,
  uploadGCodeFileContentRouteHandler,
  getGCodeFileContentRouteHandler,
-} from "../../services/production/gcodefiles.js";
+} from '../../services/production/gcodefiles.js';

// list of printers
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, search, sort, order } = req.query;

  const allowedFilters = [
-    "filament.type",
-    "filament.vendor.name",
-    "filament.diameter",
-    "filament.color",
+    'filament.type',
+    'filament.vendor.name',
+    'filament.diameter',
+    'filament.color',
  ];

  var filter = {};
@@ -29,8 +29,8 @@ router.get("/", isAuthenticated, (req, res) => {
  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value)
-        filter = {...filter, ...parsedFilter};
+        const parsedFilter = parseFilter(key, value);
+        filter = { ...filter, ...parsedFilter };
      }
    }
  }
@@ -39,28 +39,28 @@ router.get("/", isAuthenticated, (req, res) => {
});

// new pritner
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
  newGCodeFileRouteHandler(req, res);
});

-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
  getGCodeFileRouteHandler(req, res);
});

// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
  editGCodeFileRouteHandler(req, res);
});

-router.post("/:id/content", isAuthenticated, (req, res) => {
+router.post('/:id/content', isAuthenticated, (req, res) => {
  uploadGCodeFileContentRouteHandler(req, res);
});

-router.post("/content", isAuthenticated, (req, res) => {
+router.post('/content', isAuthenticated, (req, res) => {
  parseGCodeFileHandler(req, res);
});

-router.get("/:id/content", isAuthenticated, (req, res) => {
+router.get('/:id/content', isAuthenticated, (req, res) => {
  getGCodeFileContentRouteHandler(req, res);
});
@@ -1,4 +1,4 @@
-import mongoose from "mongoose";
+import mongoose from 'mongoose';

 const materialSchema = new mongoose.Schema({
   name: { required: true, type: String },
@@ -7,10 +7,10 @@ const materialSchema = new mongoose.Schema({
   tags: [{ type: String }],
 });

-materialSchema.virtual("id").get(function () {
+materialSchema.virtual('id').get(function () {
   return this._id.toHexString();
 });

-materialSchema.set("toJSON", { virtuals: true });
+materialSchema.set('toJSON', { virtuals: true });

-export const materialModel = mongoose.model("Material", materialSchema);
+export const materialModel = mongoose.model('Material', materialSchema);
@@ -1,15 +1,14 @@
-import dotenv from "dotenv";
-import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Filament Stocks");
+const logger = log4js.getLogger('Filament Stocks');
 logger.level = process.env.LOG_LEVEL;

 export const listFilamentStocksRouteHandler = async (
@@ -17,10 +16,10 @@ export const listFilamentStocksRouteHandler = async (
   res,
   page = 1,
   limit = 25,
-  property = "",
+  property = '',
   filter = {},
-  sort = "",
-  order = "ascend"
+  sort = '',
+  order = 'ascend'
 ) => {
   try {
     // Calculate the skip value based on the page number and limit
@@ -85,35 +84,37 @@ export const getFilamentStockRouteHandler = async (req, res) => {
       .findOne({
         _id: id,
       })
-      .populate("filament")
+      .populate('filament')
       .populate({
         path: 'stockEvents',
         populate: [
           {
             path: 'subJob',
-            select: 'number'
+            select: 'number',
           },
           {
             path: 'job',
-            select: 'startedAt'
-          }
-        ]
+            select: 'startedAt',
+          },
+        ],
       });

     if (!filamentStock) {
       logger.warn(`Filament stock not found with supplied id.`);
-      return res.status(404).send({ error: "Print job not found." });
+      return res.status(404).send({ error: 'Print job not found.' });
     }

     logger.trace(`Filament stock with ID: ${id}:`, filamentStock);

-    const auditLogs = await auditLogModel.find({
-      target: id
-    }).populate('owner');
+    const auditLogs = await auditLogModel
+      .find({
+        target: id,
+      })
+      .populate('owner');

-    res.send({...filamentStock._doc, auditLogs: auditLogs});
+    res.send({ ...filamentStock._doc, auditLogs: auditLogs });
   } catch (error) {
-    logger.error("Error fetching filament stock:", error);
+    logger.error('Error fetching filament stock:', error);
     res.status(500).send({ error: error.message });
   }
 };
@@ -144,21 +145,18 @@ export const editFilamentStockRouteHandler = async (req, res) => {
         email: req.body.email,
       };

-      const result = await filamentStockModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await filamentStockModel.updateOne({ _id: id }, { $set: updateData });
       if (result.nModified === 0) {
-        logger.error("No filament stock updated.");
-        res.status(500).send({ error: "No filament stocks updated." });
+        logger.error('No filament stock updated.');
+        res.status(500).send({ error: 'No filament stocks updated.' });
       }
     } catch (updateError) {
-      logger.error("Error updating filament stock:", updateError);
+      logger.error('Error updating filament stock:', updateError);
       res.status(500).send({ error: updateError.message });
     }
-    res.send("OK");
+    res.send('OK');
   } catch (fetchError) {
-    logger.error("Error fetching filament stock:", fetchError);
+    logger.error('Error fetching filament stock:', fetchError);
     res.status(500).send({ error: fetchError.message });
   }
 };
@@ -195,22 +193,22 @@ export const newFilamentStockRouteHandler = async (req, res) => {
       currentNetWeight: startingGrossWeight - filament.emptySpoolWeight,
       filament: req.body.filament._id,
       state: {
-        type: "unconsumed",
+        type: 'unconsumed',
         percent: 0,
       },
     };

     const result = await filamentStockModel.create(newFilamentStock);
     if (result.nCreated === 0) {
-      logger.error("No filament stock created.");
-      return res.status(500).send({ error: "No filament stock created." });
+      logger.error('No filament stock created.');
+      return res.status(500).send({ error: 'No filament stock created.' });
     }

     // Create initial stock event
     const stockEvent = {
-      type: "initial",
-      value: startingNetWeight,
-      unit: "g",
+      type: 'initial',
+      value: startingGrossWeight - filament.emptySpoolWeight,
+      unit: 'g',
       filamentStock: result._id,
       createdAt: new Date(),
       updatedAt: new Date(),
@@ -228,9 +226,9 @@ export const newFilamentStockRouteHandler = async (req, res) => {
       { $push: { stockEvents: eventResult._id } }
     );

-    return res.send({ status: "ok" });
+    return res.send({ status: 'ok' });
   } catch (updateError) {
-    logger.error("Error adding filament stock:", updateError);
+    logger.error('Error adding filament stock:', updateError);
     return res.status(500).send({ error: updateError.message });
   }
 };
@@ -1,11 +1,11 @@
-import dotenv from "dotenv";
-import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
+import dotenv from 'dotenv';
+import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';

 dotenv.config();

-const logger = log4js.getLogger("PartStocks");
+const logger = log4js.getLogger('PartStocks');
 logger.level = process.env.LOG_LEVEL;

 export const listPartStocksRouteHandler = async (
@@ -13,8 +13,8 @@ export const listPartStocksRouteHandler = async (
   res,
   page = 1,
   limit = 25,
-  property = "",
-  filter = {},
+  property = '',
+  filter = {}
 ) => {
   try {
     // Calculate the skip value based on the page number and limit
@@ -101,21 +101,18 @@ export const editPartStockRouteHandler = async (req, res) => {
         email: req.body.email,
       };

-      const result = await partStockModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await partStockModel.updateOne({ _id: id }, { $set: updateData });
       if (result.nModified === 0) {
-        logger.error("No PartStock updated.");
-        res.status(500).send({ error: "No partStocks updated." });
+        logger.error('No PartStock updated.');
+        res.status(500).send({ error: 'No partStocks updated.' });
       }
     } catch (updateError) {
-      logger.error("Error updating partStock:", updateError);
+      logger.error('Error updating partStock:', updateError);
       res.status(500).send({ error: updateError.message });
     }
-    res.send("OK");
+    res.send('OK');
   } catch (fetchError) {
-    logger.error("Error fetching partStock:", fetchError);
+    logger.error('Error fetching partStock:', fetchError);
     res.status(500).send({ error: fetchError.message });
   }
 };
@@ -131,12 +128,12 @@ export const newPartStockRouteHandler = async (req, res) => {
     const result = await partStockModel.create(newPartStock);
     if (result.nCreated === 0) {
-      logger.error("No partStock created.");
-      res.status(500).send({ error: "No partStock created." });
+      logger.error('No partStock created.');
+      res.status(500).send({ error: 'No partStock created.' });
     }
-    res.status(200).send({ status: "ok" });
+    res.status(200).send({ status: 'ok' });
   } catch (updateError) {
-    logger.error("Error updating partStock:", updateError);
+    logger.error('Error updating partStock:', updateError);
     res.status(500).send({ error: updateError.message });
   }
 };
@@ -1,12 +1,12 @@
-import dotenv from "dotenv";
-import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Stock Audits");
+const logger = log4js.getLogger('Stock Audits');
 logger.level = process.env.LOG_LEVEL;

 export const listStockAuditsRouteHandler = async (
@@ -66,24 +66,26 @@ export const getStockAuditRouteHandler = async (req, res) => {
       .findOne({
         _id: id,
       })
-      .populate("createdBy")
-      .populate("items.filamentStock")
-      .populate("items.partStock");
+      .populate('createdBy')
+      .populate('items.filamentStock')
+      .populate('items.partStock');

     if (!stockAudit) {
       logger.warn(`Stock audit not found with supplied id.`);
-      return res.status(404).send({ error: "Stock audit not found." });
+      return res.status(404).send({ error: 'Stock audit not found.' });
     }

     logger.trace(`Stock audit with ID: ${id}:`, stockAudit);

-    const auditLogs = await auditLogModel.find({
-      target: id
-    }).populate('owner');
+    const auditLogs = await auditLogModel
+      .find({
+        target: id,
+      })
+      .populate('owner');

-    res.send({...stockAudit._doc, auditLogs: auditLogs});
+    res.send({ ...stockAudit._doc, auditLogs: auditLogs });
   } catch (error) {
-    logger.error("Error fetching stock audit:", error);
+    logger.error('Error fetching stock audit:', error);
     res.status(500).send({ error: error.message });
   }
 };
@@ -124,33 +127,30 @@ export const updateStockAuditRouteHandler = async (req, res) => {
     const id = new mongoose.Types.ObjectId(req.params.id);
     const updateData = {
       ...req.body,
-      items: req.body.items?.map(item => ({
+      items: req.body.items?.map((item) => ({
         type: item.type,
-        stock: item.type === "filament"
-          ? new mongoose.Types.ObjectId(item.filamentStock)
-          : new mongoose.Types.ObjectId(item.partStock),
+        stock:
+          item.type === 'filament'
+            ? new mongoose.Types.ObjectId(item.filamentStock)
+            : new mongoose.Types.ObjectId(item.partStock),
         expectedQuantity: item.expectedQuantity,
         actualQuantity: item.actualQuantity,
-        notes: item.notes
+        notes: item.notes,
       })),
-      completedAt: req.body.status === "completed" ? new Date() : null
+      completedAt: req.body.status === 'completed' ? new Date() : null,
     };

-    const result = await stockAuditModel.findByIdAndUpdate(
-      id,
-      { $set: updateData },
-      { new: true }
-    );
+    const result = await stockAuditModel.findByIdAndUpdate(id, { $set: updateData }, { new: true });

     if (!result) {
       logger.warn(`Stock audit not found with supplied id.`);
-      return res.status(404).send({ error: "Stock audit not found." });
+      return res.status(404).send({ error: 'Stock audit not found.' });
     }

     logger.trace(`Updated stock audit with ID: ${id}:`, result);
     res.send(result);
   } catch (error) {
-    logger.error("Error updating stock audit:", error);
+    logger.error('Error updating stock audit:', error);
     res.status(500).send({ error: error.message });
   }
 };
@@ -1,11 +1,10 @@
-import dotenv from "dotenv";
-import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
+import dotenv from 'dotenv';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';

 dotenv.config();

-const logger = log4js.getLogger("Stock Events");
+const logger = log4js.getLogger('Stock Events');
 logger.level = process.env.LOG_LEVEL;

 export const listStockEventsRouteHandler = async (
@@ -13,10 +12,10 @@ export const listStockEventsRouteHandler = async (
   res,
   page = 1,
   limit = 25,
-  property = "",
+  property = '',
   filter = {},
-  sort = "",
-  order = "ascend"
+  sort = '',
+  order = 'ascend'
 ) => {
   try {
     const skip = (page - 1) * limit;
@@ -49,26 +48,26 @@ export const listStockEventsRouteHandler = async (
       $addFields: {
         subJob: {
           $cond: {
-            if: { $eq: [{ $size: "$subJob" }, 0] },
+            if: { $eq: [{ $size: '$subJob' }, 0] },
             then: null,
-            else: { $arrayElemAt: ["$subJob", 0] }
-          }
-        }
-      }
+            else: { $arrayElemAt: ['$subJob', 0] },
+          },
+        },
+      },
     });

     if (filter != {}) {
       aggregateCommand.push({ $match: filter });
     }

-    if (property != "") {
+    if (property != '') {
       aggregateCommand.push({ $group: { _id: `$${property}` } });
-      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
+      aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
     }

     // Add sorting if sort parameter is provided
     if (sort) {
-      const sortOrder = order === "descend" ? -1 : 1;
+      const sortOrder = order === 'descend' ? -1 : 1;
       aggregateCommand.push({ $sort: { [sort]: sortOrder } });
     }
@@ -123,17 +122,17 @@ export const newStockEventRouteHandler = async (req, res) => {
       subJob: req.body.subJob ? new mongoose.Types.ObjectId(req.body.subJob) : null,
       job: req.body.job ? new mongoose.Types.ObjectId(req.body.job) : null,
       filamentStock: new mongoose.Types.ObjectId(req.body.filamentStock),
-      timestamp: new Date()
+      timestamp: new Date(),
     };

     const result = await stockEventModel.create(newStockEvent);
     if (!result) {
-      logger.error("No stock event created.");
-      return res.status(500).send({ error: "No stock event created." });
+      logger.error('No stock event created.');
+      return res.status(500).send({ error: 'No stock event created.' });
     }
-    return res.send({ status: "ok", id: result._id });
+    return res.send({ status: 'ok', id: result._id });
   } catch (error) {
-    logger.error("Error adding stock event:", error);
+    logger.error('Error adding stock event:', error);
     return res.status(500).send({ error: error.message });
   }
 };
@@ -1,21 +1,13 @@
-import dotenv from "dotenv";
+import dotenv from 'dotenv';
 import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
-import log4js from "log4js";
-import mongoose from "mongoose";
+import log4js from 'log4js';
+import mongoose from 'mongoose';

 dotenv.config();

-const logger = log4js.getLogger("AuditLogs");
+const logger = log4js.getLogger('AuditLogs');
 logger.level = process.env.LOG_LEVEL;

-export const listAuditLogsRouteHandler = async (
-  req,
-  res,
-  page = 1,
-  limit = 25,
-  property = "",
-  filter = {},
-) => {
+export const listAuditLogsRouteHandler = async (req, res, page = 1, limit = 25, filter = {}) => {
   try {
     // Calculate the skip value based on the page number and limit
     const skip = (page - 1) * limit;
@@ -26,15 +18,12 @@ export const listAuditLogsRouteHandler = async (
       .skip(skip)
       .limit(Number(limit))
       .sort({ createdAt: -1 })
-      .populate('owner', 'name _id')
+      .populate('owner', 'name _id');

-    logger.trace(
-      `List of audit logs (Page ${page}, Limit ${limit}):`,
-      auditLogs,
-    );
+    logger.trace(`List of audit logs (Page ${page}, Limit ${limit}):`, auditLogs);
     res.send(auditLogs);
   } catch (error) {
-    logger.error("Error listing audit logs:", error);
+    logger.error('Error listing audit logs:', error);
     res.status(500).send({ error: error });
   }
 };
@@ -44,19 +33,23 @@ export const getAuditLogRouteHandler = async (req, res) => {
     // Get ID from params
     const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the audit log with the given ID
-    const auditLog = await auditLogModel.findOne({
-      _id: id,
-    }).populate('printer').populate('owner').populate('target');
+    const auditLog = await auditLogModel
+      .findOne({
+        _id: id,
+      })
+      .populate('printer')
+      .populate('owner')
+      .populate('target');

     if (!auditLog) {
       logger.warn(`Audit log not found with supplied id.`);
-      return res.status(404).send({ error: "Audit log not found." });
+      return res.status(404).send({ error: 'Audit log not found.' });
     }

     logger.trace(`Audit log with ID: ${id}:`, auditLog);
     res.send(auditLog);
   } catch (error) {
-    logger.error("Error fetching audit log:", error);
+    logger.error('Error fetching audit log:', error);
     res.status(500).send({ error: error.message });
   }
 };
@@ -1,14 +1,12 @@
-import dotenv from "dotenv";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Filaments");
+const logger = log4js.getLogger('Filaments');
 logger.level = process.env.LOG_LEVEL;

 export const listFilamentsRouteHandler = async (
@@ -16,8 +14,8 @@ export const listFilamentsRouteHandler = async (
   res,
   page = 1,
   limit = 25,
-  property = "",
-  filter = {},
+  property = '',
+  filter = {}
 ) => {
   try {
     // Calculate the skip value based on the page number and limit
@@ -128,30 +128,20 @@ export const editFilamentRouteHandler = async (req, res) => {
       };

       // Create audit log before updating
-      await newAuditLog(
-        filament.toObject(),
-        updateData,
-        id,
-        'Filament',
-        req.user._id,
-        'User'
-      );
+      await newAuditLog(filament.toObject(), updateData, id, 'Filament', req.user._id, 'User');

-      const result = await filamentModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await filamentModel.updateOne({ _id: id }, { $set: updateData });
       if (result.nModified === 0) {
-        logger.error("No Filament updated.");
-        return res.status(500).send({ error: "No filaments updated." });
+        logger.error('No Filament updated.');
+        return res.status(500).send({ error: 'No filaments updated.' });
       }
     } catch (updateError) {
-      logger.error("Error updating filament:", updateError);
+      logger.error('Error updating filament:', updateError);
       return res.status(500).send({ error: updateError.message });
     }
-    return res.send("OK");
+    return res.send('OK');
   } catch (fetchError) {
-    logger.error("Error fetching filament:", fetchError);
+    logger.error('Error fetching filament:', fetchError);
     return res.status(500).send({ error: fetchError.message });
   }
 };
@@ -177,23 +167,16 @@ export const newFilamentRouteHandler = async (req, res) => {
     const result = await filamentModel.create(newFilament);

     if (result.nCreated === 0) {
-      logger.error("No filament created.");
-      res.status(500).send({ error: "No filament created." });
+      logger.error('No filament created.');
+      res.status(500).send({ error: 'No filament created.' });
     }

     // Create audit log for new filament
-    await newAuditLog(
-      {},
-      newFilament,
-      result._id,
-      'Filament',
-      req.user._id,
-      'User'
-    );
+    await newAuditLog({}, newFilament, result._id, 'Filament', req.user._id, 'User');

-    res.status(200).send({ status: "ok" });
+    res.status(200).send({ status: 'ok' });
   } catch (updateError) {
-    logger.error("Error updating filament:", updateError);
+    logger.error('Error updating filament:', updateError);
     res.status(500).send({ error: updateError.message });
   }
 };
@ -1,12 +1,10 @@
|
|||||||
import dotenv from "dotenv";
|
import dotenv from 'dotenv';
|
||||||
import { materialModel } from "../../schemas/management/material.schema.js";
|
import { materialModel } from '../../schemas/management/material.schema.js';
|
||||||
import jwt from "jsonwebtoken";
|
import log4js from 'log4js';
|
||||||
import log4js from "log4js";
|
import mongoose from 'mongoose';
|
||||||
import mongoose from "mongoose";
|
|
||||||
|
|
||||||
dotenv.config();
|
dotenv.config();
|
||||||
|
|
||||||
const logger = log4js.getLogger("Materials");
|
const logger = log4js.getLogger('Materials');
|
||||||
logger.level = process.env.LOG_LEVEL;
|
logger.level = process.env.LOG_LEVEL;
|
||||||
|
|
||||||
export const listMaterialsRouteHandler = async (
|
export const listMaterialsRouteHandler = async (
|
||||||
@ -14,8 +12,8 @@ export const listMaterialsRouteHandler = async (
|
|||||||
res,
|
res,
|
||||||
page = 1,
|
page = 1,
|
||||||
limit = 25,
|
limit = 25,
|
||||||
property = "",
|
property = '',
|
||||||
filter = {},
|
filter = {}
|
||||||
) => {
|
) => {
|
||||||
try {
|
try {
|
||||||
// Calculate the skip value based on the page number and limit
|
// Calculate the skip value based on the page number and limit
|
||||||
@ -29,9 +27,9 @@ export const listMaterialsRouteHandler = async (
|
|||||||
aggregateCommand.push({ $match: filter });
|
aggregateCommand.push({ $match: filter });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (property != "") {
|
if (property != '') {
|
||||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||||
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
|
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||||
} else {
|
} else {
|
||||||
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
||||||
}
|
}
|
||||||
@ -45,11 +43,11 @@ export const listMaterialsRouteHandler = async (
|
|||||||
|
|
||||||
logger.trace(
|
logger.trace(
|
||||||
`List of materials (Page ${page}, Limit ${limit}, Property ${property}):`,
|
`List of materials (Page ${page}, Limit ${limit}, Property ${property}):`,
|
||||||
material,
|
material
|
||||||
);
|
);
|
||||||
res.send(material);
|
res.send(material);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error listing materials:", error);
|
logger.error('Error listing materials:', error);
|
||||||
res.status(500).send({ error: error });
|
res.status(500).send({ error: error });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -65,13 +63,13 @@ export const getMaterialRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
if (!material) {
|
if (!material) {
|
||||||
logger.warn(`Material not found with supplied id.`);
|
logger.warn(`Material not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Material with ID: ${id}:`, material);
|
logger.trace(`Material with ID: ${id}:`, material);
|
||||||
res.send(material);
|
res.send(material);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching Material:", error);
|
logger.error('Error fetching Material:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -86,30 +84,26 @@ export const editMaterialRouteHandler = async (req, res) => {
     if (!material) {
       // Error handling
       logger.warn(`Material not found with supplied id.`);
-      return res.status(404).send({ error: "Print job not found." });
+      return res.status(404).send({ error: 'Print job not found.' });
     }

     logger.trace(`Material with ID: ${id}:`, material);

     try {
-      const { createdAt, updatedAt, started_at, status, ...updateData } =
-        req.body;
+      const updateData = req.body;

-      const result = await materialModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await materialModel.updateOne({ _id: id }, { $set: updateData });
       if (result.nModified === 0) {
-        logger.error("No Material updated.");
-        res.status(500).send({ error: "No materials updated." });
+        logger.error('No Material updated.');
+        res.status(500).send({ error: 'No materials updated.' });
       }
     } catch (updateError) {
-      logger.error("Error updating material:", updateError);
+      logger.error('Error updating material:', updateError);
       res.status(500).send({ error: updateError.message });
     }
-    res.send("OK");
+    res.send('OK');
   } catch (fetchError) {
-    logger.error("Error fetching material:", fetchError);
+    logger.error('Error fetching material:', fetchError);
     res.status(500).send({ error: fetchError.message });
   }
 };
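One hedged note on the update branch above, outside the scope of this commit: result.nModified comes from the legacy MongoDB driver result shape, while recent Mongoose/driver versions resolve updateOne to an object with matchedCount and modifiedCount instead. Which shape applies depends on the driver version in use, which this diff does not show; a version-tolerant helper could look like the sketch below.

// Sketch: read the modified-document count across MongoDB driver versions.
function modifiedCountOf(updateResult) {
  // Mongoose 6+ / driver 4+ expose modifiedCount; older versions exposed nModified.
  return updateResult.modifiedCount ?? updateResult.nModified ?? 0;
}

// Hypothetical usage: if (modifiedCountOf(result) === 0) { logger.error('No materials updated.'); }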
@@ -125,12 +119,12 @@ export const newMaterialRouteHandler = async (req, res) => {

     const result = await materialModel.create(newMaterial);
     if (result.nCreated === 0) {
-      logger.error("No material created.");
-      res.status(500).send({ error: "No material created." });
+      logger.error('No material created.');
+      res.status(500).send({ error: 'No material created.' });
     }
-    res.status(200).send({ status: "ok" });
+    res.status(200).send({ status: 'ok' });
   } catch (updateError) {
-    logger.error("Error updating material:", updateError);
+    logger.error('Error updating material:', updateError);
     res.status(500).send({ error: updateError.message });
   }
 };
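The formatting changes that dominate this diff (double quotes to single quotes, short multi-line calls collapsed up to roughly 100 columns, trailing commas dropped after the last function parameter) are consistent with a Prettier setup along the following lines. The commit message mentions adding a Prettier configuration, but the file itself is not shown in this part of the diff, so every option below is an assumption.

// prettier.config.js: an illustrative guess at the options implied by this diff.
export default {
  singleQuote: true,    // "..." becomes '...'
  printWidth: 100,      // short multi-line calls collapse onto one line
  trailingComma: 'es5', // keeps commas in object/array literals, drops them after function params
  semi: true,
};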
@@ -1,13 +1,13 @@
-import dotenv from "dotenv";
-import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("NoteTypes");
+const logger = log4js.getLogger('NoteTypes');
 logger.level = process.env.LOG_LEVEL;

 export const listNoteTypesRouteHandler = async (
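Several hunks below collapse calls to the relocated newAuditLog helper, now imported from '../../utils.js'. From the call sites visible in this diff its signature appears to be (before, after, targetId, targetModel, ownerId, ownerModel). The implementation sketched here is only an inference from those call sites and the auditLogModel import; the real helper and its schema field names may differ.

// Sketch (inferred, not the project's actual code): persist a before/after audit entry.
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

export async function newAuditLog(before, after, target, targetModel, owner, ownerModel) {
  // Field names below are assumptions based on the call sites and the schema import.
  return auditLogModel.create({
    before,
    after,
    target,
    targetModel,
    owner,
    ownerModel,
  });
}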
@ -15,8 +15,8 @@ export const listNoteTypesRouteHandler = async (
|
|||||||
res,
|
res,
|
||||||
page = 1,
|
page = 1,
|
||||||
limit = 25,
|
limit = 25,
|
||||||
property = "",
|
property = '',
|
||||||
filter = {},
|
filter = {}
|
||||||
) => {
|
) => {
|
||||||
try {
|
try {
|
||||||
const skip = (page - 1) * limit;
|
const skip = (page - 1) * limit;
|
||||||
@ -27,26 +27,25 @@ export const listNoteTypesRouteHandler = async (
|
|||||||
aggregateCommand.push({ $match: filter });
|
aggregateCommand.push({ $match: filter });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (property != "") {
|
if (property != '') {
|
||||||
aggregateCommand.push({ $group: { _id: `$${property}` } });
|
aggregateCommand.push({ $group: { _id: `$${property}` } });
|
||||||
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
|
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
|
||||||
}
|
}
|
||||||
|
|
||||||
aggregateCommand.push({ $skip: skip });
|
aggregateCommand.push({ $skip: skip });
|
||||||
aggregateCommand.push({ $limit: Number(limit) });
|
aggregateCommand.push({ $limit: Number(limit) });
|
||||||
|
|
||||||
console.log(aggregateCommand)
|
console.log(aggregateCommand);
|
||||||
|
|
||||||
|
|
||||||
noteTypes = await noteTypeModel.aggregate(aggregateCommand);
|
noteTypes = await noteTypeModel.aggregate(aggregateCommand);
|
||||||
|
|
||||||
logger.trace(
|
logger.trace(
|
||||||
`List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
|
`List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
|
||||||
noteTypes,
|
noteTypes
|
||||||
);
|
);
|
||||||
res.send(noteTypes);
|
res.send(noteTypes);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error listing note types:", error);
|
logger.error('Error listing note types:', error);
|
||||||
res.status(500).send({ error: error });
|
res.status(500).send({ error: error });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -60,18 +59,20 @@ export const getNoteTypeRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
if (!noteType) {
|
if (!noteType) {
|
||||||
logger.warn(`Note type not found with supplied id.`);
|
logger.warn(`Note type not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Note type not found." });
|
return res.status(404).send({ error: 'Note type not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Note type with ID: ${id}:`, noteType);
|
logger.trace(`Note type with ID: ${id}:`, noteType);
|
||||||
|
|
||||||
const auditLogs = await auditLogModel.find({
|
const auditLogs = await auditLogModel
|
||||||
target: id
|
.find({
|
||||||
}).populate('owner');
|
target: id,
|
||||||
|
})
|
||||||
|
.populate('owner');
|
||||||
|
|
||||||
res.send({...noteType._doc, auditLogs: auditLogs});
|
res.send({ ...noteType._doc, auditLogs: auditLogs });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching note type:", error);
|
logger.error('Error fetching note type:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -83,7 +84,7 @@ export const editNoteTypeRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
if (!noteType) {
|
if (!noteType) {
|
||||||
logger.warn(`Note type not found with supplied id.`);
|
logger.warn(`Note type not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Note type not found." });
|
return res.status(404).send({ error: 'Note type not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Note type with ID: ${id}:`, noteType);
|
logger.trace(`Note type with ID: ${id}:`, noteType);
|
||||||
@@ -97,30 +98,20 @@ export const editNoteTypeRouteHandler = async (req, res) => {
       };

       // Create audit log before updating
-      await newAuditLog(
-        noteType.toObject(),
-        updateData,
-        id,
-        'NoteType',
-        req.user._id,
-        'User'
-      );
+      await newAuditLog(noteType.toObject(), updateData, id, 'NoteType', req.user._id, 'User');

-      const result = await noteTypeModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await noteTypeModel.updateOne({ _id: id }, { $set: updateData });
       if (result.nModified === 0) {
-        logger.error("No note type updated.");
-        res.status(500).send({ error: "No note types updated." });
+        logger.error('No note type updated.');
+        res.status(500).send({ error: 'No note types updated.' });
       }
     } catch (updateError) {
-      logger.error("Error updating note type:", updateError);
+      logger.error('Error updating note type:', updateError);
       res.status(500).send({ error: updateError.message });
     }
-    res.send("OK");
+    res.send('OK');
   } catch (fetchError) {
-    logger.error("Error fetching note type:", fetchError);
+    logger.error('Error fetching note type:', fetchError);
     res.status(500).send({ error: fetchError.message });
   }
 };
@ -132,23 +123,16 @@ export const newNoteTypeRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
const result = await noteTypeModel.create(newNoteType);
|
const result = await noteTypeModel.create(newNoteType);
|
||||||
if (result.nCreated === 0) {
|
if (result.nCreated === 0) {
|
||||||
logger.error("No note type created.");
|
logger.error('No note type created.');
|
||||||
res.status(500).send({ error: "No note type created." });
|
res.status(500).send({ error: 'No note type created.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create audit log for new note type
|
// Create audit log for new note type
|
||||||
await newAuditLog(
|
await newAuditLog({}, newNoteType, result._id, 'NoteType', req.user._id, 'User');
|
||||||
{},
|
|
||||||
newNoteType,
|
|
||||||
result._id,
|
|
||||||
'NoteType',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
|
|
||||||
res.status(200).send({ status: "ok" });
|
res.status(200).send({ status: 'ok' });
|
||||||
} catch (updateError) {
|
} catch (updateError) {
|
||||||
logger.error("Error creating note type:", updateError);
|
logger.error('Error creating note type:', updateError);
|
||||||
res.status(500).send({ error: updateError.message });
|
res.status(500).send({ error: updateError.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -1,16 +1,15 @@
-import dotenv from "dotenv";
-import { partModel } from "../../schemas/management/part.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import multer from "multer";
-import fs from "fs";
-import path from "path";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { partModel } from '../../schemas/management/part.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import multer from 'multer';
+import fs from 'fs';
+import path from 'path';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Parts");
+const logger = log4js.getLogger('Parts');
 logger.level = process.env.LOG_LEVEL;

 // Set storage engine
@ -18,7 +17,7 @@ const partsStorage = multer.diskStorage({
|
|||||||
destination: process.env.PART_STORAGE,
|
destination: process.env.PART_STORAGE,
|
||||||
filename: async function (req, file, cb) {
|
filename: async function (req, file, cb) {
|
||||||
// Retrieve custom file name from request body
|
// Retrieve custom file name from request body
|
||||||
const customFileName = req.params.id || "default"; // Default to 'default' if not provided
|
const customFileName = req.params.id || 'default'; // Default to 'default' if not provided
|
||||||
// Create the final filename ensuring it ends with .g
|
// Create the final filename ensuring it ends with .g
|
||||||
const finalFilename = `${customFileName}.stl`;
|
const finalFilename = `${customFileName}.stl`;
|
||||||
|
|
||||||
@ -34,7 +33,7 @@ const partUpload = multer({
|
|||||||
fileFilter: function (req, file, cb) {
|
fileFilter: function (req, file, cb) {
|
||||||
checkFileType(file, cb);
|
checkFileType(file, cb);
|
||||||
},
|
},
|
||||||
}).single("partFile"); // The name attribute of the file input in the HTML form
|
}).single('partFile'); // The name attribute of the file input in the HTML form
|
||||||
|
|
||||||
// Check file type
|
// Check file type
|
||||||
function checkFileType(file, cb) {
|
function checkFileType(file, cb) {
|
||||||
@ -47,7 +46,7 @@ function checkFileType(file, cb) {
|
|||||||
console.log(file);
|
console.log(file);
|
||||||
return cb(null, true);
|
return cb(null, true);
|
||||||
} else {
|
} else {
|
||||||
cb("Error: .stl files only!");
|
cb('Error: .stl files only!');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
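The checkFileType guard referenced above restricts uploads to .stl files before multer stores them. One common way to implement such a filter with multer's callback convention is sketched below; it is illustrative and not necessarily the project's exact implementation.

// Sketch: accept only .stl uploads in a multer fileFilter (illustrative).
import path from 'path';

function checkStlFileType(file, cb) {
  const ext = path.extname(file.originalname).toLowerCase();
  if (ext === '.stl') {
    return cb(null, true); // accept the file
  }
  cb('Error: .stl files only!'); // reject, matching the message used in the handler above
}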
@ -56,11 +55,11 @@ export const listPartsRouteHandler = async (
|
|||||||
res,
|
res,
|
||||||
page = 1,
|
page = 1,
|
||||||
limit = 25,
|
limit = 25,
|
||||||
property = "",
|
property = '',
|
||||||
filter = {},
|
filter = {},
|
||||||
search = "",
|
search = '',
|
||||||
sort = "",
|
sort = '',
|
||||||
order = "ascend"
|
order = 'ascend'
|
||||||
) => {
|
) => {
|
||||||
try {
|
try {
|
||||||
// Calculate the skip value based on the page number and limit
|
// Calculate the skip value based on the page number and limit
|
||||||
@ -69,40 +68,51 @@ export const listPartsRouteHandler = async (
|
|||||||
let part;
|
let part;
|
||||||
let aggregateCommand = [];
|
let aggregateCommand = [];
|
||||||
|
|
||||||
|
if (search) {
|
||||||
|
// Add a text search match stage for name and brand fields
|
||||||
|
aggregateCommand.push({
|
||||||
|
$match: {
|
||||||
|
$text: {
|
||||||
|
$search: search,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
if (filter != {}) {
|
if (filter != {}) {
|
||||||
// use filtering if present
|
// use filtering if present
|
||||||
aggregateCommand.push({ $match: filter });
|
aggregateCommand.push({ $match: filter });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (property != "") {
|
if (property != '') {
|
||||||
logger.error(property);
|
logger.error(property);
|
||||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||||
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
|
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||||
} else {
|
} else {
|
||||||
aggregateCommand.push({
|
aggregateCommand.push({
|
||||||
$lookup: {
|
$lookup: {
|
||||||
from: "products", // The collection name (usually lowercase plural)
|
from: 'products', // The collection name (usually lowercase plural)
|
||||||
localField: "product", // The field in your current model
|
localField: 'product', // The field in your current model
|
||||||
foreignField: "_id", // The field in the products collection
|
foreignField: '_id', // The field in the products collection
|
||||||
as: "product", // The output field name
|
as: 'product', // The output field name
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
aggregateCommand.push({ $unwind: "$product" });
|
aggregateCommand.push({ $unwind: '$product' });
|
||||||
aggregateCommand.push({
|
aggregateCommand.push({
|
||||||
$project: {
|
$project: {
|
||||||
name: 1,
|
name: 1,
|
||||||
_id: 1,
|
_id: 1,
|
||||||
createdAt: 1,
|
createdAt: 1,
|
||||||
updatedAt: 1,
|
updatedAt: 1,
|
||||||
"product._id": 1,
|
'product._id': 1,
|
||||||
"product.name": 1,
|
'product.name': 1,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add sorting if sort parameter is provided
|
// Add sorting if sort parameter is provided
|
||||||
if (sort) {
|
if (sort) {
|
||||||
const sortOrder = order === "descend" ? -1 : 1;
|
const sortOrder = order === 'descend' ? -1 : 1;
|
||||||
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
||||||
}
|
}
|
||||||
|
|
||||||
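The new $text match stage added in the hunk above only works if the parts collection carries a text index covering the searched fields (the diff's own comment mentions name and brand). That index is not part of this diff, so the schema-level declaration below is an assumption about how it would typically be set up.

// Sketch: the text index a { $match: { $text: { $search } } } stage relies on.
import mongoose from 'mongoose';

// Hypothetical schema shape; only the index declaration is the point here.
const partSchema = new mongoose.Schema({ name: String, brand: String });
partSchema.index({ name: 'text', brand: 'text' });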
@ -115,11 +125,11 @@ export const listPartsRouteHandler = async (
|
|||||||
|
|
||||||
logger.trace(
|
logger.trace(
|
||||||
`List of parts (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
|
`List of parts (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
|
||||||
part,
|
part
|
||||||
);
|
);
|
||||||
res.send(part);
|
res.send(part);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error listing parts:", error);
|
logger.error('Error listing parts:', error);
|
||||||
res.status(500).send({ error: error });
|
res.status(500).send({ error: error });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -133,22 +143,24 @@ export const getPartRouteHandler = async (req, res) => {
|
|||||||
.findOne({
|
.findOne({
|
||||||
_id: id,
|
_id: id,
|
||||||
})
|
})
|
||||||
.populate("product");
|
.populate('product');
|
||||||
|
|
||||||
if (!part) {
|
if (!part) {
|
||||||
logger.warn(`Part not found with supplied id.`);
|
logger.warn(`Part not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Part with ID: ${id}:`, part);
|
logger.trace(`Part with ID: ${id}:`, part);
|
||||||
|
|
||||||
const auditLogs = await auditLogModel.find({
|
const auditLogs = await auditLogModel
|
||||||
target: id
|
.find({
|
||||||
}).populate('owner');
|
target: id,
|
||||||
|
})
|
||||||
|
.populate('owner');
|
||||||
|
|
||||||
res.send({...part._doc, auditLogs: auditLogs});
|
res.send({ ...part._doc, auditLogs: auditLogs });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching Part:", error);
|
logger.error('Error fetching Part:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -163,40 +175,29 @@ export const editPartRouteHandler = async (req, res) => {
|
|||||||
if (!part) {
|
if (!part) {
|
||||||
// Error handling
|
// Error handling
|
||||||
logger.warn(`Part not found with supplied id.`);
|
logger.warn(`Part not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Part with ID: ${id}:`, part);
|
logger.trace(`Part with ID: ${id}:`, part);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const { createdAt, updatedAt, started_at, status, ...updateData } =
|
const updateData = req.body;
|
||||||
req.body;
|
|
||||||
|
|
||||||
// Create audit log before updating
|
// Create audit log before updating
|
||||||
await newAuditLog(
|
await newAuditLog(part.toObject(), updateData, id, 'Part', req.user._id, 'User');
|
||||||
part.toObject(),
|
|
||||||
updateData,
|
|
||||||
id,
|
|
||||||
'Part',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
|
|
||||||
const result = await partModel.updateOne(
|
const result = await partModel.updateOne({ _id: id }, { $set: updateData });
|
||||||
{ _id: id },
|
|
||||||
{ $set: updateData },
|
|
||||||
);
|
|
||||||
if (result.nModified === 0) {
|
if (result.nModified === 0) {
|
||||||
logger.error("No Part updated.");
|
logger.error('No Part updated.');
|
||||||
res.status(500).send({ error: "No parts updated." });
|
res.status(500).send({ error: 'No parts updated.' });
|
||||||
}
|
}
|
||||||
} catch (updateError) {
|
} catch (updateError) {
|
||||||
logger.error("Error updating part:", updateError);
|
logger.error('Error updating part:', updateError);
|
||||||
res.status(500).send({ error: updateError.message });
|
res.status(500).send({ error: updateError.message });
|
||||||
}
|
}
|
||||||
res.send("OK");
|
res.send('OK');
|
||||||
} catch (fetchError) {
|
} catch (fetchError) {
|
||||||
logger.error("Error fetching part:", fetchError);
|
logger.error('Error fetching part:', fetchError);
|
||||||
res.status(500).send({ error: fetchError.message });
|
res.status(500).send({ error: fetchError.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -215,20 +216,13 @@ export const newPartRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
const results = await partModel.insertMany(partsToCreate);
|
const results = await partModel.insertMany(partsToCreate);
|
||||||
if (!results.length) {
|
if (!results.length) {
|
||||||
logger.error("No parts created.");
|
logger.error('No parts created.');
|
||||||
return res.status(500).send({ error: "No parts created." });
|
return res.status(500).send({ error: 'No parts created.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create audit logs for each new part
|
// Create audit logs for each new part
|
||||||
for (const result of results) {
|
for (const result of results) {
|
||||||
await newAuditLog(
|
await newAuditLog({}, result.toObject(), result._id, 'Part', req.user._id, 'User');
|
||||||
{},
|
|
||||||
result.toObject(),
|
|
||||||
result._id,
|
|
||||||
'Part',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return res.status(200).send(results);
|
return res.status(200).send(results);
|
||||||
@ -244,19 +238,12 @@ export const newPartRouteHandler = async (req, res) => {
|
|||||||
const result = await partModel.create(newPart);
|
const result = await partModel.create(newPart);
|
||||||
|
|
||||||
// Create audit log for new part
|
// Create audit log for new part
|
||||||
await newAuditLog(
|
await newAuditLog({}, newPart, result._id, 'Part', req.user._id, 'User');
|
||||||
{},
|
|
||||||
newPart,
|
|
||||||
result._id,
|
|
||||||
'Part',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
|
|
||||||
return res.status(200).send(result);
|
return res.status(200).send(result);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error creating part(s):", error);
|
logger.error('Error creating part(s):', error);
|
||||||
return res.status(500).send({ error: error.message });
|
return res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -270,7 +257,7 @@ export const uploadPartFileContentRouteHandler = async (req, res) => {
|
|||||||
if (!part) {
|
if (!part) {
|
||||||
// Error handling
|
// Error handling
|
||||||
logger.warn(`Part not found with supplied id.`);
|
logger.warn(`Part not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
logger.trace(`Part with ID: ${id}`);
|
logger.trace(`Part with ID: ${id}`);
|
||||||
try {
|
try {
|
||||||
@ -282,22 +269,22 @@ export const uploadPartFileContentRouteHandler = async (req, res) => {
|
|||||||
} else {
|
} else {
|
||||||
if (req.file == undefined) {
|
if (req.file == undefined) {
|
||||||
res.send({
|
res.send({
|
||||||
message: "No file selected!",
|
message: 'No file selected!',
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
res.send({
|
res.send({
|
||||||
status: "OK",
|
status: 'OK',
|
||||||
file: `${req.file.filename}`,
|
file: `${req.file.filename}`,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
} catch (updateError) {
|
} catch (updateError) {
|
||||||
logger.error("Error updating part:", updateError);
|
logger.error('Error updating part:', updateError);
|
||||||
res.status(500).send({ error: updateError.message });
|
res.status(500).send({ error: updateError.message });
|
||||||
}
|
}
|
||||||
} catch (fetchError) {
|
} catch (fetchError) {
|
||||||
logger.error("Error fetching part:", fetchError);
|
logger.error('Error fetching part:', fetchError);
|
||||||
res.status(500).send({ error: fetchError.message });
|
res.status(500).send({ error: fetchError.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -313,22 +300,22 @@ export const getPartFileContentRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
if (!part) {
|
if (!part) {
|
||||||
logger.warn(`Part not found with supplied id.`);
|
logger.warn(`Part not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Part not found." });
|
return res.status(404).send({ error: 'Part not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Returning part file contents with ID: ${id}:`);
|
logger.trace(`Returning part file contents with ID: ${id}:`);
|
||||||
|
|
||||||
const filePath = path.join(process.env.PART_STORAGE, id + ".stl");
|
const filePath = path.join(process.env.PART_STORAGE, id + '.stl');
|
||||||
|
|
||||||
// Read the file
|
// Read the file
|
||||||
fs.readFile(filePath, "utf8", (err, data) => {
|
fs.readFile(filePath, 'utf8', (err, data) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
if (err.code === "ENOENT") {
|
if (err.code === 'ENOENT') {
|
||||||
// File not found
|
// File not found
|
||||||
return res.status(404).send({ error: "File not found!" });
|
return res.status(404).send({ error: 'File not found!' });
|
||||||
} else {
|
} else {
|
||||||
// Other errors
|
// Other errors
|
||||||
return res.status(500).send({ error: "Error reading file." });
|
return res.status(500).send({ error: 'Error reading file.' });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -336,7 +323,7 @@ export const getPartFileContentRouteHandler = async (req, res) => {
|
|||||||
res.send(data);
|
res.send(data);
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching Part:", error);
|
logger.error('Error fetching Part:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -1,14 +1,13 @@
-import dotenv from "dotenv";
-import { productModel } from "../../schemas/management/product.schema.js";
-import { partModel } from "../../schemas/management/part.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { productModel } from '../../schemas/management/product.schema.js';
+import { partModel } from '../../schemas/management/part.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Products");
+const logger = log4js.getLogger('Products');
 logger.level = process.env.LOG_LEVEL;

 export const listProductsRouteHandler = async (
@ -16,8 +15,8 @@ export const listProductsRouteHandler = async (
|
|||||||
res,
|
res,
|
||||||
page = 1,
|
page = 1,
|
||||||
limit = 25,
|
limit = 25,
|
||||||
property = "",
|
property = '',
|
||||||
filter = {},
|
filter = {}
|
||||||
) => {
|
) => {
|
||||||
try {
|
try {
|
||||||
// Calculate the skip value based on the page number and limit
|
// Calculate the skip value based on the page number and limit
|
||||||
@ -31,13 +30,13 @@ export const listProductsRouteHandler = async (
|
|||||||
aggregateCommand.push({ $match: filter });
|
aggregateCommand.push({ $match: filter });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (property != "") {
|
if (property != '') {
|
||||||
// Match documents where the specified property is either null, undefined, empty string, empty array or empty object
|
// Match documents where the specified property is either null, undefined, empty string, empty array or empty object
|
||||||
aggregateCommand.push({
|
aggregateCommand.push({
|
||||||
$match: {
|
$match: {
|
||||||
$or: [
|
$or: [
|
||||||
{ [property]: null },
|
{ [property]: null },
|
||||||
{ [property]: "" },
|
{ [property]: '' },
|
||||||
{ [property]: [] },
|
{ [property]: [] },
|
||||||
{ [property]: {} },
|
{ [property]: {} },
|
||||||
{ [property]: { $exists: false } },
|
{ [property]: { $exists: false } },
|
||||||
@ -56,13 +55,10 @@ export const listProductsRouteHandler = async (
|
|||||||
|
|
||||||
product = await productModel.aggregate(aggregateCommand);
|
product = await productModel.aggregate(aggregateCommand);
|
||||||
|
|
||||||
logger.trace(
|
logger.trace(`List of products (Page ${page}, Limit ${limit}, Property ${property}):`, product);
|
||||||
`List of products (Page ${page}, Limit ${limit}, Property ${property}):`,
|
|
||||||
product,
|
|
||||||
);
|
|
||||||
res.send(product);
|
res.send(product);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error listing products:", error);
|
logger.error('Error listing products:', error);
|
||||||
res.status(500).send({ error: error });
|
res.status(500).send({ error: error });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -76,23 +72,25 @@ export const getProductRouteHandler = async (req, res) => {
|
|||||||
.findOne({
|
.findOne({
|
||||||
_id: id,
|
_id: id,
|
||||||
})
|
})
|
||||||
.populate("vendor")
|
.populate('vendor')
|
||||||
.populate("parts");
|
.populate('parts');
|
||||||
|
|
||||||
if (!product) {
|
if (!product) {
|
||||||
logger.warn(`Product not found with supplied id.`);
|
logger.warn(`Product not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Product with ID: ${id}:`, product);
|
logger.trace(`Product with ID: ${id}:`, product);
|
||||||
|
|
||||||
const auditLogs = await auditLogModel.find({
|
const auditLogs = await auditLogModel
|
||||||
target: id
|
.find({
|
||||||
}).populate('owner');
|
target: id,
|
||||||
|
})
|
||||||
|
.populate('owner');
|
||||||
|
|
||||||
res.send({...product._doc, auditLogs: auditLogs});
|
res.send({ ...product._doc, auditLogs: auditLogs });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching Product:", error);
|
logger.error('Error fetching Product:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -109,12 +107,12 @@ export const editProductRouteHandler = async (req, res) => {
|
|||||||
if (!product) {
|
if (!product) {
|
||||||
// Error handling
|
// Error handling
|
||||||
logger.warn(`Product not found with supplied id.`);
|
logger.warn(`Product not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Product with ID: ${id}:`, product);
|
logger.trace(`Product with ID: ${id}:`, product);
|
||||||
} catch (fetchError) {
|
} catch (fetchError) {
|
||||||
logger.error("Error fetching product:", fetchError);
|
logger.error('Error fetching product:', fetchError);
|
||||||
res.status(500).send({ error: fetchError.message });
|
res.status(500).send({ error: fetchError.message });
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -132,28 +130,18 @@ export const editProductRouteHandler = async (req, res) => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Create audit log before updating
|
// Create audit log before updating
|
||||||
await newAuditLog(
|
await newAuditLog(product.toObject(), updateData, id, 'Product', req.user._id, 'User');
|
||||||
product.toObject(),
|
|
||||||
updateData,
|
|
||||||
id,
|
|
||||||
'Product',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
|
|
||||||
const result = await productModel.updateOne(
|
const result = await productModel.updateOne({ _id: id }, { $set: updateData });
|
||||||
{ _id: id },
|
|
||||||
{ $set: updateData },
|
|
||||||
);
|
|
||||||
if (result.nModified === 0) {
|
if (result.nModified === 0) {
|
||||||
logger.error("No Product updated.");
|
logger.error('No Product updated.');
|
||||||
res.status(500).send({ error: "No products updated." });
|
res.status(500).send({ error: 'No products updated.' });
|
||||||
}
|
}
|
||||||
} catch (updateError) {
|
} catch (updateError) {
|
||||||
logger.error("Error updating product:", updateError);
|
logger.error('Error updating product:', updateError);
|
||||||
res.status(500).send({ error: updateError.message });
|
res.status(500).send({ error: updateError.message });
|
||||||
}
|
}
|
||||||
res.send("OK");
|
res.send('OK');
|
||||||
};
|
};
|
||||||
|
|
||||||
export const newProductRouteHandler = async (req, res) => {
|
export const newProductRouteHandler = async (req, res) => {
|
||||||
@ -172,19 +160,12 @@ export const newProductRouteHandler = async (req, res) => {
|
|||||||
const newProductResult = await productModel.create(newProduct);
|
const newProductResult = await productModel.create(newProduct);
|
||||||
|
|
||||||
if (newProductResult.nCreated === 0) {
|
if (newProductResult.nCreated === 0) {
|
||||||
logger.error("No product created.");
|
logger.error('No product created.');
|
||||||
res.status(500).send({ error: "No product created." });
|
res.status(500).send({ error: 'No product created.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create audit log for new product
|
// Create audit log for new product
|
||||||
await newAuditLog(
|
await newAuditLog({}, newProduct, newProductResult._id, 'Product', req.user._id, 'User');
|
||||||
{},
|
|
||||||
newProduct,
|
|
||||||
newProductResult._id,
|
|
||||||
'Product',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
|
|
||||||
const parts = req.body.parts || [];
|
const parts = req.body.parts || [];
|
||||||
const productId = newProductResult._id;
|
const productId = newProductResult._id;
|
||||||
@ -201,35 +182,28 @@ export const newProductRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
const newPartResult = await partModel.create(newPart);
|
const newPartResult = await partModel.create(newPart);
|
||||||
if (newPartResult.nCreated === 0) {
|
if (newPartResult.nCreated === 0) {
|
||||||
logger.error("No parts created.");
|
logger.error('No parts created.');
|
||||||
res.status(500).send({ error: "No parts created." });
|
res.status(500).send({ error: 'No parts created.' });
|
||||||
}
|
}
|
||||||
partIds.push(newPartResult._id);
|
partIds.push(newPartResult._id);
|
||||||
|
|
||||||
// Create audit log for each new part
|
// Create audit log for each new part
|
||||||
await newAuditLog(
|
await newAuditLog({}, newPart, newPartResult._id, 'Part', req.user._id, 'User');
|
||||||
{},
|
|
||||||
newPart,
|
|
||||||
newPartResult._id,
|
|
||||||
'Part',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const editProductResult = await productModel.updateOne(
|
const editProductResult = await productModel.updateOne(
|
||||||
{ _id: productId },
|
{ _id: productId },
|
||||||
{ $set: { parts: partIds } },
|
{ $set: { parts: partIds } }
|
||||||
);
|
);
|
||||||
|
|
||||||
if (editProductResult.nModified === 0) {
|
if (editProductResult.nModified === 0) {
|
||||||
logger.error("No product updated.");
|
logger.error('No product updated.');
|
||||||
res.status(500).send({ error: "No products updated." });
|
res.status(500).send({ error: 'No products updated.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
res.status(200).send({ ...newProductResult, parts: partIds });
|
res.status(200).send({ ...newProductResult, parts: partIds });
|
||||||
} catch (updateError) {
|
} catch (updateError) {
|
||||||
logger.error("Error updating product:", updateError);
|
logger.error('Error updating product:', updateError);
|
||||||
res.status(500).send({ error: updateError.message });
|
res.status(500).send({ error: updateError.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -1,26 +1,25 @@
|
|||||||
import dotenv from "dotenv";
|
import dotenv from 'dotenv';
|
||||||
import { jobModel } from "../../schemas/production/job.schema.js";
|
import { jobModel } from '../../schemas/production/job.schema.js';
|
||||||
import { subJobModel } from "../../schemas/production/subjob.schema.js";
|
import { subJobModel } from '../../schemas/production/subjob.schema.js';
|
||||||
import log4js from "log4js";
|
import log4js from 'log4js';
|
||||||
import { printerModel } from "../../schemas/production/printer.schema.js";
|
import { printerModel } from '../../schemas/production/printer.schema.js';
|
||||||
import { filamentModel } from "../../schemas/management/filament.schema.js";
|
import { filamentModel } from '../../schemas/management/filament.schema.js';
|
||||||
import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
|
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
|
||||||
import { partModel } from "../../schemas/management/part.schema.js";
|
import { partModel } from '../../schemas/management/part.schema.js';
|
||||||
import { productModel } from "../../schemas/management/product.schema.js";
|
import { productModel } from '../../schemas/management/product.schema.js';
|
||||||
import { vendorModel } from "../../schemas/management/vendor.schema.js";
|
import { vendorModel } from '../../schemas/management/vendor.schema.js';
|
||||||
import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
|
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
|
||||||
import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
|
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
|
||||||
import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
|
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
|
||||||
import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
|
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
|
||||||
import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
|
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
|
||||||
import { userModel } from "../../schemas/management/user.schema.js";
|
import { userModel } from '../../schemas/management/user.schema.js';
|
||||||
import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
|
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
|
||||||
import { noteModel } from "../../schemas/misc/note.schema.js";
|
import { noteModel } from '../../schemas/misc/note.schema.js';
|
||||||
import mongoose from "mongoose";
|
import mongoose from 'mongoose';
|
||||||
|
|
||||||
dotenv.config();
|
dotenv.config();
|
||||||
|
|
||||||
const logger = log4js.getLogger("Jobs");
|
const logger = log4js.getLogger('Jobs');
|
||||||
logger.level = process.env.LOG_LEVEL;
|
logger.level = process.env.LOG_LEVEL;
|
||||||
|
|
||||||
// Map prefixes to models and id fields
|
// Map prefixes to models and id fields
|
||||||
@@ -77,16 +76,16 @@ const buildSearchFilter = (params) => {
 };

 const trimSpotlightObject = (object) => {
   return {
     _id: object._id,
     name: object.name || undefined,
-    state: object.state && object?.state.type? { type: object.state.type } : undefined,
+    state: object.state && object?.state.type ? { type: object.state.type } : undefined,
     tags: object.tags || undefined,
     email: object.email || undefined,
     color: object.color || undefined,
     updatedAt: object.updatedAt || undefined,
   };
-}
+};

 export const getSpotlightRouteHandler = async (req, res) => {
   try {
@ -100,10 +99,10 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
|||||||
const delimiter = query.substring(3, 4);
|
const delimiter = query.substring(3, 4);
|
||||||
const suffix = query.substring(4);
|
const suffix = query.substring(4);
|
||||||
|
|
||||||
if (delimiter == ":") {
|
if (delimiter == ':') {
|
||||||
const prefixEntry = PREFIX_MODEL_MAP[prefix];
|
const prefixEntry = PREFIX_MODEL_MAP[prefix];
|
||||||
if (!prefixEntry || !prefixEntry.model) {
|
if (!prefixEntry || !prefixEntry.model) {
|
||||||
res.status(400).send({ error: "Invalid or unsupported prefix" });
|
res.status(400).send({ error: 'Invalid or unsupported prefix' });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const { model, idField } = prefixEntry;
|
const { model, idField } = prefixEntry;
|
||||||
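For readers following the spotlight handler: the query string is sliced positionally, with characters 0-2 forming the prefix, character 3 expected to be ':' and the remainder the suffix, as the substring calls above show. A quick worked example, with a made-up prefix value:

// Sketch: how a spotlight query like 'prt:ABC123' is sliced (prefix and id are hypothetical).
const query = 'prt:ABC123';
const prefix = query.substring(0, 3);    // 'prt'
const delimiter = query.substring(3, 4); // ':'
const suffix = query.substring(4);       // 'ABC123'
// The prefix is then looked up in PREFIX_MODEL_MAP to pick a model and id field.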
@ -123,18 +122,18 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// Build the response with only the required fields
|
// Build the response with only the required fields
|
||||||
const response = trimSpotlightObject(doc)
|
const response = trimSpotlightObject(doc);
|
||||||
res.status(200).send(response);
|
res.status(200).send(response);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(queryParams)
|
console.log(queryParams);
|
||||||
|
|
||||||
if (Object.keys(queryParams).length > 0) {
|
if (Object.keys(queryParams).length > 0) {
|
||||||
const prefixEntry = PREFIX_MODEL_MAP[prefix];
|
const prefixEntry = PREFIX_MODEL_MAP[prefix];
|
||||||
console.log(prefixEntry)
|
console.log(prefixEntry);
|
||||||
if (!prefixEntry || !prefixEntry.model) {
|
if (!prefixEntry || !prefixEntry.model) {
|
||||||
res.status(400).send({ error: "Invalid or unsupported prefix" });
|
res.status(400).send({ error: 'Invalid or unsupported prefix' });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const { model } = prefixEntry;
|
const { model } = prefixEntry;
|
||||||
@ -142,7 +141,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
|||||||
// Use req.query for search parameters
|
// Use req.query for search parameters
|
||||||
|
|
||||||
if (Object.keys(queryParams).length === 0) {
|
if (Object.keys(queryParams).length === 0) {
|
||||||
res.status(400).send({ error: "No search parameters provided" });
|
res.status(400).send({ error: 'No search parameters provided' });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -151,19 +150,16 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
// Perform search with limit
|
// Perform search with limit
|
||||||
const limit = parseInt(req.query.limit) || 10;
|
const limit = parseInt(req.query.limit) || 10;
|
||||||
const docs = await model.find(searchFilter)
|
const docs = await model.find(searchFilter).limit(limit).sort({ updatedAt: -1 }).lean();
|
||||||
.limit(limit)
|
|
||||||
.sort({ updatedAt: -1 })
|
|
||||||
.lean();
|
|
||||||
|
|
||||||
// Format response
|
// Format response
|
||||||
const response = docs.map(doc => (trimSpotlightObject(doc)));
|
const response = docs.map((doc) => trimSpotlightObject(doc));
|
||||||
|
|
||||||
res.status(200).send(response);
|
res.status(200).send(response);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error in spotlight lookup:", error);
|
logger.error('Error in spotlight lookup:', error);
|
||||||
res.status(500).send({ error: error });
|
res.status(500).send({ error: error });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -1,14 +1,12 @@
-import dotenv from "dotenv";
-import { userModel } from "../../schemas/management/user.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { userModel } from '../../schemas/management/user.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Users");
+const logger = log4js.getLogger('Users');
 logger.level = process.env.LOG_LEVEL;

 export const listUsersRouteHandler = async (
@ -16,8 +14,8 @@ export const listUsersRouteHandler = async (
|
|||||||
res,
|
res,
|
||||||
page = 1,
|
page = 1,
|
||||||
limit = 25,
|
limit = 25,
|
||||||
property = "",
|
property = '',
|
||||||
filter = {},
|
filter = {}
|
||||||
) => {
|
) => {
|
||||||
try {
|
try {
|
||||||
// Calculate the skip value based on the page number and limit
|
// Calculate the skip value based on the page number and limit
|
||||||
@ -31,10 +29,9 @@ export const listUsersRouteHandler = async (
|
|||||||
aggregateCommand.push({ $match: filter });
|
aggregateCommand.push({ $match: filter });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (property != "") {
|
if (property != '') {
|
||||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||||
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
|
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
aggregateCommand.push({ $skip: skip });
|
aggregateCommand.push({ $skip: skip });
|
||||||
@ -44,13 +41,10 @@ export const listUsersRouteHandler = async (
|
|||||||
|
|
||||||
user = await userModel.aggregate(aggregateCommand);
|
user = await userModel.aggregate(aggregateCommand);
|
||||||
|
|
||||||
logger.trace(
|
logger.trace(`List of users (Page ${page}, Limit ${limit}, Property ${property}):`, user);
|
||||||
`List of users (Page ${page}, Limit ${limit}, Property ${property}):`,
|
|
||||||
user,
|
|
||||||
);
|
|
||||||
res.send(user);
|
res.send(user);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error listing users:", error);
|
logger.error('Error listing users:', error);
|
||||||
res.status(500).send({ error: error });
|
res.status(500).send({ error: error });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -66,18 +60,20 @@ export const getUserRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
if (!user) {
|
if (!user) {
|
||||||
logger.warn(`User not found with supplied id.`);
|
logger.warn(`User not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "User not found." });
|
return res.status(404).send({ error: 'User not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`User with ID: ${id}:`, user);
|
logger.trace(`User with ID: ${id}:`, user);
|
||||||
|
|
||||||
const auditLogs = await auditLogModel.find({
|
const auditLogs = await auditLogModel
|
||||||
target: id
|
.find({
|
||||||
}).populate('owner');
|
target: id,
|
||||||
|
})
|
||||||
|
.populate('owner');
|
||||||
|
|
||||||
res.send({...user._doc, auditLogs: auditLogs});
|
res.send({ ...user._doc, auditLogs: auditLogs });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching User:", error);
|
logger.error('Error fetching User:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -92,7 +88,7 @@ export const editUserRouteHandler = async (req, res) => {
|
|||||||
if (!user) {
|
if (!user) {
|
||||||
// Error handling
|
// Error handling
|
||||||
logger.warn(`User not found with supplied id.`);
|
logger.warn(`User not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "User not found." });
|
return res.status(404).send({ error: 'User not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`User with ID: ${id}:`, user);
|
logger.trace(`User with ID: ${id}:`, user);
|
||||||
@ -107,33 +103,23 @@ export const editUserRouteHandler = async (req, res) => {
|
|||||||
email: req.body.email,
|
email: req.body.email,
|
||||||
};
|
};
|
||||||
|
|
||||||
console.log(req.user)
|
console.log(req.user);
|
||||||
|
|
||||||
// Create audit log before updating
|
// Create audit log before updating
|
||||||
await newAuditLog(
|
await newAuditLog(user.toObject(), updateData, id, 'User', req.user._id, 'User');
|
||||||
user.toObject(),
|
|
||||||
updateData,
|
|
||||||
id,
|
|
||||||
'User',
|
|
||||||
req.user._id,
|
|
||||||
'User'
|
|
||||||
);
|
|
||||||
|
|
||||||
const result = await userModel.updateOne(
|
const result = await userModel.updateOne({ _id: id }, { $set: updateData });
|
||||||
{ _id: id },
|
|
||||||
{ $set: updateData },
|
|
||||||
);
|
|
||||||
if (result.nModified === 0) {
|
if (result.nModified === 0) {
|
||||||
logger.error("No User updated.");
|
logger.error('No User updated.');
|
||||||
res.status(500).send({ error: "No users updated." });
|
res.status(500).send({ error: 'No users updated.' });
|
||||||
}
|
}
|
||||||
} catch (updateError) {
|
} catch (updateError) {
|
||||||
logger.error("Error updating user:", updateError);
|
logger.error('Error updating user:', updateError);
|
||||||
res.status(500).send({ error: updateError.message });
|
res.status(500).send({ error: updateError.message });
|
||||||
}
|
}
|
||||||
res.send("OK");
|
res.send('OK');
|
||||||
} catch (fetchError) {
|
} catch (fetchError) {
|
||||||
logger.error("Error fetching user:", fetchError);
|
logger.error('Error fetching user:', fetchError);
|
||||||
res.status(500).send({ error: fetchError.message });
|
res.status(500).send({ error: fetchError.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
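The diff leaves a few debugging console.log(req.user) calls in place after reformatting, and the commit message says an ESLint configuration was added alongside Prettier. A flat-config sketch of the kind of rule set that would flag such calls is shown below; the actual configuration is not visible in this portion of the diff, so treat every option here as an assumption.

// eslint.config.js: illustrative only; the project's real config is not shown in this diff.
import js from '@eslint/js';

export default [
  js.configs.recommended,
  {
    languageOptions: { ecmaVersion: 2022, sourceType: 'module' },
    rules: {
      'no-console': 'warn',     // would flag the leftover console.log(req.user) calls
      'no-unused-vars': 'warn', // would flag bindings like the removed jwt import
    },
  },
];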
@@ -1,14 +1,12 @@
-import dotenv from "dotenv";
-import { vendorModel } from "../../schemas/management/vendor.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { vendorModel } from '../../schemas/management/vendor.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Vendors");
+const logger = log4js.getLogger('Vendors');
 logger.level = process.env.LOG_LEVEL;

 export const listVendorsRouteHandler = async (
@ -16,8 +14,8 @@ export const listVendorsRouteHandler = async (
|
|||||||
res,
|
res,
|
||||||
page = 1,
|
page = 1,
|
||||||
limit = 25,
|
limit = 25,
|
||||||
property = "",
|
property = '',
|
||||||
filter = {},
|
filter = {}
|
||||||
) => {
|
) => {
|
||||||
try {
|
try {
|
||||||
// Calculate the skip value based on the page number and limit
|
// Calculate the skip value based on the page number and limit
|
||||||
@ -31,9 +29,9 @@ export const listVendorsRouteHandler = async (
|
|||||||
aggregateCommand.push({ $match: filter });
|
aggregateCommand.push({ $match: filter });
|
||||||
}
|
}
|
||||||
|
|
||||||
if (property != "") {
|
if (property != '') {
|
||||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||||
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
|
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||||
} else {
|
} else {
|
||||||
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
||||||
}
|
}
|
||||||
@ -45,13 +43,10 @@ export const listVendorsRouteHandler = async (
|
|||||||
|
|
||||||
vendor = await vendorModel.aggregate(aggregateCommand);
|
vendor = await vendorModel.aggregate(aggregateCommand);
|
||||||
|
|
||||||
logger.trace(
|
logger.trace(`List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`, vendor);
|
||||||
`List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`,
|
|
||||||
vendor,
|
|
||||||
);
|
|
||||||
res.send(vendor);
|
res.send(vendor);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error listing vendors:", error);
|
logger.error('Error listing vendors:', error);
|
||||||
res.status(500).send({ error: error });
|
res.status(500).send({ error: error });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -67,18 +62,20 @@ export const getVendorRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
if (!vendor) {
|
if (!vendor) {
|
||||||
logger.warn(`Vendor not found with supplied id.`);
|
logger.warn(`Vendor not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Vendor with ID: ${id}:`, vendor);
|
logger.trace(`Vendor with ID: ${id}:`, vendor);
|
||||||
|
|
||||||
const auditLogs = await auditLogModel.find({
|
const auditLogs = await auditLogModel
|
||||||
-      target: id
-    }).populate('owner');
+      .find({
+        target: id,
+      })
+      .populate('owner');

-    res.send({...vendor._doc, auditLogs: auditLogs});
+    res.send({ ...vendor._doc, auditLogs: auditLogs });
  } catch (error) {
-    logger.error("Error fetching Vendor:", error);
+    logger.error('Error fetching Vendor:', error);
    res.status(500).send({ error: error.message });
  }
};
@@ -93,7 +90,7 @@ export const editVendorRouteHandler = async (req, res) => {
    if (!vendor) {
      // Error handling
      logger.warn(`Vendor not found with supplied id.`);
-      return res.status(404).send({ error: "Print job not found." });
+      return res.status(404).send({ error: 'Print job not found.' });
    }

    logger.trace(`Vendor with ID: ${id}:`, vendor);
@@ -109,33 +106,23 @@ export const editVendorRouteHandler = async (req, res) => {
      email: req.body.email,
    };

-    console.log(req.user)
+    console.log(req.user);

    // Create audit log before updating
-    await newAuditLog(
-      vendor.toObject(),
-      updateData,
-      id,
-      'Vendor',
-      req.user._id,
-      'User'
-    );
+    await newAuditLog(vendor.toObject(), updateData, id, 'Vendor', req.user._id, 'User');

-      const result = await vendorModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await vendorModel.updateOne({ _id: id }, { $set: updateData });
      if (result.nModified === 0) {
-        logger.error("No Vendor updated.");
-        res.status(500).send({ error: "No vendors updated." });
+        logger.error('No Vendor updated.');
+        res.status(500).send({ error: 'No vendors updated.' });
      }
    } catch (updateError) {
-      logger.error("Error updating vendor:", updateError);
+      logger.error('Error updating vendor:', updateError);
      res.status(500).send({ error: updateError.message });
    }
-    res.send("OK");
+    res.send('OK');
  } catch (fetchError) {
-    logger.error("Error fetching vendor:", fetchError);
+    logger.error('Error fetching vendor:', fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};
@@ -147,8 +134,8 @@ export const newVendorRouteHandler = async (req, res) => {

    const result = await vendorModel.create(newVendor);
    if (result.nCreated === 0) {
-      logger.error("No vendor created.");
-      res.status(500).send({ error: "No vendor created." });
+      logger.error('No vendor created.');
+      res.status(500).send({ error: 'No vendor created.' });
    }

    // Create audit log for new vendor
@@ -161,9 +148,9 @@ export const newVendorRouteHandler = async (req, res) => {
      'User'
    );

-    res.status(200).send({ status: "ok" });
+    res.status(200).send({ status: 'ok' });
  } catch (updateError) {
-    logger.error("Error updating vendor:", updateError);
+    logger.error('Error updating vendor:', updateError);
    res.status(500).send({ error: updateError.message });
  }
};
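The nModified / nCreated checks above depend on the result shape Mongoose returns. A minimal, version-tolerant sketch (field names differ between Mongoose 5 and 6+, and create() resolves to the created document rather than a counter; this is an illustration, not part of the commit):

// Sketch only: tolerate both the legacy (nModified) and current (modifiedCount) result shapes.
const result = await vendorModel.updateOne({ _id: id }, { $set: updateData });
const modified = result.modifiedCount ?? result.nModified ?? 0;
if (modified === 0) {
  logger.warn('Vendor update matched no documents or changed nothing.');
}

// create() resolves to the new document (or throws), so checking a counter is not meaningful.
const created = await vendorModel.create(newVendor);
logger.trace(`Created vendor ${created._id}`);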
@@ -1,30 +1,27 @@
-import dotenv from "dotenv";
-import { keycloak } from "../../keycloak.js";
-import log4js from "log4js";
-import axios from "axios";
-import { userModel } from "../../schemas/management/user.schema.js";
+import dotenv from 'dotenv';
+import { keycloak } from '../../keycloak.js';
+import log4js from 'log4js';
+import axios from 'axios';
+import { userModel } from '../../schemas/management/user.schema.js';

dotenv.config();

-const logger = log4js.getLogger("Auth");
+const logger = log4js.getLogger('Auth');
logger.level = process.env.LOG_LEVEL;

// Login handler
export const loginRouteHandler = (req, res) => {
  // Get the redirect URL from form data or default to production overview
-  const redirectUrl = req.query.redirect_uri || "/production/overview";
+  const redirectUrl = req.query.redirect_uri || '/production/overview';

  // Store the original URL to redirect after login
  const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
-  const callbackUrl = encodeURIComponent(
-    `${process.env.APP_URL_API}/auth/callback`,
-  );
+  const callbackUrl = encodeURIComponent(`${process.env.APP_URL_API}/auth/callback`);
  const state = encodeURIComponent(redirectUrl);

  logger.warn(req.query.redirect_uri);

  res.redirect(
-    `${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`,
+    `${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
  );
};

@@ -66,7 +63,7 @@ const fetchAndStoreUser = async (req, token) => {

    return fullUserInfo;
  } catch (error) {
-    logger.error("Error fetching and storing user:", error);
+    logger.error('Error fetching and storing user:', error);
    throw error;
  }
};
@@ -77,22 +74,22 @@ export const loginCallbackRouteHandler = (req, res) => {

  // Extract the code and state from the query parameters
  const code = req.query.code;
-  const state = req.query.state || "/production/overview";
+  const state = req.query.state || '/production/overview';

  if (!code) {
-    return res.status(400).send("Authorization code missing");
+    return res.status(400).send('Authorization code missing');
  }

  // Exchange the code for tokens manually
  const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
-  const redirectUri = `${process.env.APP_URL_API || "http://localhost:8080"}/auth/callback`;
+  const redirectUri = `${process.env.APP_URL_API || 'http://localhost:8080'}/auth/callback`;

  // Make a POST request to exchange the code for tokens
  axios
    .post(
      tokenUrl,
      new URLSearchParams({
-        grant_type: "authorization_code",
+        grant_type: 'authorization_code',
        client_id: process.env.KEYCLOAK_CLIENT_ID,
        client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
        code: code,
@@ -100,9 +97,9 @@ export const loginCallbackRouteHandler = (req, res) => {
      }).toString(),
      {
        headers: {
-          "Content-Type": "application/x-www-form-urlencoded",
+          'Content-Type': 'application/x-www-form-urlencoded',
-        },
        },
+      }
    )
    .then(async (response) => {
      // Store tokens in session
@@ -112,7 +109,7 @@ export const loginCallbackRouteHandler = (req, res) => {
        id_token: response.data.id_token,
        expires_at: new Date().getTime() + response.data.expires_in * 1000,
      };
-      req.session["keycloak-token"] = tokenData;
+      req.session['keycloak-token'] = tokenData;

      try {
        // Fetch and store user data
@@ -120,21 +117,16 @@ export const loginCallbackRouteHandler = (req, res) => {

        // Save session and redirect to the original URL
        req.session.save(() => {
-          res.redirect(
-            (process.env.APP_URL_CLIENT || "http://localhost:3000") + state,
-          );
+          res.redirect((process.env.APP_URL_CLIENT || 'http://localhost:3000') + state);
        });
      } catch (error) {
-        logger.error("Error during user setup:", error);
-        res.status(500).send("Error setting up user session");
+        logger.error('Error during user setup:', error);
+        res.status(500).send('Error setting up user session');
      }
    })
    .catch((error) => {
-      console.error(
-        "Token exchange error:",
-        error.response?.data || error.message,
-      );
-      res.status(500).send("Authentication failed");
+      console.error('Token exchange error:', error.response?.data || error.message);
+      res.status(500).send('Authentication failed');
    });
};

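A minimal sketch of how the tokens stored in req.session['keycloak-token'] might be checked before serving protected routes (the middleware name and where it is mounted are assumptions, not taken from this commit):

// Sketch: reject requests whose Keycloak access token has expired.
const requireFreshToken = (req, res, next) => {
  const token = req.session?.['keycloak-token'];
  if (!token || !token.access_token) {
    return res.status(401).json({ error: 'Not authenticated' });
  }
  if (token.expires_at && token.expires_at <= Date.now()) {
    // The client is expected to call the refresh endpoint and retry.
    return res.status(401).json({ error: 'Token expired' });
  }
  next();
};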
@@ -161,13 +153,10 @@ const createOrUpdateUser = async (userInfo) => {
      name,
      firstName,
      lastName,
-      updatedAt: new Date()
+      updatedAt: new Date(),
    };

-    await userModel.updateOne(
-      { username },
-      { $set: updateData }
-    );
+    await userModel.updateOne({ username }, { $set: updateData });

    // Fetch the updated user to return
    return await userModel.findOne({ username });
@@ -181,14 +170,14 @@ const createOrUpdateUser = async (userInfo) => {
      email,
      name,
      firstName,
-      lastName
+      lastName,
    });

    await newUser.save();
    return newUser;
    }
  } catch (error) {
-    logger.error("Error creating/updating user:", error);
+    logger.error('Error creating/updating user:', error);
    throw error;
  }
};
@@ -197,31 +186,29 @@ export const userRouteHandler = (req, res) => {
  if (req.session && req.session.user) {
    res.json(req.session.user);
  } else {
-    res.status(401).json({ error: "Not authenticated" });
+    res.status(401).json({ error: 'Not authenticated' });
  }
};

// Logout handler
export const logoutRouteHandler = (req, res) => {
  // Get the redirect URL from query or default to login page
-  const redirectUrl = req.query.redirect_uri || "/login";
+  const redirectUrl = req.query.redirect_uri || '/login';

  // Destroy the session
  req.session.destroy((err) => {
    if (err) {
-      logger.error("Error destroying session:", err);
-      return res.status(500).json({ error: "Failed to logout" });
+      logger.error('Error destroying session:', err);
+      return res.status(500).json({ error: 'Failed to logout' });
    }

    // Construct the Keycloak logout URL with the redirect URI
    const logoutUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/logout`;
-    const encodedRedirectUri = encodeURIComponent(
-      `${process.env.APP_URL_CLIENT}${redirectUrl}`,
-    );
+    const encodedRedirectUri = encodeURIComponent(`${process.env.APP_URL_CLIENT}${redirectUrl}`);

    // Redirect to Keycloak logout with the redirect URI
    res.redirect(
-      `${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`,
+      `${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`
    );
  });
};
@@ -245,36 +232,32 @@ export const getUserInfoHandler = (req, res) => {
      email: token.content.email,
      name:
        token.content.name ||
-        `${token.content.given_name || ""} ${token.content.family_name || ""}`.trim(),
+        `${token.content.given_name || ''} ${token.content.family_name || ''}`.trim(),
      roles: token.content.realm_access?.roles || [],
    };
    return res.json(userInfo);
  }
-  return res.status(401).json({ error: "Not authenticated" });
+  return res.status(401).json({ error: 'Not authenticated' });
};

// Register route - Since we're using Keycloak, registration should be handled there
// This endpoint will redirect to Keycloak's registration page
export const registerRouteHandler = (req, res) => {
  const registrationUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/registrations`;
-  const redirectUri = encodeURIComponent(
-    process.env.APP_URL_CLIENT + "/auth/login",
-  );
+  const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');

  res.redirect(
-    `${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
+    `${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
  );
};

// Forgot password handler - redirect to Keycloak's reset password page
export const forgotPasswordRouteHandler = (req, res) => {
  const resetUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/login-actions/reset-credentials`;
-  const redirectUri = encodeURIComponent(
-    process.env.APP_URL_CLIENT + "/auth/login",
-  );
+  const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');

  res.redirect(
-    `${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
+    `${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
  );
};

@@ -282,34 +265,34 @@ export const forgotPasswordRouteHandler = (req, res) => {
export const refreshTokenRouteHandler = (req, res) => {
  if (
    !req.session ||
-    !req.session["keycloak-token"] ||
-    !req.session["keycloak-token"].refresh_token
+    !req.session['keycloak-token'] ||
+    !req.session['keycloak-token'].refresh_token
  ) {
-    return res.status(401).json({ error: "No refresh token available" });
+    return res.status(401).json({ error: 'No refresh token available' });
  }

-  const refreshToken = req.session["keycloak-token"].refresh_token;
+  const refreshToken = req.session['keycloak-token'].refresh_token;
  const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;

  axios
    .post(
      tokenUrl,
      new URLSearchParams({
-        grant_type: "refresh_token",
+        grant_type: 'refresh_token',
        client_id: process.env.KEYCLOAK_CLIENT_ID,
        client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
        refresh_token: refreshToken,
      }).toString(),
      {
        headers: {
-          "Content-Type": "application/x-www-form-urlencoded",
+          'Content-Type': 'application/x-www-form-urlencoded',
-        },
        },
+      }
    )
    .then((response) => {
      // Update session with new tokens
-      req.session["keycloak-token"] = {
-        ...req.session["keycloak-token"],
+      req.session['keycloak-token'] = {
+        ...req.session['keycloak-token'],
        access_token: response.data.access_token,
        refresh_token: response.data.refresh_token,
        expires_at: new Date().getTime() + response.data.expires_in * 1000,
@@ -319,22 +302,19 @@ export const refreshTokenRouteHandler = (req, res) => {
      req.session.save(() => {
        res.json({
          access_token: response.data.access_token,
-          expires_at: req.session["keycloak-token"].expires_at,
+          expires_at: req.session['keycloak-token'].expires_at,
        });
      });
    })
    .catch((error) => {
-      logger.error(
-        "Token refresh error:",
-        error.response?.data || error.message,
-      );
+      logger.error('Token refresh error:', error.response?.data || error.message);

      // If refresh token is invalid, clear the session
      if (error.response?.status === 400) {
        req.session.destroy();
      }

-      res.status(500).json({ error: "Failed to refresh token" });
+      res.status(500).json({ error: 'Failed to refresh token' });
    });
};

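For context, a minimal sketch of how these auth handlers could be mounted in an Express app; the route paths, the session setup, and the import path are assumptions rather than part of this commit:

import express from 'express';
import session from 'express-session';
import {
  loginRouteHandler,
  loginCallbackRouteHandler,
  logoutRouteHandler,
  refreshTokenRouteHandler,
} from './services/auth/index.js'; // path assumed

const app = express();
app.use(session({ secret: process.env.SESSION_SECRET, resave: false, saveUninitialized: false }));

app.get('/auth/login', loginRouteHandler);      // redirects to Keycloak's auth endpoint
app.get('/auth/callback', loginCallbackRouteHandler); // exchanges the code and stores the session
app.get('/auth/logout', logoutRouteHandler);
app.post('/auth/refresh', refreshTokenRouteHandler);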
@@ -1,13 +1,12 @@
-import dotenv from "dotenv";
-import { noteModel } from "../../schemas/misc/note.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { noteModel } from '../../schemas/misc/note.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

dotenv.config();

-const logger = log4js.getLogger("Notes");
+const logger = log4js.getLogger('Notes');
logger.level = process.env.LOG_LEVEL;

export const listNotesRouteHandler = async (
@@ -15,8 +14,8 @@ export const listNotesRouteHandler = async (
  res,
  page = 1,
  limit = 25,
-  property = "",
-  filter = {},
+  property = '',
+  filter = {}
) => {
  try {
    const skip = (page - 1) * limit;
@@ -29,35 +28,35 @@ export const listNotesRouteHandler = async (

    aggregateCommand.push({
      $lookup: {
-        from: "users", // The collection name (usually lowercase plural)
-        localField: "user", // The field in your current model
-        foreignField: "_id", // The field in the users collection
-        as: "user", // The output field name
+        from: 'users', // The collection name (usually lowercase plural)
+        localField: 'user', // The field in your current model
+        foreignField: '_id', // The field in the users collection
+        as: 'user', // The output field name
      },
    });
-    aggregateCommand.push({ $unwind: "$user" });
+    aggregateCommand.push({ $unwind: '$user' });
    aggregateCommand.push({
      $lookup: {
-        from: "notetypes", // The collection name (usually lowercase plural)
-        localField: "noteType", // The field in your current model
-        foreignField: "_id", // The field in the users collection
-        as: "noteType", // The output field name
+        from: 'notetypes', // The collection name (usually lowercase plural)
+        localField: 'noteType', // The field in your current model
+        foreignField: '_id', // The field in the users collection
+        as: 'noteType', // The output field name
      },
    });
-    aggregateCommand.push({ $unwind: "$noteType" });
+    aggregateCommand.push({ $unwind: '$noteType' });
    aggregateCommand.push({
      $project: {
        name: 1,
        _id: 1,
        createdAt: 1,
        updatedAt: 1,
-        "noteType._id": 1,
-        "noteType.name": 1,
-        "noteType.color": 1,
-        "user._id": 1,
-        "user.name": 1,
+        'noteType._id': 1,
+        'noteType.name': 1,
+        'noteType.color': 1,
+        'user._id': 1,
+        'user.name': 1,
        content: 1,
-        parent: 1
+        parent: 1,
      },
    });

@@ -66,14 +65,10 @@ export const listNotesRouteHandler = async (

    notes = await noteModel.aggregate(aggregateCommand);

-    logger.trace(
-      `List of notes (Page ${page}, Limit ${limit}, Property ${property}):`,
-      notes,
-    );
+    logger.trace(`List of notes (Page ${page}, Limit ${limit}, Property ${property}):`, notes);
    res.send(notes);
  } catch (error) {
-    logger.error("Error listing notes:", error);
+    logger.error('Error listing notes:', error);
    res.status(500).send({ error: error });
  }
};
@@ -87,18 +82,20 @@ export const getNoteRouteHandler = async (req, res) => {

    if (!note) {
      logger.warn(`Note not found with supplied id.`);
-      return res.status(404).send({ error: "Note not found." });
+      return res.status(404).send({ error: 'Note not found.' });
    }

    logger.trace(`Note with ID: ${id}:`, note);

-    const auditLogs = await auditLogModel.find({
-      target: id
-    }).populate('owner');
+    const auditLogs = await auditLogModel
+      .find({
+        target: id,
+      })
+      .populate('owner');

-    res.send({...note._doc, auditLogs: auditLogs});
+    res.send({ ...note._doc, auditLogs: auditLogs });
  } catch (error) {
-    logger.error("Error fetching note:", error);
+    logger.error('Error fetching note:', error);
    res.status(500).send({ error: error.message });
  }
};
@@ -110,7 +107,7 @@ export const editNoteRouteHandler = async (req, res) => {

    if (!note) {
      logger.warn(`Note not found with supplied id.`);
-      return res.status(404).send({ error: "Note not found." });
+      return res.status(404).send({ error: 'Note not found.' });
    }

    logger.trace(`Note with ID: ${id}:`, note);
@@ -124,30 +121,20 @@ export const editNoteRouteHandler = async (req, res) => {
    };

    // Create audit log before updating
-    await newAuditLog(
-      note.toObject(),
-      updateData,
-      id,
-      'Note',
-      req.user._id,
-      'User'
-    );
+    await newAuditLog(note.toObject(), updateData, id, 'Note', req.user._id, 'User');

-      const result = await noteModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await noteModel.updateOne({ _id: id }, { $set: updateData });
      if (result.nModified === 0) {
-        logger.error("No note updated.");
-        res.status(500).send({ error: "No notes updated." });
+        logger.error('No note updated.');
+        res.status(500).send({ error: 'No notes updated.' });
      }
    } catch (updateError) {
-      logger.error("Error updating note:", updateError);
+      logger.error('Error updating note:', updateError);
      res.status(500).send({ error: updateError.message });
    }
-    res.send("OK");
+    res.send('OK');
  } catch (fetchError) {
-    logger.error("Error fetching note:", fetchError);
+    logger.error('Error fetching note:', fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};
@@ -159,23 +146,16 @@ export const newNoteRouteHandler = async (req, res) => {

    const result = await noteModel.create(newNote);
    if (result.nCreated === 0) {
-      logger.error("No note created.");
-      res.status(500).send({ error: "No note created." });
+      logger.error('No note created.');
+      res.status(500).send({ error: 'No note created.' });
    }

    // Create audit log for new note
-    await newAuditLog(
-      {},
-      newNote,
-      result._id,
-      'Note',
-      req.user._id,
-      'User'
-    );
+    await newAuditLog({}, newNote, result._id, 'Note', req.user._id, 'User');

-    res.status(200).send({ status: "ok" });
+    res.status(200).send({ status: 'ok' });
  } catch (updateError) {
-    logger.error("Error creating note:", updateError);
+    logger.error('Error creating note:', updateError);
    res.status(500).send({ error: updateError.message });
  }
};
@@ -187,13 +167,13 @@ export const deleteNoteRouteHandler = async (req, res) => {

    if (!note) {
      logger.warn(`Note not found with supplied id.`);
-      return res.status(404).send({ error: "Note not found." });
+      return res.status(404).send({ error: 'Note not found.' });
    }

    // Check if the current user owns this note
    if (note.user.toString() !== req.user._id.toString()) {
      logger.warn(`User ${req.user._id} attempted to delete note ${id} owned by user ${note.user}`);
-      return res.status(403).send({ error: "You can only delete your own notes." });
+      return res.status(403).send({ error: 'You can only delete your own notes.' });
    }

    logger.trace(`Deleting note with ID: ${id} and all its children`);
@@ -202,25 +182,16 @@ export const deleteNoteRouteHandler = async (req, res) => {
    const deletedNoteIds = await recursivelyDeleteNotes(id);

    // Create audit log for the deletion
-    await newAuditLog(
-      note.toObject(),
-      {},
-      id,
-      'Note',
-      req.user._id,
-      'User',
-      'DELETE'
-    );
+    await newAuditLog(note.toObject(), {}, id, 'Note', req.user._id, 'User', 'DELETE');

    logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
    res.send({
-      status: "ok",
+      status: 'ok',
      deletedNoteIds: deletedNoteIds,
-      message: `Deleted ${deletedNoteIds.length} notes`
+      message: `Deleted ${deletedNoteIds.length} notes`,
    });

  } catch (error) {
-    logger.error("Error deleting note:", error);
+    logger.error('Error deleting note:', error);
    res.status(500).send({ error: error.message });
  }
};
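The $lookup/$unwind/$project pipeline above returns notes with their user and noteType references already joined and trimmed to the projected fields. A rough sketch of the shape each element of notes takes (values are placeholders for illustration only, not real data):

// Illustrative shape of one aggregated note:
const exampleNote = {
  _id: '...',
  name: '...',
  content: '...',
  parent: null,
  createdAt: '...',
  updatedAt: '...',
  noteType: { _id: '...', name: '...', color: '...' }, // joined from the notetypes collection
  user: { _id: '...', name: '...' },                   // joined from the users collection
};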
@@ -1,26 +1,25 @@
-import dotenv from "dotenv";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { jobModel } from "../../schemas/production/job.schema.js";
-import { subJobModel } from "../../schemas/production/subjob.schema.js";
-import { printerModel } from "../../schemas/production/printer.schema.js";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
-import { partModel } from "../../schemas/management/part.schema.js";
-import { productModel } from "../../schemas/management/product.schema.js";
-import { vendorModel } from "../../schemas/management/vendor.schema.js";
-import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
-import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
-import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
-import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-import { userModel } from "../../schemas/management/user.schema.js";
-import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
-import { noteModel } from "../../schemas/misc/note.schema.js";
+import dotenv from 'dotenv';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { jobModel } from '../../schemas/production/job.schema.js';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
+import { printerModel } from '../../schemas/production/printer.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
+import { partModel } from '../../schemas/management/part.schema.js';
+import { productModel } from '../../schemas/management/product.schema.js';
+import { vendorModel } from '../../schemas/management/vendor.schema.js';
+import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
+import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
+import { userModel } from '../../schemas/management/user.schema.js';
+import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
+import { noteModel } from '../../schemas/misc/note.schema.js';

dotenv.config();

-const logger = log4js.getLogger("Jobs");
+const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;

// Map prefixes to models and id fields
@@ -77,16 +76,16 @@ const buildSearchFilter = (params) => {
};

const trimSpotlightObject = (object) => {
  return {
    _id: object._id,
    name: object.name || undefined,
-    state: object.state && object?.state.type? { type: object.state.type } : undefined,
+    state: object.state && object?.state.type ? { type: object.state.type } : undefined,
    tags: object.tags || undefined,
    email: object.email || undefined,
    color: object.color || undefined,
    updatedAt: object.updatedAt || undefined,
  };
-}
+};

export const getSpotlightRouteHandler = async (req, res) => {
  try {
@@ -100,10 +99,10 @@ export const getSpotlightRouteHandler = async (req, res) => {
    const delimiter = query.substring(3, 4);
    const suffix = query.substring(4);

-    if (delimiter == ":") {
+    if (delimiter == ':') {
      const prefixEntry = PREFIX_MODEL_MAP[prefix];
      if (!prefixEntry || !prefixEntry.model) {
-        res.status(400).send({ error: "Invalid or unsupported prefix" });
+        res.status(400).send({ error: 'Invalid or unsupported prefix' });
        return;
      }
      const { model, idField } = prefixEntry;
@@ -123,18 +122,18 @@ export const getSpotlightRouteHandler = async (req, res) => {
        return;
      }
      // Build the response with only the required fields
-      const response = trimSpotlightObject(doc)
+      const response = trimSpotlightObject(doc);
      res.status(200).send(response);
      return;
    }

-    console.log(queryParams)
+    console.log(queryParams);

    if (Object.keys(queryParams).length > 0) {
      const prefixEntry = PREFIX_MODEL_MAP[prefix];
-      console.log(prefixEntry)
+      console.log(prefixEntry);
      if (!prefixEntry || !prefixEntry.model) {
-        res.status(400).send({ error: "Invalid or unsupported prefix" });
+        res.status(400).send({ error: 'Invalid or unsupported prefix' });
        return;
      }
      const { model } = prefixEntry;
@@ -142,7 +141,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
      // Use req.query for search parameters

      if (Object.keys(queryParams).length === 0) {
-        res.status(400).send({ error: "No search parameters provided" });
+        res.status(400).send({ error: 'No search parameters provided' });
        return;
      }

@@ -151,19 +150,16 @@ export const getSpotlightRouteHandler = async (req, res) => {

      // Perform search with limit
      const limit = parseInt(req.query.limit) || 10;
-      const docs = await model.find(searchFilter)
-        .limit(limit)
-        .sort({ updatedAt: -1 })
-        .lean();
+      const docs = await model.find(searchFilter).limit(limit).sort({ updatedAt: -1 }).lean();

      // Format response
-      const response = docs.map(doc => (trimSpotlightObject(doc)));
+      const response = docs.map((doc) => trimSpotlightObject(doc));

      res.status(200).send(response);
      return;
    }
  } catch (error) {
-    logger.error("Error in spotlight lookup:", error);
+    logger.error('Error in spotlight lookup:', error);
    res.status(500).send({ error: error });
  }
};
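The spotlight query format relies on fixed offsets: the first three characters select an entry in PREFIX_MODEL_MAP, the fourth character must be ':', and the remainder is the value to look up. A minimal sketch of that parsing as used by getSpotlightRouteHandler (the concrete prefixes defined in PREFIX_MODEL_MAP are not shown in this diff, so 'job' is only an assumed example):

const query = 'job:6650f0c2aa11bb22cc33dd44'; // 'job' is an assumed prefix
const prefix = query.substring(0, 3);         // 'job' -> PREFIX_MODEL_MAP key
const delimiter = query.substring(3, 4);      // must be ':'
const suffix = query.substring(4);            // id (or other value) to look up
if (delimiter === ':') {
  // const { model, idField } = PREFIX_MODEL_MAP[prefix];
  // const doc = await model.findOne({ [idField]: suffix });
}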
@@ -1,21 +1,18 @@
-import dotenv from "dotenv";
-import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import multer from "multer";
-import crypto from "crypto";
-import path from "path";
-import fs from "fs";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import log4js from 'log4js';
+import multer from 'multer';
+import path from 'path';
+import fs from 'fs';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
+import { extractConfigBlock } from '../../utils.js';

-import { extractConfigBlock } from "../../util/index.js";

dotenv.config();

-const logger = log4js.getLogger("GCodeFiles");
+const logger = log4js.getLogger('GCodeFiles');
logger.level = process.env.LOG_LEVEL;

// Set storage engine
@@ -23,7 +20,7 @@ const gcodeStorage = multer.diskStorage({
  destination: process.env.GCODE_STORAGE,
  filename: async function (req, file, cb) {
    // Retrieve custom file name from request body
-    const customFileName = req.params.id || "default"; // Default to 'default' if not provided
+    const customFileName = req.params.id || 'default'; // Default to 'default' if not provided

    // Create the final filename ensuring it ends with .gcode
    const finalFilename = `${customFileName}.gcode`;
@@ -40,7 +37,7 @@ const gcodeUpload = multer({
  fileFilter: function (req, file, cb) {
    checkFileType(file, cb);
  },
-}).single("gcodeFile"); // The name attribute of the file input in the HTML form
+}).single('gcodeFile'); // The name attribute of the file input in the HTML form

// Check file type
function checkFileType(file, cb) {
@@ -53,7 +50,7 @@ function checkFileType(file, cb) {
    console.log(file);
    return cb(null, true);
  } else {
-    cb("Error: .g, .gco, and .gcode files only!");
+    cb('Error: .g, .gco, and .gcode files only!');
  }
}

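Only the accept/reject branches of checkFileType appear in the hunk above; a typical extension check for this kind of multer fileFilter looks like the sketch below (the regex and use of file.originalname are assumptions, not taken from this commit):

import path from 'path';

// Sketch: accept .g, .gco and .gcode uploads, reject everything else.
function checkFileType(file, cb) {
  const allowed = /\.(g|gco|gcode)$/i;
  if (allowed.test(path.extname(file.originalname))) {
    return cb(null, true);
  }
  cb('Error: .g, .gco, and .gcode files only!');
}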
@@ -62,11 +59,11 @@ export const listGCodeFilesRouteHandler = async (
  res,
  page = 1,
  limit = 25,
-  property = "",
+  property = '',
  filter = {},
-  search = "",
-  sort = "",
-  order = "ascend"
+  search = '',
+  sort = '',
+  order = 'ascend'
) => {
  try {
    // Calculate the skip value based on the page number and limit
@@ -88,60 +85,60 @@ export const listGCodeFilesRouteHandler = async (

    aggregateCommand.push({
      $lookup: {
-        from: "filaments", // The name of the Filament collection
-        localField: "filament",
-        foreignField: "_id",
-        as: "filament",
+        from: 'filaments', // The name of the Filament collection
+        localField: 'filament',
+        foreignField: '_id',
+        as: 'filament',
      },
    });

    aggregateCommand.push({
      $unwind: {
-        path: "$filament",
+        path: '$filament',
        preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
      },
    });

    aggregateCommand.push({
      $addFields: {
-        filament: "$filament",
+        filament: '$filament',
      },
    });

    aggregateCommand.push({
      $lookup: {
-        from: "vendors", // The collection name (usually lowercase plural)
-        localField: "filament.vendor", // The field in your current model
-        foreignField: "_id", // The field in the products collection
-        as: "filament.vendor", // The output field name
+        from: 'vendors', // The collection name (usually lowercase plural)
+        localField: 'filament.vendor', // The field in your current model
+        foreignField: '_id', // The field in the products collection
+        as: 'filament.vendor', // The output field name
      },
    });

-    aggregateCommand.push({ $unwind: "$filament.vendor" });
+    aggregateCommand.push({ $unwind: '$filament.vendor' });

    if (filter != {}) {
      // use filtering if present
      aggregateCommand.push({ $match: filter });
    }

-    if (property != "") {
+    if (property != '') {
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
-      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+      aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
    } else {
      aggregateCommand.push({
        $project: {
-          "filament.gcodeFileInfo.estimatedPrintingTimeNormalMode": 0,
+          'filament.gcodeFileInfo.estimatedPrintingTimeNormalMode': 0,
          url: 0,
-          "filament.image": 0,
-          "filament.createdAt": 0,
-          "filament.updatedAt": 0,
+          'filament.image': 0,
+          'filament.createdAt': 0,
+          'filament.updatedAt': 0,
        },
      });
    }

    // Add sorting if sort parameter is provided
    if (sort) {
-      const sortOrder = order === "descend" ? -1 : 1;
+      const sortOrder = order === 'descend' ? -1 : 1;
      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
    }

@@ -154,11 +151,11 @@ export const listGCodeFilesRouteHandler = async (

    logger.trace(
      `List of gcode files (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
-      gcodeFile,
+      gcodeFile
    );
    res.send(gcodeFile);
  } catch (error) {
-    logger.error("Error listing gcode files:", error);
+    logger.error('Error listing gcode files:', error);
    res.status(500).send({ error: error });
  }
};
@@ -174,25 +171,22 @@ export const getGCodeFileContentRouteHandler = async (req, res) => {

    if (!gcodeFile) {
      logger.warn(`GCodeFile not found with supplied id.`);
-      return res.status(404).send({ error: "Print job not found." });
+      return res.status(404).send({ error: 'Print job not found.' });
    }

    logger.trace(`Returning GCode File contents with ID: ${id}:`);

-    const filePath = path.join(
-      process.env.GCODE_STORAGE,
-      gcodeFile.gcodeFileName,
-    );
+    const filePath = path.join(process.env.GCODE_STORAGE, gcodeFile.gcodeFileName);

    // Read the file
-    fs.readFile(filePath, "utf8", (err, data) => {
+    fs.readFile(filePath, 'utf8', (err, data) => {
      if (err) {
-        if (err.code === "ENOENT") {
+        if (err.code === 'ENOENT') {
          // File not found
-          return res.status(404).send({ error: "File not found!" });
+          return res.status(404).send({ error: 'File not found!' });
        } else {
          // Other errors
-          return res.status(500).send({ error: "Error reading file." });
+          return res.status(500).send({ error: 'Error reading file.' });
        }
      }

@@ -200,7 +194,7 @@ export const getGCodeFileContentRouteHandler = async (req, res) => {
      res.send(data);
    });
  } catch (error) {
-    logger.error("Error fetching GCodeFile:", error);
+    logger.error('Error fetching GCodeFile:', error);
    res.status(500).send({ error: error.message });
  }
};
@@ -215,7 +209,7 @@ export const editGCodeFileRouteHandler = async (req, res) => {
    if (!gcodeFile) {
      // Error handling
      logger.warn(`GCodeFile not found with supplied id.`);
-      return res.status(404).send({ error: "Print job not found." });
+      return res.status(404).send({ error: 'Print job not found.' });
    }

    logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
@@ -228,30 +222,20 @@ export const editGCodeFileRouteHandler = async (req, res) => {
    };

    // Create audit log before updating
-    await newAuditLog(
-      gcodeFile.toObject(),
-      updateData,
-      id,
-      'GCodeFile',
-      req.user._id,
-      'User'
-    );
+    await newAuditLog(gcodeFile.toObject(), updateData, id, 'GCodeFile', req.user._id, 'User');

-      const result = await gcodeFileModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
+      const result = await gcodeFileModel.updateOne({ _id: id }, { $set: updateData });
      if (result.nModified === 0) {
-        logger.error("No gcodeFile updated.");
-        res.status(500).send({ error: "No gcodeFiles updated." });
+        logger.error('No gcodeFile updated.');
+        res.status(500).send({ error: 'No gcodeFiles updated.' });
      }
    } catch (updateError) {
-      logger.error("Error updating gcodeFile:", updateError);
+      logger.error('Error updating gcodeFile:', updateError);
      res.status(500).send({ error: updateError.message });
    }
-    res.send("OK");
+    res.send('OK');
  } catch (fetchError) {
-    logger.error("Error fetching gcodeFile:", fetchError);
+    logger.error('Error fetching gcodeFile:', fetchError);
    //res.status(500).send({ error: fetchError.message });
  }
};
@@ -269,11 +253,11 @@ export const newGCodeFileRouteHandler = async (req, res) => {

    if (!filament) {
      logger.warn(`Filament not found with supplied id.`);
-      return res.status(404).send({ error: "Filament not found." });
+      return res.status(404).send({ error: 'Filament not found.' });
    }
    logger.trace(`Filament with ID: ${id}:`, filament);
  } catch (error) {
-    logger.error("Error fetching filament:", error);
+    logger.error('Error fetching filament:', error);
    return res.status(500).send({ error: error.message });
  }

@@ -289,23 +273,16 @@ export const newGCodeFileRouteHandler = async (req, res) => {

    const result = await gcodeFileModel.create(newGCodeFile);
    if (result.nCreated === 0) {
-      logger.error("No gcode file created.");
-      res.status(500).send({ error: "No gcode file created." });
+      logger.error('No gcode file created.');
+      res.status(500).send({ error: 'No gcode file created.' });
    }

    // Create audit log for new gcodefile
-    await newAuditLog(
-      {},
-      newGCodeFile,
-      result._id,
-      'GCodeFile',
-      req.user._id,
-      'User'
-    );
+    await newAuditLog({}, newGCodeFile, result._id, 'GCodeFile', req.user._id, 'User');

-    res.status(200).send({ status: "ok" });
+    res.status(200).send({ status: 'ok' });
  } catch (updateError) {
-    logger.error("Error creating gcode file:", updateError);
+    logger.error('Error creating gcode file:', updateError);
    res.status(500).send({ error: updateError.message });
  }
};
@@ -322,7 +299,7 @@ export const parseGCodeFileHandler = async (req, res) => {

    if (req.file == undefined) {
      return res.send({
-        message: "No file selected!",
+        message: 'No file selected!',
      });
    }

@@ -331,7 +308,7 @@ export const parseGCodeFileHandler = async (req, res) => {
    const filePath = path.join(req.file.destination, req.file.filename);

    // Read the file content
-    const fileContent = fs.readFileSync(filePath, "utf8");
+    const fileContent = fs.readFileSync(filePath, 'utf8');

    // Extract the config block
    const configInfo = extractConfigBlock(fileContent);
@@ -342,12 +319,12 @@ export const parseGCodeFileHandler = async (req, res) => {
      // Optionally clean up the file after processing if it's not needed
      fs.unlinkSync(filePath);
    } catch (parseError) {
-      logger.error("Error parsing GCode file:", parseError);
+      logger.error('Error parsing GCode file:', parseError);
      res.status(500).send({ error: parseError.message });
    }
  });
  } catch (error) {
-    logger.error("Error in parseGCodeFileHandler:", error);
+    logger.error('Error in parseGCodeFileHandler:', error);
    res.status(500).send({ error: error.message });
  }
};
@@ -361,7 +338,7 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
    if (!gcodeFile) {
      // Error handling
      logger.warn(`GCodeFile not found with supplied id.`);
-      return res.status(404).send({ error: "Print job not found." });
+      return res.status(404).send({ error: 'Print job not found.' });
    }
    logger.trace(`GCodeFile with ID: ${id}`);
    try {
@@ -373,15 +350,9 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
      } else {
        if (req.file == undefined) {
          res.send({
-            message: "No file selected!",
+            message: 'No file selected!',
          });
        } else {
-          // Get the path to the uploaded file
-          const filePath = path.join(req.file.destination, req.file.filename);

-          // Read the file content
-          const fileContent = fs.readFileSync(filePath, "utf8");

          // Update the gcodeFile document with the filename and the extracted config
          const result = await gcodeFileModel.updateOne(
            { _id: id },
@@ -389,27 +360,27 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
              $set: {
                gcodeFileName: req.file.filename,
              },
-            },
+            }
          );

          if (result.nModified === 0) {
-            logger.error("No gcodeFile updated.");
-            res.status(500).send({ error: "No gcodeFiles updated." });
+            logger.error('No gcodeFile updated.');
+            res.status(500).send({ error: 'No gcodeFiles updated.' });
          }

          res.send({
-            status: "OK",
+            status: 'OK',
            file: `${req.file.filename}`,
          });
        }
      }
    });
    } catch (updateError) {
-      logger.error("Error updating gcodeFile:", updateError);
+      logger.error('Error updating gcodeFile:', updateError);
      res.status(500).send({ error: updateError.message });
    }
  } catch (fetchError) {
-    logger.error("Error fetching gcodeFile:", fetchError);
+    logger.error('Error fetching gcodeFile:', fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};
@@ -423,22 +394,24 @@ export const getGCodeFileRouteHandler = async (req, res) => {
      .findOne({
        _id: id,
      })
-      .populate("filament");
+      .populate('filament');

    if (!gcodeFile) {
      logger.warn(`GCodeFile not found with supplied id.`);
-      return res.status(404).send({ error: "Print job not found." });
+      return res.status(404).send({ error: 'Print job not found.' });
    }

    logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);

-    const auditLogs = await auditLogModel.find({
-      target: id
-    }).populate('owner');
+    const auditLogs = await auditLogModel
+      .find({
+        target: id,
+      })
+      .populate('owner');

-    res.send({...gcodeFile._doc, auditLogs: auditLogs});
+    res.send({ ...gcodeFile._doc, auditLogs: auditLogs });
  } catch (error) {
-    logger.error("Error fetching GCodeFile:", error);
+    logger.error('Error fetching GCodeFile:', error);
    res.status(500).send({ error: error.message });
  }
};
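A minimal sketch of how a client could send a file to the upload handler above, given that the multer instance is configured with .single('gcodeFile'); the route path is an assumption, not part of this commit:

// Sketch: browser-side upload using fetch + FormData.
async function uploadGCode(id, file) {
  const form = new FormData();
  form.append('gcodeFile', file); // field name must match .single('gcodeFile')
  const res = await fetch(`/gcodefiles/${id}/content`, { method: 'POST', body: form }); // path assumed
  return res.json();
}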
@@ -1,23 +1,16 @@
-import dotenv from "dotenv";
-import mongoose from "mongoose";
-import { jobModel } from "../../schemas/production/job.schema.js";
-import { subJobModel } from "../../schemas/production/subjob.schema.js";
-import { noteModel } from "../../schemas/misc/note.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import mongoose from 'mongoose';
+import { jobModel } from '../../schemas/production/job.schema.js';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
+import log4js from 'log4js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

dotenv.config();

-const logger = log4js.getLogger("Jobs");
+const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;

-export const listJobsRouteHandler = async (
-  req,
-  res,
-  page = 1,
-  limit = 25,
-) => {
+export const listJobsRouteHandler = async (req, res, page = 1, limit = 25) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;
@@ -28,13 +21,13 @@ export const listJobsRouteHandler = async (
      .sort({ createdAt: -1 })
      .skip(skip)
      .limit(limit)
-      .populate("subJobs", "state")
-      .populate("gcodeFile", "name");
+      .populate('subJobs', 'state')
+      .populate('gcodeFile', 'name');

    logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
    res.send(jobs);
  } catch (error) {
-    logger.error("Error listing print jobs:", error);
+    logger.error('Error listing print jobs:', error);
    res.status(500).send({ error: error });
  }
};
@ -48,26 +41,28 @@ export const getJobRouteHandler = async (req, res) => {
|
|||||||
.findOne({
|
.findOne({
|
||||||
_id: id,
|
_id: id,
|
||||||
})
|
})
|
||||||
.populate("printers", "name state")
|
.populate('printers', 'name state')
|
||||||
.populate("gcodeFile")
|
.populate('gcodeFile')
|
||||||
.populate("subJobs")
|
.populate('subJobs')
|
||||||
.populate("notes");
|
.populate('notes');
|
||||||
|
|
||||||
if (!job) {
|
if (!job) {
|
||||||
logger.warn(`Job not found with supplied id.`);
|
logger.warn(`Job not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Job with ID: ${id}:`, job);
|
logger.trace(`Job with ID: ${id}:`, job);
|
||||||
|
|
||||||
const targetIds = [id, ...job.subJobs.map(subJob => subJob._id)];
|
const targetIds = [id, ...job.subJobs.map((subJob) => subJob._id)];
|
||||||
const auditLogs = await auditLogModel.find({
|
const auditLogs = await auditLogModel
|
||||||
target: { $in: targetIds.map(id => new mongoose.Types.ObjectId(id)) }
|
.find({
|
||||||
}).populate('owner');
|
target: { $in: targetIds.map((id) => new mongoose.Types.ObjectId(id)) },
|
||||||
|
})
|
||||||
|
.populate('owner');
|
||||||
|
|
||||||
res.send({...job._doc, auditLogs: auditLogs});
|
res.send({ ...job._doc, auditLogs: auditLogs });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching job:", error);
|
logger.error('Error fetching job:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -82,27 +77,23 @@ export const editJobRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
if (!job) {
|
if (!job) {
|
||||||
logger.warn(`Job not found with supplied id.`);
|
logger.warn(`Job not found with supplied id.`);
|
||||||
return res.status(404).send({ error: "Print job not found." });
|
return res.status(404).send({ error: 'Print job not found.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.trace(`Job with ID: ${id}:`, job);
|
logger.trace(`Job with ID: ${id}:`, job);
|
||||||
|
|
||||||
const { createdAt, updatedAt, started_at, status, ...updateData } =
|
const updateData = req.body;
|
||||||
req.body;
|
|
||||||
|
|
||||||
const result = await jobModel.updateOne(
|
const result = await jobModel.updateOne({ _id: id }, { $set: updateData });
|
||||||
{ _id: id },
|
|
||||||
{ $set: updateData },
|
|
||||||
);
|
|
||||||
|
|
||||||
if (result.nModified === 0) {
|
if (result.nModified === 0) {
|
||||||
logger.warn("No jobs updated.");
|
logger.warn('No jobs updated.');
|
||||||
return res.status(400).send({ error: "No jobs updated." });
|
return res.status(400).send({ error: 'No jobs updated.' });
|
||||||
}
|
}
|
||||||
|
|
||||||
res.send({ message: "Print job updated successfully" });
|
res.send({ message: 'Print job updated successfully' });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error updating job:", error);
|
logger.error('Error updating job:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -112,9 +103,7 @@ export const createJobRouteHandler = async (req, res) => {
|
|||||||
const { gcodeFile, printers, quantity = 1 } = req.body;
|
const { gcodeFile, printers, quantity = 1 } = req.body;
|
||||||
|
|
||||||
if (!printers || printers.length === 0) {
|
if (!printers || printers.length === 0) {
|
||||||
return res
|
return res.status(400).send({ error: 'At least one printer must be specified' });
|
||||||
.status(400)
|
|
||||||
.send({ error: "At least one printer must be specified" });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Convert printer IDs to ObjectIds
|
// Convert printer IDs to ObjectIds
|
||||||
@ -122,14 +111,14 @@ export const createJobRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
// Create new print job
|
// Create new print job
|
||||||
const newJob = new jobModel({
|
const newJob = new jobModel({
|
||||||
state: { type: "draft" },
|
state: { type: 'draft' },
|
||||||
printers: printerIds,
|
printers: printerIds,
|
||||||
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
|
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
|
||||||
quantity,
|
quantity,
|
||||||
subJobs: [], // Initialize empty array for subjob references
|
subJobs: [], // Initialize empty array for subjob references
|
||||||
createdAt: new Date(),
|
createdAt: new Date(),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
startedAt: null
|
startedAt: null,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Save the print job first to get its ID
|
// Save the print job first to get its ID
|
||||||
@ -143,25 +132,23 @@ export const createJobRouteHandler = async (req, res) => {
|
|||||||
job: savedJob._id,
|
job: savedJob._id,
|
||||||
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
|
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
|
||||||
subJobId: `subjob-${index + 1}`,
|
subJobId: `subjob-${index + 1}`,
|
||||||
state: { type: "draft" },
|
state: { type: 'draft' },
|
||||||
number: index + 1,
|
number: index + 1,
|
||||||
createdAt: new Date(),
|
createdAt: new Date(),
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
});
|
});
|
||||||
return subJob.save();
|
return subJob.save();
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
// Update the print job with the subjob references
|
// Update the print job with the subjob references
|
||||||
savedJob.subJobs = subJobs.map((subJob) => subJob._id);
|
savedJob.subJobs = subJobs.map((subJob) => subJob._id);
|
||||||
await savedJob.save();
|
await savedJob.save();
|
||||||
|
|
||||||
logger.trace(
|
logger.trace(`Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`);
|
||||||
`Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`,
|
|
||||||
);
|
|
||||||
res.status(201).send({ job: savedJob, subJobs });
|
res.status(201).send({ job: savedJob, subJobs });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error creating print job:", error);
|
logger.error('Error creating print job:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -171,10 +158,10 @@ export const getJobStatsRouteHandler = async (req, res) => {
|
|||||||
const stats = await jobModel.aggregate([
|
const stats = await jobModel.aggregate([
|
||||||
{
|
{
|
||||||
$group: {
|
$group: {
|
||||||
_id: "$state.type",
|
_id: '$state.type',
|
||||||
count: { $sum: 1 }
|
count: { $sum: 1 },
|
||||||
}
|
},
|
||||||
}
|
},
|
||||||
]);
|
]);
|
||||||
|
|
||||||
// Transform the results into a more readable format
|
// Transform the results into a more readable format
|
||||||
@ -183,11 +170,10 @@ export const getJobStatsRouteHandler = async (req, res) => {
|
|||||||
return acc;
|
return acc;
|
||||||
}, {});
|
}, {});
|
||||||
|
|
||||||
logger.trace("Print job stats by state:", formattedStats);
|
logger.trace('Print job stats by state:', formattedStats);
|
||||||
res.send(formattedStats);
|
res.send(formattedStats);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error("Error fetching print job stats:", error);
|
logger.error('Error fetching print job stats:', error);
|
||||||
res.status(500).send({ error: error.message });
|
res.status(500).send({ error: error.message });
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
@@ -1,21 +1,16 @@
-import dotenv from "dotenv";
-import { printerModel } from "../../schemas/production/printer.schema.js";
-import log4js from "log4js";
-import { newAuditLog } from "../../util/index.js";
-import mongoose from "mongoose";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { printerModel } from '../../schemas/production/printer.schema.js';
+import log4js from 'log4js';
+import { newAuditLog } from '../../utils.js';
+import mongoose from 'mongoose';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';

 dotenv.config();

-const logger = log4js.getLogger("Printers");
+const logger = log4js.getLogger('Printers');
 logger.level = process.env.LOG_LEVEL;

-export const listPrintersRouteHandler = async (
-req,
-res,
-page = 1,
-limit = 25,
-) => {
+export const listPrintersRouteHandler = async (req, res, page = 1, limit = 25) => {
 try {
 // Calculate the skip value based on the page number and limit
 const skip = (page - 1) * limit;

@@ -26,7 +21,7 @@ export const listPrintersRouteHandler = async (
 logger.trace(`List of printers (Page ${page}, Limit ${limit}):`);
 res.send(printers);
 } catch (error) {
-logger.error("Error listing users:", error);
+logger.error('Error listing users:', error);
 res.status(500).send({ error: error });
 }
};

@@ -38,41 +33,45 @@ export const getPrinterRouteHandler = async (req, res) => {
 // Fetch the printer with the given remote address
 const printer = await printerModel
 .findOne({ _id: id })
-.populate("subJobs")
-.populate("currentJob")
+.populate('subJobs')
+.populate('currentJob')
 .populate({
-path: "currentJob",
+path: 'currentJob',
 populate: {
-path: "gcodeFile",
+path: 'gcodeFile',
 },
 })
-.populate("currentSubJob")
+.populate('currentSubJob')
 .populate({
-path: "subJobs",
+path: 'subJobs',
 populate: {
-path: "job",
+path: 'job',
 },
 })
-.populate("vendor")
-.populate({ path: "currentFilamentStock",
+.populate('vendor')
+.populate({
+path: 'currentFilamentStock',
 populate: {
-path: "filament",
-},})
+path: 'filament',
+},
+});

 if (!printer) {
 logger.warn(`Printer with id ${id} not found.`);
-return res.status(404).send({ error: "Printer not found" });
+return res.status(404).send({ error: 'Printer not found' });
 }

 logger.trace(`Printer with id ${id}:`, printer);

-const auditLogs = await auditLogModel.find({
-target: new mongoose.Types.ObjectId(id)
-}).populate('owner');
+const auditLogs = await auditLogModel
+.find({
+target: new mongoose.Types.ObjectId(id),
+})
+.populate('owner');

-res.send({...printer._doc, auditLogs: auditLogs});
+res.send({ ...printer._doc, auditLogs: auditLogs });
 } catch (error) {
-logger.error("Error fetching printer:", error);
+logger.error('Error fetching printer:', error);
 res.status(500).send({ error: error.message });
 }
};

@@ -85,7 +84,7 @@ export const editPrinterRouteHandler = async (req, res) => {

 if (!printer) {
 logger.warn(`Printer not found with supplied id.`);
-return res.status(404).send({ error: "Printer not found." });
+return res.status(404).send({ error: 'Printer not found.' });
 }

 try {

@@ -98,55 +97,41 @@ export const editPrinterRouteHandler = async (req, res) => {
 };

 // Create audit log before updating
-await newAuditLog(
-printer.toObject(),
-updateData,
-id,
-'Printer',
-req.user._id,
-'User'
-);
+await newAuditLog(printer.toObject(), updateData, id, 'Printer', req.user._id, 'User');

-const result = await printerModel.updateOne(
-{ _id: id },
-{ $set: updateData },
-);
+const result = await printerModel.updateOne({ _id: id }, { $set: updateData });
 if (result.nModified === 0) {
-logger.error("No printers updated.");
-res.status(500).send({ error: "No printers updated." });
+logger.error('No printers updated.');
+res.status(500).send({ error: 'No printers updated.' });
 }
 } catch (updateError) {
-logger.error("Error updating printer:", updateError);
+logger.error('Error updating printer:', updateError);
 res.status(500).send({ error: updateError.message });
 }
-res.send("OK");
+res.send('OK');
 } catch (fetchError) {
-logger.error("Error fetching printer:", fetchError);
+logger.error('Error fetching printer:', fetchError);
 res.status(500).send({ error: fetchError.message });
 }
};

 export const createPrinterRouteHandler = async (req, res) => {
 try {
-const { name, moonraker, tags = [], firmware = "n/a" } = req.body;
+const { name, moonraker, tags = [], firmware = 'n/a' } = req.body;

 // Validate required fields
 if (!name || !moonraker) {
-logger.warn("Missing required fields in printer creation request");
+logger.warn('Missing required fields in printer creation request');
 return res.status(400).send({
-error:
-"Missing required fields. name and moonraker configuration are required.",
+error: 'Missing required fields. name and moonraker configuration are required.',
 });
 }

 // Validate moonraker configuration
 if (!moonraker.host || !moonraker.port || !moonraker.protocol) {
-logger.warn(
-"Invalid moonraker configuration in printer creation request",
-);
+logger.warn('Invalid moonraker configuration in printer creation request');
 return res.status(400).send({
-error:
-"Invalid moonraker configuration. host, port, protocol are required.",
+error: 'Invalid moonraker configuration. host, port, protocol are required.',
 });
 }

@@ -158,7 +143,7 @@ export const createPrinterRouteHandler = async (req, res) => {
 firmware,
 online: false,
 state: {
-type: "offline",
+type: 'offline',
 },
 });

@@ -166,19 +151,12 @@ export const createPrinterRouteHandler = async (req, res) => {
 const savedPrinter = await newPrinter.save();

 // Create audit log for new printer
-await newAuditLog(
-{},
-newPrinter.toObject(),
-savedPrinter._id,
-'Printer',
-req.user._id,
-'User'
-);
+await newAuditLog({}, newPrinter.toObject(), savedPrinter._id, 'Printer', req.user._id, 'User');

 logger.info(`Created new printer: ${name}`);
 res.status(201).send(savedPrinter);
 } catch (error) {
-logger.error("Error creating printer:", error);
+logger.error('Error creating printer:', error);
 res.status(500).send({ error: error.message });
 }
};

@@ -188,10 +166,10 @@ export const getPrinterStatsRouteHandler = async (req, res) => {
 const stats = await printerModel.aggregate([
 {
 $group: {
-_id: "$state.type",
-count: { $sum: 1 }
-}
-}
+_id: '$state.type',
+count: { $sum: 1 },
+},
+},
 ]);

 // Transform the results into a more readable format

@@ -200,10 +178,10 @@ export const getPrinterStatsRouteHandler = async (req, res) => {
 return acc;
 }, {});

-logger.trace("Printer stats by state:", formattedStats);
+logger.trace('Printer stats by state:', formattedStats);
 res.send(formattedStats);
 } catch (error) {
-logger.error("Error fetching printer stats:", error);
+logger.error('Error fetching printer stats:', error);
 res.status(500).send({ error: error.message });
 }
};
@@ -1,12 +1,12 @@
-import { ObjectId } from "mongodb"; // Only needed in Node.js with MongoDB driver
+import { ObjectId } from 'mongodb'; // Only needed in Node.js with MongoDB driver

 function parseFilter(property, value) {
-if (typeof value === "string") {
+if (typeof value === 'string') {
 const trimmed = value.trim();

 // Handle booleans
-if (trimmed.toLowerCase() === "true") return { [property]: true };
-if (trimmed.toLowerCase() === "false") return { [property]: false };
+if (trimmed.toLowerCase() === 'true') return { [property]: true };
+if (trimmed.toLowerCase() === 'false') return { [property]: false };

 // Handle ObjectId (24-char hex)
 if (/^[a-f\d]{24}$/i.test(trimmed) && trimmed.length >= 24) {

@@ -22,8 +22,8 @@ function parseFilter(property, value) {
 return {
 [property]: {
 $regex: trimmed,
-$options: "i"
-}
+$options: 'i',
+},
 };
 }

@@ -41,29 +41,25 @@ function convertToCamelCase(obj) {
 // Convert the key to camelCase
 let camelKey = key
 // First handle special cases with spaces, brackets and other characters
-.replace(/\s*\[.*?\]\s*/g, "") // Remove brackets and their contents
-.replace(/\s+/g, " ") // Normalize spaces
+.replace(/\s*\[.*?\]\s*/g, '') // Remove brackets and their contents
+.replace(/\s+/g, ' ') // Normalize spaces
 .trim()
 // Split by common separators (space, underscore, hyphen)
 .split(/[\s_-]/)
 // Convert to camelCase
 .map((word, index) => {
 // Remove any non-alphanumeric characters
-word = word.replace(/[^a-zA-Z0-9]/g, "");
+word = word.replace(/[^a-zA-Z0-9]/g, '');

 // Lowercase first word, uppercase others
 return index === 0
 ? word.toLowerCase()
 : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
 })
-.join("");
+.join('');

 // Handle values that are objects recursively
-if (
-value !== null &&
-typeof value === "object" &&
-!Array.isArray(value)
-) {
+if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
 result[camelKey] = convertToCamelCase(value);
 } else {
 result[camelKey] = value;

@@ -78,14 +74,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 const configObject = {};

 // Extract header information
-const headerBlockRegex =
-/; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
+const headerBlockRegex = /; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
 const headerBlockMatch = fileContent.match(headerBlockRegex);
 if (headerBlockMatch && headerBlockMatch[1]) {
-const headerLines = headerBlockMatch[1].split("\n");
+const headerLines = headerBlockMatch[1].split('\n');
 headerLines.forEach((line) => {
-// Match lines with info after semicolon
-const headerLineRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
 const keyValueRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
 const simpleValueRegex = /^\s*;\s*(.*?)\s*$/;

@@ -96,24 +89,22 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 let value = match[2].trim();

 // Try to convert value to appropriate type
-if (!isNaN(value) && value !== "") {
+if (!isNaN(value) && value !== '') {
 value = Number(value);
 }
 configObject[key] = value;
 } else {
 // Try the simple format like "; generated by OrcaSlicer 2.1.1 on 2025-04-28 at 13:30:11"
 match = line.match(simpleValueRegex);
-if (match && match[1] && !match[1].includes("HEADER_BLOCK")) {
+if (match && match[1] && !match[1].includes('HEADER_BLOCK')) {
 const text = match[1].trim();

 // Extract slicer info
-const slicerMatch = text.match(
-/generated by (.*?) on (.*?) at (.*?)$/,
-);
+const slicerMatch = text.match(/generated by (.*?) on (.*?) at (.*?)$/);
 if (slicerMatch) {
-configObject["slicer"] = slicerMatch[1].trim();
-configObject["date"] = slicerMatch[2].trim();
-configObject["time"] = slicerMatch[3].trim();
+configObject['slicer'] = slicerMatch[1].trim();
+configObject['date'] = slicerMatch[2].trim();
+configObject['time'] = slicerMatch[3].trim();
 } else {
 // Just add as a general header entry if it doesn't match any specific pattern
 const key = `header_${Object.keys(configObject).length}`;

@@ -125,12 +116,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 }

 // Extract thumbnail data
-const thumbnailBlockRegex =
-/; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
+const thumbnailBlockRegex = /; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
 const thumbnailBlockMatch = fileContent.match(thumbnailBlockRegex);
 if (thumbnailBlockMatch && thumbnailBlockMatch[1]) {
-const thumbnailLines = thumbnailBlockMatch[1].split("\n");
-let base64Data = "";
+const thumbnailLines = thumbnailBlockMatch[1].split('\n');
+let base64Data = '';
 let thumbnailInfo = {};

 thumbnailLines.forEach((line) => {

@@ -142,13 +132,10 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 thumbnailInfo.width = parseInt(match[1], 10);
 thumbnailInfo.height = parseInt(match[2], 10);
 thumbnailInfo.size = parseInt(match[3], 10);
-} else if (
-line.trim().startsWith("; ") &&
-!line.includes("THUMBNAIL_BLOCK")
-) {
+} else if (line.trim().startsWith('; ') && !line.includes('THUMBNAIL_BLOCK')) {
 // Collect base64 data (remove the leading semicolon and space and thumbnail end)
 const dataLine = line.trim().substring(2);
-if (dataLine && dataLine != "thumbnail end") {
+if (dataLine && dataLine != 'thumbnail end') {
 base64Data += dataLine;
 }
 }

@@ -164,12 +151,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 }

 // Extract CONFIG_BLOCK
-const configBlockRegex =
-/; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
+const configBlockRegex = /; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
 const configBlockMatch = fileContent.match(configBlockRegex);
 if (configBlockMatch && configBlockMatch[1]) {
 // Extract each config line
-const configLines = configBlockMatch[1].split("\n");
+const configLines = configBlockMatch[1].split('\n');
 // Process each line
 configLines.forEach((line) => {
 // Check if the line starts with a semicolon and has an equals sign

@@ -179,11 +165,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 const key = match[1].trim();
 let value = match[2].trim();
 // Try to convert value to appropriate type
-if (value === "true" || value === "false") {
-value = value === "true";
-} else if (!isNaN(value) && value !== "") {
+if (value === 'true' || value === 'false') {
+value = value === 'true';
+} else if (!isNaN(value) && value !== '') {
 // Check if it's a number (but not a percentage)
-if (!value.includes("%")) {
+if (!value.includes('%')) {
 value = Number(value);
 }
 }

@@ -197,31 +183,31 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 /; EXECUTABLE_BLOCK_(?:START|END)([\s\S]*?)(?:; CONFIG_BLOCK_START|$)/i;
 const additionalVarsMatch = fileContent.match(additionalVarsRegex);
 if (additionalVarsMatch && additionalVarsMatch[1]) {
-const additionalLines = additionalVarsMatch[1].split("\n");
+const additionalLines = additionalVarsMatch[1].split('\n');
 additionalLines.forEach((line) => {
 // Match both standard format and the special case for "total filament cost"
 const varRegex =
 /^\s*;\s*((?:filament used|filament cost|total filament used|total filament cost|total layers count|estimated printing time)[^=]*?)\s*=\s*(.*?)\s*$/;
 const match = line.match(varRegex);
 if (match) {
-const key = match[1].replace(/\[([^\]]+)\]/g, "$1").trim();
+const key = match[1].replace(/\[([^\]]+)\]/g, '$1').trim();
 let value = match[2].trim();
 // Clean up values - remove units in brackets and handle special cases
-if (key.includes("filament used")) {
+if (key.includes('filament used')) {
 // Extract just the numeric value, ignoring units in brackets
 const numMatch = value.match(/(\d+\.\d+)/);
 if (numMatch) {
 value = parseFloat(numMatch[1]);
 }
-} else if (key.includes("filament cost")) {
+} else if (key.includes('filament cost')) {
 // Extract just the numeric value
 const numMatch = value.match(/(\d+\.\d+)/);
 if (numMatch) {
 value = parseFloat(numMatch[1]);
 }
-} else if (key.includes("total layers count")) {
+} else if (key.includes('total layers count')) {
 value = parseInt(value, 10);
-} else if (key.includes("estimated printing time")) {
+} else if (key.includes('estimated printing time')) {
 // Keep as string but trim any additional whitespace
 value = value.trim();
 }

@@ -243,7 +229,7 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 const postConfigParams = /; CONFIG_BLOCK_END\s*\n([\s\S]*?)$/;
 const postConfigMatch = fileContent.match(postConfigParams);
 if (postConfigMatch && postConfigMatch[1]) {
-const postConfigLines = postConfigMatch[1].split("\n");
+const postConfigLines = postConfigMatch[1].split('\n');
 postConfigLines.forEach((line) => {
 // Match lines with format "; parameter_name = value"
 const paramRegex = /^\s*;\s*([^=]+?)\s*=\s*(.*?)\s*$/;

@@ -253,11 +239,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
 let value = match[2].trim();

 // Try to convert value to appropriate type
-if (value === "true" || value === "false") {
-value = value === "true";
-} else if (!isNaN(value) && value !== "") {
+if (value === 'true' || value === 'false') {
+value = value === 'true';
+} else if (!isNaN(value) && value !== '') {
 // Check if it's a number (but not a percentage)
-if (!value.includes("%")) {
+if (!value.includes('%')) {
 value = Number(value);
 }
 }

@@ -292,7 +278,7 @@ function getChangedValues(oldObj, newObj) {
 }

 async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
-const { auditLogModel } = await import('../schemas/management/auditlog.schema.js');
+const { auditLogModel } = await import('./schemas/management/auditlog.schema.js');

 // Get only the changed values
 const changedValues = getChangedValues(oldValue, newValue);

@@ -314,9 +300,4 @@ async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
 await auditLog.save();
 }

-export {
-parseFilter,
-convertToCamelCase,
-extractConfigBlock,
-newAuditLog
-};
+export { parseFilter, convertToCamelCase, extractConfigBlock, newAuditLog };