Using global functions for database interaction.
Parent: a6ab95a8b0
Commit: 01619d621a
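The diff below swaps each handler's hand-rolled Mongoose aggregation pipeline for shared helpers imported from '../../database/database.js' (listObjects, listObjectsByProperties, getObject, editObject, newObject, deleteObject). The helpers themselves are not part of this commit; the sketch below only illustrates the call shape the new handlers rely on, with parameter names taken from the call sites and the behaviour guessed from the code being removed.

// Hypothetical sketch of the shared listObjects helper (not in this commit).
// Parameter names come from the call sites; the body mirrors the old inline logic.
export const listObjects = async ({
  model,            // Mongoose model to query
  page = 1,
  limit = 25,
  property = '',    // when set, return the distinct values of this field
  filter = {},
  search = '',
  sort = '',
  order = 'ascend',
  populate = [],    // refs to resolve, e.g. [{ path: 'filament' }] or ['vendor']
}) => {
  try {
    const query = search ? { ...filter, $text: { $search: search } } : filter;
    if (property) {
      // used by the '/properties'-style lookups to feed filter dropdowns
      return await model.distinct(property, query);
    }
    return await model
      .find(query)
      .populate(populate)
      .sort(sort ? { [sort]: order === 'descend' ? -1 : 1 } : {})
      .skip((page - 1) * limit)
      .limit(Number(limit));
  } catch (error) {
    // Callers check result?.error and use result.code as the HTTP status.
    return { error: error.message, code: 500 };
  }
};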
@@ -1,32 +1,34 @@
 import express from 'express';
 import { isAuthenticated } from '../../keycloak.js';
-import { parseFilter } from '../../utils.js';
+import { getFilter, convertPropertiesString } from '../../utils.js';
 
 const router = express.Router();
 import {
   listFilamentStocksRouteHandler,
   getFilamentStockRouteHandler,
+  editFilamentStockRouteHandler,
   newFilamentStockRouteHandler,
+  deleteFilamentStockRouteHandler,
+  listFilamentStocksByPropertiesRouteHandler,
 } from '../../services/inventory/filamentstocks.js';
 
-// list of filamentStocks
+// list of filament stocks
 router.get('/', isAuthenticated, (req, res) => {
-  const { page, limit, property, sort, order } = req.query;
-
-  const allowedFilters = ['filament', 'filament._id'];
-  var filter = {};
-
-  for (const [key, value] of Object.entries(req.query)) {
-    for (var i = 0; i < allowedFilters.length; i++) {
-      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value);
-        filter = { ...filter, ...parsedFilter };
-      }
-    }
-  }
-
-  listFilamentStocksRouteHandler(req, res, page, limit, property, filter, sort, order);
+  const { page, limit, property, search, sort, order } = req.query;
+  const allowedFilters = ['filament', 'state', 'startingWeight', 'currentWeight'];
+  const filter = getFilter(req.query, allowedFilters);
+  listFilamentStocksRouteHandler(req, res, page, limit, property, filter, search, sort, order);
+});
+
+router.get('/properties', isAuthenticated, (req, res) => {
+  let properties = convertPropertiesString(req.query.properties);
+  const allowedFilters = ['filament', 'state.type'];
+  const filter = getFilter(req.query, allowedFilters, false);
+  var masterFilter = {};
+  if (req.query.masterFilter) {
+    masterFilter = JSON.parse(req.query.masterFilter);
+  }
+  listFilamentStocksByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
 });
 
 router.post('/', isAuthenticated, (req, res) => {
@@ -37,4 +39,12 @@ router.get('/:id', isAuthenticated, (req, res) => {
   getFilamentStockRouteHandler(req, res);
 });
 
+router.put('/:id', isAuthenticated, async (req, res) => {
+  editFilamentStockRouteHandler(req, res);
+});
+
+router.delete('/:id', isAuthenticated, async (req, res) => {
+  deleteFilamentStockRouteHandler(req, res);
+});
+
 export default router;
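The rewritten routes lean on two utils.js helpers that are not shown in this diff, getFilter and convertPropertiesString. A plausible reconstruction, based on the inline filter loop the old route used (parseFilter over an allow-list) and on how the new routes call them, could look like the following; the meaning of the third getFilter argument and the comma-separated properties format are assumptions:

// Assumed shape of the utils.js helpers used above (not part of this commit).
const parseFilter = (key, value) => ({ [key]: value }); // stand-in for the real parser

export const getFilter = (query, allowedFilters, parseValues = true) => {
  let filter = {};
  for (const [key, value] of Object.entries(query)) {
    if (allowedFilters.includes(key)) {
      filter = { ...filter, ...(parseValues ? parseFilter(key, value) : { [key]: value }) };
    }
  }
  return filter;
};

// '/properties?properties=filament,state.type' -> ['filament', 'state.type'] (assumed)
export const convertPropertiesString = (properties = '') =>
  properties ? properties.split(',').map((p) => p.trim()) : [];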
@@ -1,34 +1,34 @@
 import express from 'express';
 import { isAuthenticated } from '../../keycloak.js';
-import { parseFilter } from '../../utils.js';
+import { convertPropertiesString, getFilter } from '../../utils.js';
 
 const router = express.Router();
 import {
   listUsersRouteHandler,
+  listUsersByPropertiesRouteHandler,
   getUserRouteHandler,
   editUserRouteHandler,
 } from '../../services/management/users.js';
 
-// list of users
+// list of document templates
 router.get('/', isAuthenticated, (req, res) => {
   const { page, limit, property, search, sort, order } = req.query;
-  const allowedFilters = ['username', 'name', 'firstName', 'lastName'];
-
-  var filter = {};
-
-  for (const [key, value] of Object.entries(req.query)) {
-    for (var i = 0; i < allowedFilters.length; i++) {
-      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value);
-        filter = { ...filter, ...parsedFilter };
-      }
-    }
-  }
-
+  const allowedFilters = ['name', 'username', 'firstName', 'lastName', 'email'];
+  const filter = getFilter(req.query, allowedFilters);
+
   listUsersRouteHandler(req, res, page, limit, property, filter, search, sort, order);
 });
 
+router.get('/properties', isAuthenticated, (req, res) => {
+  let properties = convertPropertiesString(req.query.properties);
+  const allowedFilters = [];
+  const filter = getFilter(req.query, allowedFilters, false);
+  var masterFilter = {};
+  if (req.query.masterFilter) {
+    masterFilter = JSON.parse(req.query.masterFilter);
+  }
+  listUsersByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
+});
+
 router.get('/:id', isAuthenticated, (req, res) => {
   getUserRouteHandler(req, res);
 });
@@ -4,21 +4,29 @@ import { isAuthenticated } from '../../keycloak.js';
 const router = express.Router();
 import {
   listJobsRouteHandler,
+  listJobsByPropertiesRouteHandler,
   getJobRouteHandler,
   newJobRouteHandler,
   deleteJobRouteHandler,
   getJobStatsRouteHandler,
 } from '../../services/production/jobs.js';
-import { getFilter } from '../../utils.js';
+import { convertPropertiesString, getFilter } from '../../utils.js';
 
 // list of jobs
 router.get('/', isAuthenticated, (req, res) => {
   const { page, limit, property, search, sort, order } = req.query;
-  const allowedFilters = ['country'];
+  const allowedFilters = ['state'];
   const filter = getFilter(req.query, allowedFilters);
   listJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
 });
 
+router.get('/properties', isAuthenticated, (req, res) => {
+  let properties = convertPropertiesString(req.query.properties);
+  const allowedFilters = ['state'];
+  const filter = getFilter(req.query, allowedFilters, false);
+  listJobsByPropertiesRouteHandler(req, res, properties, filter);
+});
+
 router.post('/', isAuthenticated, (req, res) => {
   newJobRouteHandler(req, res);
 });
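All of the routers above gain a '/properties' endpoint with the same query-string contract: properties names the fields to enumerate, any key in allowedFilters becomes a filter, and masterFilter is a JSON-encoded object parsed with JSON.parse. An illustrative client call follows; the mount path, the placeholder ObjectId and the Keycloak bearer token are assumptions, not part of this commit:

// Illustrative only: '/inventory/filamentstocks' mount path and token are assumed.
const accessToken = 'keycloak-access-token'; // placeholder
const params = new URLSearchParams({
  properties: 'state.type',                        // split by convertPropertiesString
  filament: '64f0c1a2b3d4e5f6a7b8c9d0',            // must appear in allowedFilters
  masterFilter: JSON.stringify({ 'state.type': 'unconsumed' }), // parsed via JSON.parse
});

const response = await fetch(`/inventory/filamentstocks/properties?${params}`, {
  headers: { Authorization: `Bearer ${accessToken}` }, // isAuthenticated (Keycloak) assumed
});
const distinctValues = await response.json();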
@@ -1,11 +1,15 @@
 import dotenv from 'dotenv';
 import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
-import { filamentModel } from '../../schemas/management/filament.schema.js';
-import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
-import { distributeNew, flatternObjectIds, getAuditLogs, newAuditLog } from '../../utils.js';
+import {
+  deleteObject,
+  listObjects,
+  getObject,
+  editObject,
+  newObject,
+  listObjectsByProperties,
+} from '../../database/database.js';
 dotenv.config();
 
 const logger = log4js.getLogger('Filament Stocks');
@@ -18,173 +22,137 @@ export const listFilamentStocksRouteHandler = async (
   limit = 25,
   property = '',
   filter = {},
+  search = '',
   sort = '',
   order = 'ascend'
 ) => {
-  try {
-    // Calculate the skip value based on the page number and limit
-    const skip = (page - 1) * limit;
-
-    let filamentStock;
-    let aggregateCommand = [];
-
-    aggregateCommand.push({
-      $lookup: {
-        from: 'filaments', // The collection name (usually lowercase plural)
-        localField: 'filament', // The field in your current model
-        foreignField: '_id', // The field in the products collection
-        as: 'filament', // The output field name
-      },
-    });
-
-    aggregateCommand.push({ $unwind: '$filament' });
-
-    if (filter != {}) {
-      // use filtering if present
-      aggregateCommand.push({ $match: filter });
-    }
-
-    if (property != '') {
-      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
-      aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
-    } else {
-      aggregateCommand.push({ $project: { image: 0, url: 0 } });
-    }
-
-    // Add sorting if sort parameter is provided
-    if (sort) {
-      const sortOrder = order === 'descend' ? -1 : 1;
-      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
-    }
-
-    aggregateCommand.push({ $skip: skip });
-    aggregateCommand.push({ $limit: Number(limit) });
-
-    console.log(aggregateCommand);
-
-    filamentStock = await filamentStockModel.aggregate(aggregateCommand);
-
-    logger.trace(
-      `List of filamentStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
-      filamentStock
-    );
-    res.send(filamentStock);
-  } catch (error) {
-    logger.error('Error listing filament stocks:', error);
-    res.status(500).send({ error: error });
+  const result = await listObjects({
+    model: filamentStockModel,
+    page,
+    limit,
+    property,
+    filter,
+    search,
+    sort,
+    order,
+    populate: [{ path: 'filament' }],
+  });
+
+  if (result?.error) {
+    logger.error('Error listing filament stocks.');
+    res.status(result.code).send(result);
+    return;
   }
 
+  logger.debug(`List of filament stocks (Page ${page}, Limit ${limit}). Count: ${result.length}`);
+  res.send(result);
+};
+
+export const listFilamentStocksByPropertiesRouteHandler = async (
+  req,
+  res,
+  properties = '',
+  filter = {},
+  masterFilter = {}
+) => {
+  const result = await listObjectsByProperties({
+    model: filamentStockModel,
+    properties,
+    filter,
+    populate: ['filament'],
+    masterFilter,
+  });
+
+  if (result?.error) {
+    logger.error('Error listing filament stocks.');
+    res.status(result.code).send(result);
+    return;
+  }
+
+  logger.debug(`List of filament stocks. Count: ${result.length}`);
+  res.send(result);
 };
 
 export const getFilamentStockRouteHandler = async (req, res) => {
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
-    // Fetch the filamentStock with the given remote address
-    const filamentStock = await filamentStockModel
-      .findOne({
-        _id: id,
-      })
-      .populate('filament');
-
-    if (!filamentStock) {
-      logger.warn(`Filament stock not found with supplied id.`);
-      return res.status(404).send({ error: 'Print job not found.' });
-    }
-
-    logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
-
-    const auditLogs = await getAuditLogs(id);
-
-    res.send({ ...filamentStock._doc, auditLogs: auditLogs });
-  } catch (error) {
-    logger.error('Error fetching filament stock:', error);
-    res.status(500).send({ error: error.message });
+  const id = req.params.id;
+  const result = await getObject({
+    model: filamentStockModel,
+    id,
+    populate: [{ path: 'filament' }],
+  });
+  if (result?.error) {
+    logger.warn(`Filament Stock not found with supplied id.`);
+    return res.status(result.code).send(result);
   }
+
+  logger.debug(`Retreived filament stock with ID: ${id}`);
+  res.send(result);
+};
+
+export const editFilamentStockRouteHandler = async (req, res) => {
+  // Get ID from params
+  const id = new mongoose.Types.ObjectId(req.params.id);
+
+  logger.trace(`Filament Stock with ID: ${id}`);
+
+  const updateData = {};
+  // Create audit log before updating
+  const result = await editObject({
+    model: filamentStockModel,
+    id,
+    updateData,
+    user: req.user,
+  });
+
+  if (result.error) {
+    logger.error('Error editing filament stock:', result.error);
+    res.status(result).send(result);
+    return;
+  }
+
+  logger.debug(`Edited filament stock with ID: ${id}`);
+
+  res.send(result);
 };
 
 export const newFilamentStockRouteHandler = async (req, res) => {
-  var filament = null;
-
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.body.filament._id);
-    // Fetch the filament with the given remote address
-    filament = await filamentModel.findOne({
-      _id: id,
-    });
-
-    if (!filament) {
-      logger.warn(`Filament not found with supplied id.`);
-      return res.status(404).send({ error: 'Filament not found.' });
-    }
-
-    logger.trace(`Filament with ID: ${id}:`, filament);
-  } catch (error) {
-    logger.error('Error fetching filament:', error);
-    return res.status(500).send({ error: error.message });
+  const newData = {
+    updatedAt: new Date(),
+    startingWeight: req.body.startingWeight,
+    currentWeight: req.body.currentWeight,
+    filament: req.body.filament,
+  };
+  const result = await newObject({
+    model: filamentStockModel,
+    newData,
+    user: req.user,
+  });
+  if (result.error) {
+    logger.error('No filament stock created:', result.error);
+    return res.status(result.code).send(result);
   }
 
-  try {
-    logger.warn(req.body);
-    const startingWeight = req.body.startingWeight; // { net, gross }
-    if (!startingWeight || typeof startingWeight.gross !== 'number') {
-      return res.status(400).send({ error: 'startingWeight.gross is required' });
-    }
-    // Calculate net if not provided
-    const net =
-      typeof startingWeight.net === 'number'
-        ? startingWeight.net
-        : startingWeight.gross - filament.emptySpoolWeight;
-    const starting = {
-      gross: startingWeight.gross,
-      net: net,
-    };
-    const newFilamentStock = {
-      startingWeight: starting,
-      currentWeight: { ...starting },
-      filament: req.body.filament,
-      state: {
-        type: 'unconsumed',
-        percent: '0', // schema requires string
-      },
-    };
-
-    const result = await filamentStockModel.create(flatternObjectIds(newFilamentStock));
-
-    if (!result) {
-      logger.error('No filament stock created.');
-      return res.status(500).send({ error: 'No filament stock created.' });
-    }
-
-    await newAuditLog(newFilamentStock, result._id, 'filamentStock', req.user);
-    await distributeNew(result._id, 'filamentStock');
-
-    console.log(result);
-
-    // Create initial stock event (optional, but keep logic if needed)
-    const stockEvent = {
-      value: starting.net,
-      current: starting.net,
-      unit: 'g',
-      parent: result,
-      parentType: 'filamentStock',
-      owner: req.user,
-      ownerType: 'user',
-      createdAt: new Date(),
-      updatedAt: new Date(),
-    };
-
-    const eventResult = await stockEventModel.create(flatternObjectIds(stockEvent));
-    if (!eventResult) {
-      logger.error('Failed to create initial stock event.');
-      return res.status(500).send({ error: 'Failed to create initial stock event.' });
-    }
-
-    await newAuditLog(stockEvent, eventResult._id, 'stockEvent', req.user);
-
-    return res.send({ status: 'ok' });
-  } catch (updateError) {
-    logger.error('Error adding filament stock:', updateError);
-    return res.status(500).send({ error: updateError.message });
-  }
+  logger.debug(`New filament stock with ID: ${result._id}`);
+
+  res.send(result);
+};
+
+export const deleteFilamentStockRouteHandler = async (req, res) => {
+  // Get ID from params
+  const id = new mongoose.Types.ObjectId(req.params.id);
+
+  logger.trace(`Filament Stock with ID: ${id}`);
+
+  const result = await deleteObject({
+    model: filamentStockModel,
+    id,
+    user: req.user,
+  });
+  if (result.error) {
+    logger.error('No filament stock deleted:', result.error);
+    return res.status(result.code).send(result);
+  }
+
+  logger.debug(`Deleted filament stock with ID: ${result._id}`);
+
+  res.send(result);
 };
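The handler bodies deleted above spell out the steps the shared helpers presumably absorb: look the document up, write an audit log, apply the update, and distribute the change. As a rough sketch of what editObject in database/database.js might wrap, composed from the removed editAuditLog/updateOne/distributeUpdate sequence (the real implementation is not in this diff, and the import path and field names are assumptions):

// Rough sketch of editObject (not in this diff), assembled from the steps the
// removed handlers performed inline. Import path and field names are assumptions.
import { editAuditLog, distributeUpdate, flatternObjectIds } from '../utils.js';

export const editObject = async ({ model, id, updateData, user }) => {
  try {
    const existing = await model.findOne({ _id: id });
    if (!existing) {
      return { error: `${model.modelName} not found.`, code: 404 };
    }

    // Create audit log before updating, as the old handlers did
    await editAuditLog(existing.toObject(), updateData, id, model.modelName, user);

    const result = await model.updateOne({ _id: id }, { $set: flatternObjectIds(updateData) });
    if (result.modifiedCount === 0) {
      // older Mongoose exposed this as result.nModified
      return { error: 'Nothing updated.', code: 500 };
    }

    await distributeUpdate(updateData, id, model.modelName);
    return model.findOne({ _id: id });
  } catch (error) {
    return { error: error.message, code: 500 };
  }
};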
@@ -1,15 +1,14 @@
-import dotenv from 'dotenv';
+import dotenv, { populate } from 'dotenv';
 import { filamentModel } from '../../schemas/management/filament.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
-  newAuditLog,
-  editAuditLog,
-  distributeUpdate,
-  flatternObjectIds,
-  distributeNew,
-} from '../../utils.js';
-import { listObjectsByProperties } from '../../database/database.js';
+  getObject,
+  listObjects,
+  listObjectsByProperties,
+  editObject,
+  newObject,
+} from '../../database/database.js';
 
 dotenv.config();
 const logger = log4js.getLogger('Filaments');
@@ -26,69 +25,26 @@ export const listFilamentsRouteHandler = async (
   sort = '',
   order = 'ascend'
 ) => {
-  try {
-    // Calculate the skip value based on the page number and limit
-    const skip = (page - 1) * limit;
-
-    let filament;
-    let aggregateCommand = [];
-
-    if (search) {
-      // Add a text search match stage for name and brand fields
-      aggregateCommand.push({
-        $match: {
-          $text: {
-            $search: search,
-          },
-        },
-      });
-    }
-
-    aggregateCommand.push({
-      $lookup: {
-        from: 'vendors', // The collection name (usually lowercase plural)
-        localField: 'vendor', // The field in your current model
-        foreignField: '_id', // The field in the products collection
-        as: 'vendor', // The output field name
-      },
-    });
-
-    aggregateCommand.push({ $unwind: '$vendor' });
-
-    if (filter != {}) {
-      // use filtering if present
-      aggregateCommand.push({ $match: filter });
-    }
-
-    if (property != '') {
-      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
-      aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
-    } else {
-      aggregateCommand.push({ $project: { image: 0, url: 0 } });
-    }
-
-    // Add sorting if sort parameter is provided
-    if (sort) {
-      const sortOrder = order === 'descend' ? -1 : 1;
-      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
-    }
-
-    aggregateCommand.push({ $skip: skip });
-    aggregateCommand.push({ $limit: Number(limit) });
-
-    console.log(aggregateCommand);
-
-    filament = await filamentModel.aggregate(aggregateCommand);
-
-    logger.trace(
-      `List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`,
-      filament
-    );
-    res.send(filament);
-  } catch (error) {
-    logger.error('Error listing filaments:', error);
-    res.status(500).send({ error: error });
+  const result = await listObjects({
+    model: filamentModel,
+    page,
+    limit,
+    property,
+    filter,
+    search,
+    sort,
+    order,
+    populate: ['vendor'],
+  });
+
+  if (result?.error) {
+    logger.error('Error listing filaments.');
+    res.status(result.code).send(result);
+    return;
   }
 
+  logger.debug(`List of filaments (Page ${page}, Limit ${limit}). Count: ${result.length}`);
+  res.send(result);
 };
 
 export const listFilamentsByPropertiesRouteHandler = async (
@@ -115,118 +71,86 @@ export const listFilamentsByPropertiesRouteHandler = async (
 };
 
 export const getFilamentRouteHandler = async (req, res) => {
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
-    // Fetch the filament with the given remote address
-    const filament = await filamentModel
-      .findOne({
-        _id: id,
-      })
-      .populate('vendor');
-
-    if (!filament) {
-      logger.warn(`Filament not found with supplied id.`);
-      return res.status(404).send({ error: 'Print job not found.' });
-    }
-
-    logger.trace(`Filament with ID: ${id}:`, filament);
-
-    res.send({ ...filament._doc });
-  } catch (error) {
-    logger.error('Error fetching Filament:', error);
-    res.status(500).send({ error: error.message });
+  const id = req.params.id;
+  const result = await getObject({
+    model: filamentModel,
+    id,
+    populate: 'vendor',
+  });
+  if (result?.error) {
+    logger.warn(`Filament not found with supplied id.`);
+    return res.status(result.code).send(result);
   }
+  logger.debug(`Retreived filament with ID: ${id}`);
+  res.send(result);
 };
 
 export const editFilamentRouteHandler = async (req, res) => {
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
-    // Fetch the filament with the given remote address
-    const filament = await filamentModel.findOne({ _id: id });
-
-    if (!filament) {
-      // Error handling
-      logger.warn(`Filament not found with supplied id.`);
-      return res.status(404).send({ error: 'Print job not found.' });
-    }
-
-    logger.trace(`Filament with ID: ${id}:`, filament);
-
-    try {
-      const updateData = {
-        updatedAt: new Date(),
-        name: req.body.name,
-        barcode: req.body.barcode,
-        url: req.body.url,
-        image: req.body.image,
-        color: req.body.color,
-        vendor: req.body.vendor,
-        type: req.body.type,
-        cost: req.body.cost,
-        diameter: req.body.diameter,
-        density: req.body.density,
-        emptySpoolWeight: req.body.emptySpoolWeight,
-      };
-
-      // Create audit log before updating
-      await editAuditLog(filament.toObject(), updateData, id, 'filament', req.user);
-
-      const result = await filamentModel.updateOne(
-        { _id: id },
-        { $set: flatternObjectIds(updateData) }
-      );
-      if (result.nModified === 0) {
-        logger.error('No Filament updated.');
-        return res.status(500).send({ error: 'No filaments updated.' });
-      }
-
-      await distributeUpdate(updateData, id, 'filament');
-    } catch (updateError) {
-      logger.error('Error updating filament:', updateError);
-      return res.status(500).send({ error: updateError.message });
-    }
-
-    return res.send('OK');
-  } catch (fetchError) {
-    logger.error('Error fetching filament:', fetchError);
-    return res.status(500).send({ error: fetchError.message });
+  // Get ID from params
+  const id = new mongoose.Types.ObjectId(req.params.id);
+
+  logger.trace(`Filament with ID: ${id}`);
+
+  const updateData = {
+    updatedAt: new Date(),
+    name: req.body.name,
+    barcode: req.body.barcode,
+    url: req.body.url,
+    image: req.body.image,
+    color: req.body.color,
+    vendor: req.body.vendor,
+    type: req.body.type,
+    cost: req.body.cost,
+    diameter: req.body.diameter,
+    density: req.body.density,
+    emptySpoolWeight: req.body.emptySpoolWeight,
+  };
+  const result = await editObject({
+    model: filamentModel,
+    id,
+    updateData,
+    user: req.user,
+  });
+
+  if (result.error) {
+    logger.error('Error editing filament:', result.error);
+    res.status(result).send(result);
+    return;
   }
 
+  logger.debug(`Edited filament with ID: ${id}`);
+
+  res.send(result);
 };
 
 export const newFilamentRouteHandler = async (req, res) => {
-  try {
-    const newFilament = {
-      createdAt: new Date(),
-      updatedAt: new Date(),
-      name: req.body.name,
-      barcode: req.body.barcode,
-      url: req.body.url,
-      image: req.body.image,
-      color: req.body.color,
-      vendor: req.body.vendor,
-      type: req.body.type,
-      cost: req.body.cost,
-      diameter: req.body.diameter,
-      density: req.body.density,
-      emptySpoolWeight: req.body.emptySpoolWeight,
-    };
-
-    const result = await filamentModel.create(flatternObjectIds(newFilament));
-
-    if (result.nCreated === 0) {
-      logger.error('No filament created.');
-      res.status(500).send({ error: 'No filament created.' });
-    }
-
-    // Create audit log for new filament
-    await newAuditLog(newFilament, result._id, 'filament', req.user);
-    await distributeNew(result._id, 'filament');
-
-    res.status(200).send({ status: 'ok' });
-  } catch (updateError) {
-    logger.error('Error updating filament:', updateError);
-    res.status(500).send({ error: updateError.message });
+  const newData = {
+    createdAt: new Date(),
+    updatedAt: new Date(),
+    name: req.body.name,
+    barcode: req.body.barcode,
+    url: req.body.url,
+    image: req.body.image,
+    color: req.body.color,
+    vendor: req.body.vendor,
+    type: req.body.type,
+    cost: req.body.cost,
+    diameter: req.body.diameter,
+    density: req.body.density,
+    emptySpoolWeight: req.body.emptySpoolWeight,
+  };
+
+  const result = await newObject({
+    model: filamentModel,
+    newData,
+    user: req.user,
+  });
+  if (result.error) {
+    logger.error('No filament created:', result.error);
+    return res.status(result.code).send(result);
   }
 
+  logger.debug(`New filament with ID: ${result._id}`);
+
+  res.send(result);
 };
@@ -47,11 +47,18 @@ export const listHostsRouteHandler = async (
   res.send(result);
 };
 
-export const listHostsByPropertiesRouteHandler = async (req, res, properties = '', filter = {}) => {
+export const listHostsByPropertiesRouteHandler = async (
+  req,
+  res,
+  properties = '',
+  filter = {},
+  masterFilter = {}
+) => {
   const result = await listObjectsByProperties({
     model: hostModel,
     properties,
     filter,
+    masterFilter,
   });
 
   if (result?.error) {
@@ -88,8 +95,9 @@ export const editHostRouteHandler = async (req, res) => {
     updatedAt: new Date(),
     name: req.body.name,
     active: req.body.active,
+    tags: req.body.tags,
   };
-  // Create audit log before updating
+
   const result = await editObject({
     model: hostModel,
     id,
@@ -2,7 +2,12 @@ import dotenv from 'dotenv';
 import { userModel } from '../../schemas/management/user.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
-import { distributeUpdate, editAuditLog } from '../../utils.js';
+import {
+  listObjects,
+  listObjectsByProperties,
+  getObject,
+  editObject,
+} from '../../database/database.js';
 
 dotenv.config();
 
@@ -20,122 +25,93 @@ export const listUsersRouteHandler = async (
   sort = '',
   order = 'ascend'
 ) => {
-  try {
-    // Calculate the skip value based on the page number and limit
-    const skip = (page - 1) * limit;
-
-    let user;
-    let aggregateCommand = [];
-
-    if (search) {
-      // Add a text search match stage for name and brand fields
-      aggregateCommand.push({
-        $match: {
-          $text: {
-            $search: search,
-          },
-        },
-      });
-    }
-
-    if (filter != {}) {
-      // use filtering if present
-      aggregateCommand.push({ $match: filter });
-    }
-
-    if (property != '') {
-      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
-      aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
-    }
-
-    // Add sorting if sort parameter is provided
-    if (sort) {
-      const sortOrder = order === 'descend' ? -1 : 1;
-      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
-    }
-
-    aggregateCommand.push({ $skip: skip });
-    aggregateCommand.push({ $limit: Number(limit) });
-
-    console.log(aggregateCommand);
-
-    user = await userModel.aggregate(aggregateCommand);
-
-    logger.trace(`List of users (Page ${page}, Limit ${limit}, Property ${property}):`, user);
-    res.send(user);
-  } catch (error) {
-    logger.error('Error listing users:', error);
-    res.status(500).send({ error: error });
+  const result = await listObjects({
+    model: userModel,
+    page,
+    limit,
+    property,
+    filter,
+    search,
+    sort,
+    order,
+  });
+
+  if (result?.error) {
+    logger.error('Error listing users.');
+    res.status(result.code).send(result);
+    return;
   }
 
+  logger.debug(`List of users (Page ${page}, Limit ${limit}). Count: ${result.length}`);
+  res.send(result);
+};
+
+export const listUsersByPropertiesRouteHandler = async (
+  req,
+  res,
+  properties = '',
+  filter = {},
+  masterFilter = {}
+) => {
+  const result = await listObjectsByProperties({
+    model: userModel,
+    properties,
+    filter,
+    masterFilter,
+  });
+
+  if (result?.error) {
+    logger.error('Error listing users.');
+    res.status(result.code).send(result);
+    return;
+  }
+
+  logger.debug(`List of users. Count: ${result.length}`);
+  res.send(result);
 };
 
 export const getUserRouteHandler = async (req, res) => {
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
-    // Fetch the user with the given ID
-    const user = await userModel.findOne({
-      _id: id,
-    });
-
-    if (!user) {
-      logger.warn(`User not found with supplied id.`);
-      return res.status(404).send({ error: 'User not found.' });
-    }
-
-    logger.trace(`User with ID: ${id}:`, user);
-
-    res.send({ ...user._doc });
-  } catch (error) {
-    logger.error('Error fetching User:', error);
-    res.status(500).send({ error: error.message });
+  const id = req.params.id;
+  const result = await getObject({
+    model: userModel,
+    id,
+  });
+  if (result?.error) {
+    logger.warn(`User not found with supplied id.`);
+    return res.status(result.code).send(result);
   }
+  logger.debug(`Retreived user with ID: ${id}`);
+  res.send(result);
 };
 
 export const editUserRouteHandler = async (req, res) => {
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
-    // Fetch the user with the given ID
-    const user = await userModel.findOne({ _id: id });
-
-    if (!user) {
-      // Error handling
-      logger.warn(`User not found with supplied id.`);
-      return res.status(404).send({ error: 'User not found.' });
-    }
-
-    logger.trace(`User with ID: ${id}:`, user);
-
-    try {
-      const updateData = {
-        updatedAt: new Date(),
-        username: req.body.username,
-        name: req.body.name,
-        firstName: req.body.firstName,
-        lastName: req.body.lastName,
-        email: req.body.email,
-      };
-
-      console.log(req.user);
-
-      // Create audit log before updating
-      await editAuditLog(user.toObject(), updateData, id, 'user', req.user);
-
-      const result = await userModel.updateOne({ _id: id }, { $set: updateData });
-      if (result.nModified === 0) {
-        logger.error('No User updated.');
-        res.status(500).send({ error: 'No users updated.' });
-      }
-
-      await distributeUpdate(updateData, id, 'user');
-    } catch (updateError) {
-      logger.error('Error updating user:', updateError);
-      res.status(500).send({ error: updateError.message });
-    }
-
-    res.send('OK');
-  } catch (fetchError) {
-    logger.error('Error fetching user:', fetchError);
-    res.status(500).send({ error: fetchError.message });
+  // Get ID from params
+  const id = new mongoose.Types.ObjectId(req.params.id);
+
+  logger.trace(`User with ID: ${id}`);
+
+  const updateData = {
+    updatedAt: new Date(),
+    name: req.body.name,
+    firstName: req.body.firstName,
+    lastName: req.body.lastName,
+    email: req.body.email,
+  };
+  // Create audit log before updating
+  const result = await editObject({
+    model: userModel,
+    id,
+    updateData,
+    user: req.user,
+  });
+
+  if (result.error) {
+    logger.error('Error editing user:', result.error);
+    res.status(result).send(result);
+    return;
   }
 
+  logger.debug(`Edited user with ID: ${id}`);
+
+  res.send(result);
 };
@@ -2,7 +2,13 @@ import dotenv from 'dotenv';
 import mongoose from 'mongoose';
 import { jobModel } from '../../schemas/production/job.schema.js';
 import log4js from 'log4js';
-import { deleteObject, getObject, listObjects, newObject } from '../../database/database.js';
+import {
+  deleteObject,
+  getObject,
+  listObjects,
+  listObjectsByProperties,
+  newObject,
+} from '../../database/database.js';
 import { subJobModel } from '../../schemas/production/subjob.schema.js';
 dotenv.config();
 
@@ -42,6 +48,30 @@ export const listJobsRouteHandler = async (
   res.send(result);
 };
 
+export const listJobsByPropertiesRouteHandler = async (
+  req,
+  res,
+  properties = '',
+  filter = {},
+  masterFilter = {}
+) => {
+  const result = await listObjectsByProperties({
+    model: jobModel,
+    properties,
+    filter,
+    masterFilter,
+  });
+
+  if (result?.error) {
+    logger.error('Error listing jobs.');
+    res.status(result.code).send(result);
+    return;
+  }
+
+  logger.debug(`List of jobs. Count: ${result.length}`);
+  res.send(result);
+};
+
 export const getJobRouteHandler = async (req, res) => {
   const id = req.params.id;
   const result = await getObject({