Update dependencies, refactor routes, and enhance caching: Upgraded various package versions in package.json and package-lock.json for improved performance and security. Refactored routes to include new document-related endpoints and integrated caching mechanisms for user and token lookups to optimize authentication processes. Updated schemas to support new data structures and relationships, ensuring better data integrity and retrieval.
This commit is contained in:
parent
a6f6f57c1d
commit
509d74ad99
7152
package-lock.json
generated
7152
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
65
package.json
65
package.json
@ -4,52 +4,43 @@
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
"@simplewebauthn/server": "^10.0.0",
|
||||
"@tremor/react": "^3.17.2",
|
||||
"antd": "*",
|
||||
"axios": "^1.8.4",
|
||||
"bcrypt": "*",
|
||||
"body-parser": "*",
|
||||
"axios": "^1.11.0",
|
||||
"bcrypt": "^6.0.0",
|
||||
"body-parser": "^2.2.0",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "*",
|
||||
"express": "^4.19.2",
|
||||
"express-session": "^1.18.0",
|
||||
"dotenv": "^17.2.1",
|
||||
"etcd3": "^1.1.2",
|
||||
"express": "^5.1.0",
|
||||
"express-session": "^1.18.2",
|
||||
"i": "^0.3.7",
|
||||
"jsonwebtoken": "*",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"keycloak-connect": "^26.1.1",
|
||||
"log4js": "^6.9.1",
|
||||
"mongodb": "*",
|
||||
"mongoose": "*",
|
||||
"mongoose-sequence": "^6.0.1",
|
||||
"mongoose-unique-array": "^0.4.2",
|
||||
"multer": "^1.4.5-lts.1",
|
||||
"mysql": "^2.18.1",
|
||||
"mysql2": "^2.3.3",
|
||||
"node-cron": "^3.0.2",
|
||||
"mongodb": "^6.18.0",
|
||||
"mongoose": "^8.17.1",
|
||||
"multer": "^2.0.2",
|
||||
"node-cache": "^5.1.2",
|
||||
"node-cron": "^4.2.1",
|
||||
"nodemailer": "*",
|
||||
"nodemon": "^2.0.16",
|
||||
"passport": "*",
|
||||
"passport-jwt": "*",
|
||||
"passport-local": "*",
|
||||
"pg": "^8.7.3",
|
||||
"random-token": "*",
|
||||
"sequelize": "^6.20.1"
|
||||
"nodemon": "^3.1.10",
|
||||
"pg": "^8.16.3",
|
||||
"sequelize": "^6.37.7"
|
||||
},
|
||||
"type": "module",
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.17.10",
|
||||
"@babel/core": "^7.18.5",
|
||||
"@babel/node": "^7.18.5",
|
||||
"@babel/plugin-proposal-class-properties": "^7.17.12",
|
||||
"@babel/plugin-proposal-object-rest-spread": "^7.18.0",
|
||||
"@babel/preset-env": "^7.18.2",
|
||||
"@babel/register": "^7.17.7",
|
||||
"eslint": "^8.57.1",
|
||||
"eslint-config-prettier": "^10.1.5",
|
||||
"eslint-plugin-prettier": "^5.5.1",
|
||||
"@babel/cli": "^7.28.3",
|
||||
"@babel/core": "^7.28.3",
|
||||
"@babel/node": "^7.28.0",
|
||||
"@babel/plugin-proposal-class-properties": "^7.18.6",
|
||||
"@babel/plugin-proposal-object-rest-spread": "^7.20.7",
|
||||
"@babel/preset-env": "^7.28.3",
|
||||
"@babel/register": "^7.28.3",
|
||||
"eslint": "^9.33.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.5.4",
|
||||
"prettier": "^3.6.2",
|
||||
"sequelize-cli": "^6.4.1",
|
||||
"standard": "^17.1.0"
|
||||
"sequelize-cli": "^6.6.3",
|
||||
"standard": "^17.1.2"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "nodemon --exec babel-node --experimental-specifier-resolution=node src/index.js",
|
||||
|
||||
372
src/database/database.js
Normal file
372
src/database/database.js
Normal file
@ -0,0 +1,372 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { deleteAuditLog, expandObjectIds } from '../utils.js';
|
||||
import log4js from 'log4js';
|
||||
import { editAuditLog, distributeUpdate, newAuditLog, distributeNew } from '../utils.js';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Filament Stocks');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
// Reusable function to list objects with filtering, sorting, pagination,
// optional population, and optional projection.
//
// Params (single options object):
//   model    - Mongoose model to query.
//   populate - populate spec: string, object, or array of either.
//   page     - 1-based page number (default 1).
//   limit    - page size (default 25).
//   filter   - Mongoose filter; 'parent._id' / 'owner._id' keys are
//              translated to 'parent' / 'owner'. The caller's object is
//              no longer mutated (previous version deleted keys in place).
//   sort     - field to sort by; defaults to 'createdAt' when empty.
//   order    - 'ascend' (default) or 'descend'.
//   project  - optional projection passed to query.select().
//
// Returns an array of plain objects (ObjectIds expanded via
// expandObjectIds), or { error, code: 500 } on failure.
export const listObjects = async ({
  model,
  populate = [],
  page = 1,
  limit = 25,
  filter = {},
  sort = '',
  order = 'ascend',
  project, // optional: override default projection
}) => {
  try {
    logger.trace('Listing object:', {
      model,
      populate,
      page,
      limit,
      filter,
      sort,
      order,
      project,
    });

    // Calculate the skip value based on the page number and limit.
    const skip = (page - 1) * limit;
    // descend maps to -1, ascend to 1.
    const sortOrder = order === 'descend' ? -1 : 1;
    // Default sort field when none supplied.
    const sortField = sort || 'createdAt';

    // Fix: operate on a shallow copy so the caller's filter object is not
    // mutated by the key translations below.
    const mongoFilter = { ...filter };

    // Translate parent._id to parent for Mongoose.
    if (mongoFilter['parent._id']) {
      mongoFilter.parent = mongoFilter['parent._id'];
      delete mongoFilter['parent._id'];
    }

    // Translate owner._id to owner for Mongoose.
    if (mongoFilter['owner._id']) {
      mongoFilter.owner = mongoFilter['owner._id'];
      delete mongoFilter['owner._id'];
    }

    // Use find with population and filter.
    let query = model
      .find(mongoFilter)
      .sort({ [sortField]: sortOrder })
      .skip(skip)
      .limit(Number(limit));

    // Handle populate (array or single value).
    if (populate) {
      if (Array.isArray(populate)) {
        for (const pop of populate) {
          query = query.populate(pop);
        }
      } else if (typeof populate === 'string' || typeof populate === 'object') {
        query = query.populate(populate);
      }
    }

    // Handle select (projection).
    if (project) {
      query = query.select(project);
    }

    query = query.lean();

    const queryResult = await query;
    return expandObjectIds(queryResult);
  } catch (error) {
    logger.error('Object list error:', error);
    return { error: error, code: 500 };
  }
};
|
||||
|
||||
// Collect the distinct values of a single property across a collection.
// An optional full-text `search` and/or structured `filter` narrow the
// matched documents first. Resolves to an array of { [property]: value }.
export const listPropertyValues = async ({ model, property, filter = {}, search = '' }) => {
  const pipeline = [];

  // A $text search stage must lead the pipeline.
  if (search) {
    pipeline.push({ $match: { $text: { $search: search } } });
  }

  // Only add the structured filter when it actually carries conditions.
  if (filter && Object.keys(filter).length > 0) {
    pipeline.push({ $match: filter });
  }

  // Group on the property to deduplicate, then reshape { _id } -> { prop }.
  pipeline.push(
    { $group: { _id: `$${property}` } },
    { $project: { _id: 0, [property]: '$_id' } }
  );

  return model.aggregate(pipeline);
};
|
||||
|
||||
// Recursively build a nested map of group keys for listObjectsByProperties.
// At each level (props[idx]) the aggregation groups are bucketed by a display
// key. When the caller's filter pins a value for that property, matching
// buckets either recurse to the next level or — at the last property —
// expand to the grouped objects; everything else stays an empty placeholder.
function nestGroups(groups, props, filter, idx = 0) {
  if (idx >= props.length) return groups;

  const prop = props[idx];
  const hasFilter = Object.prototype.hasOwnProperty.call(filter, prop);

  // Derive a display key plus the values a filter may match against.
  // Populated refs prefer their name; bare ids fall back to the id string.
  const describeValue = (value) => {
    if (value && typeof value === 'object') {
      if (value.name) {
        return { key: value.name, filterVals: [value._id?.toString?.(), value.name] };
      }
      if (value._id) {
        return { key: value._id.toString(), filterVals: [value._id.toString()] };
      }
    }
    return { key: value, filterVals: [value] };
  };

  // Bucket the groups by display key for this property.
  const buckets = {};
  for (const group of groups) {
    const { key } = describeValue(group._id[prop]);
    (buckets[key] ??= []).push(group);
  }

  const result = {};

  if (!hasFilter) {
    // No filter at this level: expose every key with an empty placeholder.
    for (const key of Object.keys(buckets)) {
      result[key] = {};
    }
    return result;
  }

  const wanted = filter[prop]?.toString?.() ?? filter[prop];
  for (const [key, bucket] of Object.entries(buckets)) {
    // Keep only groups whose _id or name matches the filtered value.
    const matched = bucket.filter((group) =>
      describeValue(group._id[prop]).filterVals.some((v) => v?.toString() === wanted)
    );

    if (matched.length === 0) {
      result[key] = {};
    } else if (idx === props.length - 1) {
      // Deepest filtered property: flatten the matched groups' objects.
      result[key] = matched.flatMap((group) => group.objects.map(expandObjectIds));
    } else {
      result[key] = nestGroups(matched, props, filter, idx + 1);
    }
  }

  return result;
}
|
||||
|
||||
// Group objects by multiple properties and return nested groupings keyed by
// each property's display value (see nestGroups). With no usable
// `properties`, resolves to the flat (optionally populated/filtered) list.
//
// Params (single options object):
//   model        - Mongoose model to aggregate over.
//   properties   - property names to group by, outermost first.
//   filter       - per-property values used by nestGroups to drill down.
//   masterFilter - $match conditions applied before grouping.
//   populate     - string / { path, options, justOne } / array of either,
//                  emulated with $lookup (+ $unwind for single refs).
//
// Returns the nested map (or flat array), or { error, code: 500 }.
export const listObjectsByProperties = async ({
  model,
  properties = [],
  filter = {},
  masterFilter = {},
  populate,
}) => {
  try {
    // Properties count as present only for a non-empty array whose first
    // entry is not '' (the route layer may pass ['']).
    const propertiesPresent =
      Array.isArray(properties) && properties.length > 0 && properties[0] != '';

    // Build aggregation pipeline
    const pipeline = [];

    // Handle populate (array or single value)
    if (populate) {
      const populates = Array.isArray(populate) ? populate : [populate];
      for (const pop of populates) {
        // Support both string and object syntax for populate
        if (typeof pop === 'string') {
          pipeline.push({
            $lookup: {
              from: pop.toLowerCase() + 's', // crude pluralization, adjust if needed
              localField: pop,
              foreignField: '_id',
              as: pop,
            },
          });
          // Unwind if it's a single reference
          pipeline.push({
            $unwind: {
              path: `$${pop}`,
              preserveNullAndEmptyArrays: true,
            },
          });
        } else if (typeof pop === 'object' && pop.path) {
          pipeline.push({
            $lookup: {
              from:
                pop.options && pop.options.from ? pop.options.from : pop.path.toLowerCase() + 's',
              localField: pop.path,
              foreignField: '_id',
              as: pop.path,
            },
          });
          // Fix: default to unwinding unless justOne is explicitly false.
          // The previous check (`!pop.justOne === false`) skipped the unwind
          // when justOne was undefined, contradicting the intended default.
          if (pop.justOne !== false) {
            pipeline.push({
              $unwind: {
                path: `$${pop.path}`,
                preserveNullAndEmptyArrays: true,
              },
            });
          }
        }
      }
    }

    // Fix: `masterFilter != {}` compared object identities and was always
    // true; only add a $match stage when the filter carries conditions.
    if (masterFilter && Object.keys(masterFilter).length > 0) {
      pipeline.push({ $match: { ...masterFilter } });
    }

    if (propertiesPresent) {
      // Build the $group _id object for all properties
      const groupId = {};
      for (const prop of properties) {
        groupId[prop] = `$${prop}`;
      }
      pipeline.push({
        $group: {
          _id: groupId,
          objects: { $push: '$$ROOT' },
        },
      });

      // Run aggregation, then nest the flat groups per requested property.
      const results = await model.aggregate(pipeline);
      return nestGroups(results, properties, filter);
    } else {
      // If no properties specified, just return all objects without grouping.
      // Ensure pipeline is not empty by adding a $match stage if needed.
      if (pipeline.length === 0) {
        pipeline.push({ $match: {} });
      }
      const results = await model.aggregate(pipeline);
      return results;
    }
  } catch (error) {
    logger.error('listObjectsByProperty error:', error);
    return { error: error.message, code: 500 };
  }
};
|
||||
|
||||
// Fetch a single document by id as a plain (lean) object.
// `populate` may be a string, an object, or an array of either.
// Resolves to the document, { error, code: 404 } when missing, or
// { error, code: 500 } on query failure.
export const getObject = async ({ model, id, populate }) => {
  try {
    let query = model.findById(id).lean();

    // Normalize the populate spec to a list and apply each entry.
    if (populate) {
      const specs = Array.isArray(populate)
        ? populate
        : typeof populate === 'string' || typeof populate === 'object'
          ? [populate]
          : [];
      for (const spec of specs) {
        query = query.populate(spec);
      }
    }

    const doc = await query;

    if (!doc) {
      return { error: 'Object not found.', code: 404 };
    }

    return doc;
  } catch (error) {
    return { error: error, code: 500 };
  }
};
|
||||
|
||||
// Update a document by id, audit-log the before/after pair, and broadcast
// the change. Resolves to the previous document merged with `updateData`
// (the expected post-update state), { error, code: 404 } for an unknown id,
// or { error, code: 500 } on failure.
export const editObject = async ({ model, id, updateData, user, populate }) => {
  try {
    // Audit entries and distribution events are tagged with the model name.
    const parentType = model.modelName ? model.modelName : 'unknown';

    // findByIdAndUpdate without { new: true } resolves to the PRE-update
    // document, which is exactly what the audit log needs.
    let query = model.findByIdAndUpdate(id, updateData).lean();

    // Normalize the populate spec to a list and apply each entry.
    if (populate) {
      const specs = Array.isArray(populate)
        ? populate
        : typeof populate === 'string' || typeof populate === 'object'
          ? [populate]
          : [];
      for (const spec of specs) {
        query = query.populate(spec);
      }
    }

    const previousDoc = await query;

    if (!previousDoc) {
      return { error: `${parentType} not found.`, code: 404 };
    }

    const previousExpanded = expandObjectIds(previousDoc);

    // Record the before/after pair, then fan the update out to listeners.
    await editAuditLog(
      previousExpanded,
      { ...previousExpanded, ...updateData },
      id,
      parentType,
      user
    );
    await distributeUpdate(updateData, id, parentType);

    return { ...previousExpanded, ...updateData };
  } catch (error) {
    logger.error('editObject error:', error);
    return { error: error.message, code: 500 };
  }
};
|
||||
|
||||
// Create a document, audit-log the creation, and broadcast it.
// Resolves to the created document, or { error, code: 500 } when creation
// yields nothing or throws.
export const newObject = async ({ model, newData, user = null }) => {
  try {
    const parentType = model.modelName ? model.modelName : 'unknown';

    const created = await model.create(newData);

    // Guard against an empty result (the length check covers array results
    // from bulk creates).
    if (!created || created.length === 0) {
      return { error: 'No object created.', code: 500 };
    }

    // Record the creation, then notify listeners of the new object.
    await newAuditLog(newData, created._id, parentType, user);
    await distributeNew(created._id, parentType);

    return created;
  } catch (error) {
    logger.error('newObject error:', error);
    return { error: error.message, code: 500 };
  }
};
|
||||
|
||||
// Delete a document by id, audit-log the removal, and broadcast a
// { deleted: true } update. Resolves to { deleted: true, id },
// { error, code: 404 } for an unknown id, or { error, code: 500 }.
export const deleteObject = async ({ model, id, user = null }) => {
  try {
    const parentType = model.modelName ? model.modelName : 'unknown';

    // Remove the document; resolves to the deleted doc or null.
    const removed = await model.findByIdAndDelete(id);

    if (!removed) {
      return { error: `${parentType} not found.`, code: 404 };
    }

    // Record what was removed, then notify listeners of the deletion.
    await deleteAuditLog(removed, id, parentType, user, 'delete');
    await distributeUpdate({ deleted: true }, id, parentType);

    return { deleted: true, id };
  } catch (error) {
    logger.error('deleteObject error:', error);
    return { error: error.message, code: 500 };
  }
};
|
||||
110
src/database/etcd.js
Normal file
110
src/database/etcd.js
Normal file
@ -0,0 +1,110 @@
|
||||
import { Etcd3 } from 'etcd3';
import log4js from 'log4js';
import dotenv from 'dotenv';

dotenv.config();

// Connection and logging settings come from the environment, with local
// development defaults.
const ETCD_HOST = process.env.ETCD_HOST || 'localhost';
const ETCD_PORT = process.env.ETCD_PORT || 2379;
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';

const logger = log4js.getLogger('Etcd');
logger.level = LOG_LEVEL;

// Thin lifecycle wrapper around a single shared Etcd3 client: lazy
// connection, JSON-aware get/set helpers, and watcher-aware shutdown.
class EtcdServer {
  constructor() {
    // Lazily created Etcd3 client; see connect().
    this.client = null;
    // Watchers keyed by watch key. Nothing in this file registers watchers,
    // so this map only matters to disconnect() — TODO confirm who populates it.
    this.watchers = new Map();
    this.hosts = [`${ETCD_HOST}:${ETCD_PORT}`];
    logger.debug(`EtcdServer constructor: hosts set to ${JSON.stringify(this.hosts)}`);
  }

  // Create the client on first use and probe connectivity with a read of a
  // throwaway key. Subsequent calls reuse the existing client.
  // Returns the shared Etcd3 client; rethrows any non-NOT_FOUND probe error.
  async connect() {
    if (!this.client) {
      logger.info('Connecting to Etcd...');
      logger.debug(`Creating Etcd client with hosts ${JSON.stringify(this.hosts)}`);
      this.client = new Etcd3({
        hosts: this.hosts,
      });

      // Test connection
      try {
        await this.client.get('test-connection').string();
        logger.debug('Etcd client connected successfully.');
      } catch (error) {
        // NOTE(review): etcd3's get() typically resolves to null for a
        // missing key rather than throwing — confirm this NOT_FOUND branch
        // is actually reachable.
        if (error.code === 'NOT_FOUND') {
          logger.debug('Etcd client connected successfully (test key not found as expected).');
        } else {
          throw error;
        }
      }
    } else {
      logger.debug('Etcd client already exists, skipping connection.');
    }
    return this.client;
  }

  // Return the shared client, connecting first if necessary.
  async getClient() {
    logger.trace('Checking if Etcd client exists.');
    if (!this.client) {
      logger.debug('No client found, calling connect().');
      await this.connect();
    }
    logger.trace('Returning Etcd client.');
    return this.client;
  }

  // Hash-like functionality using etcd
  // Store `value` under `key`; non-string values are JSON-serialized.
  // Returns true on success.
  async setKey(key, value) {
    const client = await this.getClient();
    const stringValue = typeof value === 'string' ? value : JSON.stringify(value);

    await client.put(key).value(stringValue);
    logger.debug(`Set key: ${key}, value: ${stringValue}`);
    return true;
  }

  // Read `key`. Values that parse as JSON are returned parsed, otherwise
  // the raw string is returned; missing keys yield null.
  async getKey(key) {
    const client = await this.getClient();

    try {
      const value = await client.get(key).string();
      logger.debug(`Retrieved key: ${key}, value: ${value}`);

      // Try to parse as JSON, fallback to string
      try {
        return JSON.parse(value);
      } catch {
        return value;
      }
    } catch (error) {
      if (error.code === 'NOT_FOUND') {
        logger.debug(`Key not found: ${key}`);
        return null;
      }
      throw error;
    }
  }

  // Tear down every registered watcher, then close and reset the client so
  // the instance can connect again later.
  async disconnect() {
    logger.info('Disconnecting from Etcd...');

    // Stop all watchers
    for (const [key, watcher] of this.watchers) {
      logger.debug(`Stopping watcher: ${key}`);
      watcher.removeAllListeners();
      await watcher.close();
    }
    this.watchers.clear();

    if (this.client) {
      await this.client.close();
      this.client = null;
      logger.info('Disconnected from Etcd');
    }
  }
}

// Shared singleton used across the app; the class itself is exported for
// tests or alternate configurations.
const etcdServer = new EtcdServer();

export { EtcdServer, etcdServer };
|
||||
20
src/index.js
20
src/index.js
@ -9,6 +9,7 @@ import {
|
||||
userRoutes,
|
||||
printerRoutes,
|
||||
jobRoutes,
|
||||
subJobRoutes,
|
||||
gcodeFileRoutes,
|
||||
filamentRoutes,
|
||||
spotlightRoutes,
|
||||
@ -23,12 +24,17 @@ import {
|
||||
auditLogRoutes,
|
||||
noteTypeRoutes,
|
||||
noteRoutes,
|
||||
hostRoutes,
|
||||
documentSizesRoutes,
|
||||
documentTemplatesRoutes,
|
||||
documentPrintersRoutes,
|
||||
} from './routes/index.js';
|
||||
import path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import cron from 'node-cron';
|
||||
import ReseedAction from './database/ReseedAction.js';
|
||||
import log4js from 'log4js';
|
||||
import { etcdServer } from './database/etcd.js';
|
||||
import { populateUserMiddleware } from './services/misc/auth.js';
|
||||
|
||||
dotenv.config();
|
||||
@ -55,6 +61,15 @@ const corsOptions = {
|
||||
|
||||
dbConnect();
|
||||
|
||||
// Connect to Etcd before serving requests; fail fast if it is unavailable.
// Fix: connect() is async — without the await its rejection escaped this
// try/catch as an unhandled rejection and the error branch was dead code.
// Top-level await is valid here (this file is an ES module).
try {
  await etcdServer.connect();
  logger.info('Connected to Etcd');
} catch (err) {
  logger.error('Failed to connect to Etcd:', err);
  throw err;
}
|
||||
|
||||
app.use(cors(corsOptions));
|
||||
app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' }));
|
||||
app.use(express.json());
|
||||
@ -71,7 +86,9 @@ app.use('/auth', authRoutes);
|
||||
app.use('/users', userRoutes);
|
||||
app.use('/spotlight', spotlightRoutes);
|
||||
app.use('/printers', printerRoutes);
|
||||
app.use('/hosts', hostRoutes);
|
||||
app.use('/jobs', jobRoutes);
|
||||
app.use('/subjobs', subJobRoutes);
|
||||
app.use('/gcodefiles', gcodeFileRoutes);
|
||||
app.use('/filaments', filamentRoutes);
|
||||
app.use('/parts', partRoutes);
|
||||
@ -84,6 +101,9 @@ app.use('/stockevents', stockEventRoutes);
|
||||
app.use('/stockaudits', stockAuditRoutes);
|
||||
app.use('/auditlogs', auditLogRoutes);
|
||||
app.use('/notetypes', noteTypeRoutes);
|
||||
app.use('/documentsizes', documentSizesRoutes);
|
||||
app.use('/documenttemplates', documentTemplatesRoutes);
|
||||
app.use('/documentprinters', documentPrintersRoutes);
|
||||
app.use('/notes', noteRoutes);
|
||||
|
||||
if (process.env.SCHEDULE_HOUR) {
|
||||
|
||||
@ -4,11 +4,54 @@ import dotenv from 'dotenv';
|
||||
import axios from 'axios';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import log4js from 'log4js';
|
||||
import NodeCache from 'node-cache';
|
||||
import { userModel } from './schemas/management/user.schema.js';
|
||||
|
||||
dotenv.config();
|
||||
const logger = log4js.getLogger('Keycloak');
|
||||
logger.level = process.env.LOG_LEVEL || 'info';
|
||||
|
||||
// Initialize NodeCache with 5-minute TTL
|
||||
const userCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
|
||||
|
||||
// Cache event listeners for monitoring
|
||||
userCache.on('expired', (key, value) => {
|
||||
logger.debug(`Cache entry expired: ${key}`);
|
||||
});
|
||||
|
||||
userCache.on('flush', () => {
|
||||
logger.info('Cache flushed');
|
||||
});
|
||||
|
||||
// Resolve a Keycloak preferred_username to the local user document,
// consulting the in-memory cache first and caching database hits for the
// configured TTL. Resolves to null when the user is unknown or lookup fails.
const lookupUser = async (preferredUsername) => {
  try {
    // Fast path: serve straight from the cache when present.
    const cached = userCache.get(preferredUsername);
    if (cached) {
      logger.debug(`User found in cache: ${preferredUsername}`);
      return cached;
    }

    // Cache miss: fall through to the database.
    logger.debug(`User not in cache, querying database: ${preferredUsername}`);
    const dbUser = await userModel.findOne({ username: preferredUsername });

    if (!dbUser) {
      logger.warn(`User not found in database: ${preferredUsername}`);
      return null;
    }

    // Remember the hit for subsequent requests.
    userCache.set(preferredUsername, dbUser);
    logger.debug(`User stored in cache: ${preferredUsername}`);
    return dbUser;
  } catch (error) {
    logger.error(`Error looking up user ${preferredUsername}:`, error.message);
    return null;
  }
};
|
||||
|
||||
// Initialize Keycloak
|
||||
const keycloakConfig = {
|
||||
realm: process.env.KEYCLOAK_REALM || 'farm-control',
|
||||
@ -43,8 +86,7 @@ var keycloak = new Keycloak({ store: memoryStore }, keycloakConfig);
|
||||
const isAuthenticated = async (req, res, next) => {
|
||||
let token = null;
|
||||
|
||||
// Try to get token from Authorization header
|
||||
const authHeader = req.headers.authorization;
|
||||
const authHeader = req.headers.authorization || req.headers.Authorization;
|
||||
if (authHeader && authHeader.startsWith('Bearer ')) {
|
||||
token = authHeader.substring(7);
|
||||
|
||||
@ -67,27 +109,19 @@ const isAuthenticated = async (req, res, next) => {
|
||||
const introspection = response.data;
|
||||
if (!introspection.active) {
|
||||
logger.info('Token is not active');
|
||||
return res.status(401).json({ error: 'Not authenticated' });
|
||||
logger.debug('Token:', token);
|
||||
return res.status(401).json({ error: 'Session Inactive', code: 'UNAUTHORIZED' });
|
||||
}
|
||||
|
||||
// Parse token to extract user info
|
||||
const decodedToken = jwt.decode(token);
|
||||
req.user = {
|
||||
id: decodedToken.sub,
|
||||
username: decodedToken.preferred_username,
|
||||
email: decodedToken.email,
|
||||
name: decodedToken.name,
|
||||
roles: extractRoles(decodedToken),
|
||||
};
|
||||
|
||||
return next();
|
||||
} catch (error) {
|
||||
logger.error('Token verification error:', error.message);
|
||||
return res.status(401).json({ error: 'Not authenticated' });
|
||||
return res.status(401).json({ error: 'Verification Error', code: 'UNAUTHORIZED' });
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to session-based authentication
|
||||
console.log('Using session token');
|
||||
if (req.session && req.session['keycloak-token']) {
|
||||
const sessionToken = req.session['keycloak-token'];
|
||||
if (sessionToken.expires_at > new Date().getTime()) {
|
||||
@ -95,7 +129,7 @@ const isAuthenticated = async (req, res, next) => {
|
||||
}
|
||||
}
|
||||
|
||||
return res.status(401).json({ error: 'Not authenticated' });
|
||||
return res.status(401).json({ error: 'Not Authenticated', code: 'UNAUTHORIZED' });
|
||||
};
|
||||
|
||||
// Helper function to extract roles from token
|
||||
@ -119,4 +153,27 @@ function extractRoles(token) {
|
||||
return roles;
|
||||
}
|
||||
|
||||
export { keycloak, expressSession, isAuthenticated };
|
||||
// Cache management utility functions

// Drop every cached user entry (e.g. after bulk user changes).
const clearUserCache = () => {
  userCache.flushAll();
  logger.info('User cache cleared');
};

// Expose node-cache hit/miss statistics for monitoring endpoints.
const getUserCacheStats = () => userCache.getStats();

// Evict one user's cache entry, e.g. after that user's record is updated.
const removeUserFromCache = (username) => {
  userCache.del(username);
  logger.debug(`User removed from cache: ${username}`);
};
||||
|
||||
export {
|
||||
keycloak,
|
||||
expressSession,
|
||||
isAuthenticated,
|
||||
lookupUser,
|
||||
clearUserCache,
|
||||
getUserCacheStats,
|
||||
removeUserFromCache,
|
||||
};
|
||||
|
||||
@ -1,27 +1,34 @@
|
||||
import userRoutes from "./management/users.js";
|
||||
import authRoutes from "./misc/auth.js";
|
||||
import printerRoutes from "./production/printers.js";
|
||||
import jobRoutes from "./production/jobs.js";
|
||||
import gcodeFileRoutes from "./production/gcodefiles.js";
|
||||
import filamentRoutes from "./production/filaments.js";
|
||||
import spotlightRoutes from "./misc/spotlight.js";
|
||||
import partRoutes from "./management/parts.js";
|
||||
import productRoutes from "./management/products.js";
|
||||
import vendorRoutes from "./management/vendors.js";
|
||||
import materialRoutes from "./management/materials.js";
|
||||
import partStockRoutes from "./inventory/partstocks.js";
|
||||
import filamentStockRoutes from "./inventory/filamentstocks.js";
|
||||
import stockEventRoutes from "./inventory/stockevents.js";
|
||||
import stockAuditRoutes from "./inventory/stockaudits.js";
|
||||
import auditLogRoutes from "./management/auditlogs.js";
|
||||
import noteTypeRoutes from "./management/notetypes.js";
|
||||
import noteRoutes from "./misc/notes.js";
|
||||
import userRoutes from './management/users.js';
|
||||
import authRoutes from './misc/auth.js';
|
||||
import printerRoutes from './production/printers.js';
|
||||
import hostRoutes from './management/hosts.js';
|
||||
import jobRoutes from './production/jobs.js';
|
||||
import subJobRoutes from './production/subjobs.js';
|
||||
import gcodeFileRoutes from './production/gcodefiles.js';
|
||||
import filamentRoutes from './management/filaments.js';
|
||||
import spotlightRoutes from './misc/spotlight.js';
|
||||
import partRoutes from './management/parts.js';
|
||||
import productRoutes from './management/products.js';
|
||||
import vendorRoutes from './management/vendors.js';
|
||||
import materialRoutes from './management/materials.js';
|
||||
import partStockRoutes from './inventory/partstocks.js';
|
||||
import filamentStockRoutes from './inventory/filamentstocks.js';
|
||||
import stockEventRoutes from './inventory/stockevents.js';
|
||||
import stockAuditRoutes from './inventory/stockaudits.js';
|
||||
import auditLogRoutes from './management/auditlogs.js';
|
||||
import noteTypeRoutes from './management/notetypes.js';
|
||||
import documentSizesRoutes from './management/documentsizes.js';
|
||||
import documentTemplatesRoutes from './management/documenttemplates.js';
|
||||
import documentPrintersRoutes from './management/documentprinters.js';
|
||||
import noteRoutes from './misc/notes.js';
|
||||
|
||||
export {
|
||||
userRoutes,
|
||||
authRoutes,
|
||||
printerRoutes,
|
||||
hostRoutes,
|
||||
jobRoutes,
|
||||
subJobRoutes,
|
||||
gcodeFileRoutes,
|
||||
filamentRoutes,
|
||||
spotlightRoutes,
|
||||
@ -35,5 +42,8 @@ export {
|
||||
stockAuditRoutes,
|
||||
auditLogRoutes,
|
||||
noteTypeRoutes,
|
||||
noteRoutes
|
||||
noteRoutes,
|
||||
documentSizesRoutes,
|
||||
documentTemplatesRoutes,
|
||||
documentPrintersRoutes,
|
||||
};
|
||||
|
||||
@ -6,7 +6,6 @@ const router = express.Router();
|
||||
import {
|
||||
listFilamentStocksRouteHandler,
|
||||
getFilamentStockRouteHandler,
|
||||
editFilamentStockRouteHandler,
|
||||
newFilamentStockRouteHandler,
|
||||
} from '../../services/inventory/filamentstocks.js';
|
||||
|
||||
@ -14,7 +13,7 @@ import {
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = ['country'];
|
||||
const allowedFilters = ['filament', 'filament._id'];
|
||||
|
||||
var filter = {};
|
||||
|
||||
@ -38,9 +37,4 @@ router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getFilamentStockRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// update printer info
|
||||
router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editFilamentStockRouteHandler(req, res);
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
@ -11,9 +11,9 @@ import {
|
||||
|
||||
// List stock events
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, sort, order } = req.query;
|
||||
const { page, limit, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = ['type', 'filamentStock'];
|
||||
const allowedFilters = ['owner_.id', 'parent._id'];
|
||||
|
||||
var filter = {};
|
||||
|
||||
@ -25,7 +25,7 @@ router.get('/', isAuthenticated, (req, res) => {
|
||||
}
|
||||
}
|
||||
}
|
||||
listStockEventsRouteHandler(req, res, page, limit, property, filter, sort, order);
|
||||
listStockEventsRouteHandler(req, res, page, limit, filter, sort, order);
|
||||
});
|
||||
|
||||
// Create new stock event
|
||||
|
||||
@ -10,9 +10,9 @@ const router = express.Router();
|
||||
|
||||
// List note types
|
||||
router.get('/', isAuthenticated, async (req, res) => {
|
||||
const { page, limit, property, sort, order } = req.query;
|
||||
const { page, limit, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = ['target', 'owner'];
|
||||
const allowedFilters = ['parent._id', 'owner._id'];
|
||||
|
||||
var filter = {};
|
||||
|
||||
@ -24,8 +24,7 @@ router.get('/', isAuthenticated, async (req, res) => {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
listAuditLogsRouteHandler(req, res, page, limit, property, filter, '', sort, order);
|
||||
listAuditLogsRouteHandler(req, res, page, limit, filter, sort, order);
|
||||
});
|
||||
|
||||
/**
|
||||
|
||||
46
src/routes/management/documentprinters.js
Normal file
46
src/routes/management/documentprinters.js
Normal file
@ -0,0 +1,46 @@
|
||||
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { getFilter, convertPropertiesString } from '../../utils.js';

const router = express.Router();
import {
  listDocumentPrintersRouteHandler,
  getDocumentPrinterRouteHandler,
  editDocumentPrinterRouteHandler,
  newDocumentPrinterRouteHandler,
  deleteDocumentPrinterRouteHandler,
  listDocumentPrintersByPropertiesRouteHandler,
} from '../../services/management/documentprinters.js';

// GET / — paged, filtered, searchable list of document printers.
router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, search, sort, order } = req.query;
  const filter = getFilter(req.query, ['name', 'tags', 'active', 'isGlobal']);
  listDocumentPrintersRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

// GET /properties — grouped listing by the requested property names.
// Registered before '/:id' so 'properties' is not captured as an id.
router.get('/properties', isAuthenticated, (req, res) => {
  const properties = convertPropertiesString(req.query.properties);
  const filter = getFilter(req.query, ['tags'], false);
  listDocumentPrintersByPropertiesRouteHandler(req, res, properties, filter);
});

// POST / — create a document printer.
router.post('/', isAuthenticated, (req, res) => {
  newDocumentPrinterRouteHandler(req, res);
});

// GET /:id — fetch a single document printer.
router.get('/:id', isAuthenticated, (req, res) => {
  getDocumentPrinterRouteHandler(req, res);
});

// PUT /:id — update a document printer.
router.put('/:id', isAuthenticated, async (req, res) => {
  editDocumentPrinterRouteHandler(req, res);
});

// DELETE /:id — remove a document printer.
router.delete('/:id', isAuthenticated, async (req, res) => {
  deleteDocumentPrinterRouteHandler(req, res);
});

export default router;
|
||||
46
src/routes/management/documentsizes.js
Normal file
46
src/routes/management/documentsizes.js
Normal file
@ -0,0 +1,46 @@
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
import { getFilter, convertPropertiesString } from '../../utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
listDocumentSizesRouteHandler,
|
||||
getDocumentSizeRouteHandler,
|
||||
editDocumentSizeRouteHandler,
|
||||
newDocumentSizeRouteHandler,
|
||||
deleteDocumentSizeRouteHandler,
|
||||
listDocumentSizesByPropertiesRouteHandler,
|
||||
} from '../../services/management/documentsizes.js';
|
||||
|
||||
// list of document sizes
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
const allowedFilters = ['name', 'width', 'height'];
|
||||
const filter = getFilter(req.query, allowedFilters);
|
||||
listDocumentSizesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
router.get('/properties', isAuthenticated, (req, res) => {
|
||||
let properties = convertPropertiesString(req.query.properties);
|
||||
const allowedFilters = [];
|
||||
const filter = getFilter(req.query, allowedFilters, false);
|
||||
listDocumentSizesByPropertiesRouteHandler(req, res, properties, filter);
|
||||
});
|
||||
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
newDocumentSizeRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getDocumentSizeRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editDocumentSizeRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.delete('/:id', isAuthenticated, async (req, res) => {
|
||||
deleteDocumentSizeRouteHandler(req, res);
|
||||
});
|
||||
|
||||
export default router;
|
||||
50
src/routes/management/documenttemplates.js
Normal file
50
src/routes/management/documenttemplates.js
Normal file
@ -0,0 +1,50 @@
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
import { getFilter, convertPropertiesString } from '../../utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
listDocumentTemplatesRouteHandler,
|
||||
getDocumentTemplateRouteHandler,
|
||||
editDocumentTemplateRouteHandler,
|
||||
newDocumentTemplateRouteHandler,
|
||||
deleteDocumentTemplateRouteHandler,
|
||||
listDocumentTemplatesByPropertiesRouteHandler,
|
||||
} from '../../services/management/documenttemplates.js';
|
||||
|
||||
// list of document templates
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
const allowedFilters = ['name', 'tags', 'active', 'isGlobal'];
|
||||
const filter = getFilter(req.query, allowedFilters);
|
||||
listDocumentTemplatesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
router.get('/properties', isAuthenticated, (req, res) => {
|
||||
let properties = convertPropertiesString(req.query.properties);
|
||||
const allowedFilters = ['documentSize', 'tags'];
|
||||
const filter = getFilter(req.query, allowedFilters, false);
|
||||
var masterFilter = {};
|
||||
if (req.query.masterFilter) {
|
||||
masterFilter = JSON.parse(req.query.masterFilter);
|
||||
}
|
||||
listDocumentTemplatesByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
|
||||
});
|
||||
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
newDocumentTemplateRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getDocumentTemplateRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editDocumentTemplateRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.delete('/:id', isAuthenticated, async (req, res) => {
|
||||
deleteDocumentTemplateRouteHandler(req, res);
|
||||
});
|
||||
|
||||
export default router;
|
||||
@ -1,10 +1,11 @@
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
import { parseFilter } from '../../utils.js';
|
||||
import { convertPropertiesString, getFilter, parseFilter } from '../../utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
listFilamentsRouteHandler,
|
||||
listFilamentsByPropertiesRouteHandler,
|
||||
getFilamentRouteHandler,
|
||||
editFilamentRouteHandler,
|
||||
newFilamentRouteHandler,
|
||||
@ -12,9 +13,18 @@ import {
|
||||
|
||||
// list of filaments
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property } = req.query;
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = ['type', 'vendor.name', 'diameter', 'color'];
|
||||
const allowedFilters = [
|
||||
'_id',
|
||||
'type',
|
||||
'vendor.name',
|
||||
'diameter',
|
||||
'color',
|
||||
'name',
|
||||
'vendor._id',
|
||||
'cost',
|
||||
];
|
||||
|
||||
var filter = {};
|
||||
|
||||
@ -26,7 +36,14 @@ router.get('/', isAuthenticated, (req, res) => {
|
||||
}
|
||||
}
|
||||
|
||||
listFilamentsRouteHandler(req, res, page, limit, property, filter);
|
||||
listFilamentsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
router.get('/properties', isAuthenticated, (req, res) => {
|
||||
let properties = convertPropertiesString(req.query.properties);
|
||||
const allowedFilters = ['diameter', 'type', 'vendor'];
|
||||
const filter = getFilter(req.query, allowedFilters, false);
|
||||
listFilamentsByPropertiesRouteHandler(req, res, properties, filter);
|
||||
});
|
||||
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
46
src/routes/management/hosts.js
Normal file
46
src/routes/management/hosts.js
Normal file
@ -0,0 +1,46 @@
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
import { getFilter, convertPropertiesString } from '../../utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
listHostsRouteHandler,
|
||||
getHostRouteHandler,
|
||||
editHostRouteHandler,
|
||||
newHostRouteHandler,
|
||||
deleteHostRouteHandler,
|
||||
listHostsByPropertiesRouteHandler,
|
||||
} from '../../services/management/hosts.js';
|
||||
|
||||
// list of hosts
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
const allowedFilters = ['_id', 'name', 'tags'];
|
||||
const filter = getFilter(req.query, allowedFilters);
|
||||
listHostsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
router.get('/properties', isAuthenticated, (req, res) => {
|
||||
let properties = convertPropertiesString(req.query.properties);
|
||||
const allowedFilters = ['tags'];
|
||||
const filter = getFilter(req.query, allowedFilters, false);
|
||||
listHostsByPropertiesRouteHandler(req, res, properties, filter);
|
||||
});
|
||||
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
newHostRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getHostRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editHostRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.delete('/:id', isAuthenticated, async (req, res) => {
|
||||
deleteHostRouteHandler(req, res);
|
||||
});
|
||||
|
||||
export default router;
|
||||
@ -12,9 +12,9 @@ const router = express.Router();
|
||||
|
||||
// List note types
|
||||
router.get('/', isAuthenticated, async (req, res) => {
|
||||
const { page, limit, property, sort, order } = req.query;
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = ['name', 'active'];
|
||||
const allowedFilters = ['name', 'active', 'color', '_id'];
|
||||
|
||||
var filter = {};
|
||||
|
||||
@ -27,7 +27,7 @@ router.get('/', isAuthenticated, async (req, res) => {
|
||||
}
|
||||
}
|
||||
|
||||
listNoteTypesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
|
||||
listNoteTypesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
// Get single note type
|
||||
|
||||
@ -16,7 +16,7 @@ import {
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = ['products', 'name'];
|
||||
const allowedFilters = ['products', 'name', 'product._id'];
|
||||
|
||||
var filter = {};
|
||||
|
||||
@ -36,14 +36,6 @@ router.post('/', isAuthenticated, (req, res) => {
|
||||
newPartRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.post('/:id/content', isAuthenticated, (req, res) => {
|
||||
uploadPartFileContentRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get('/:id/content', isAuthenticated, (req, res) => {
|
||||
getPartFileContentRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getPartRouteHandler(req, res);
|
||||
});
|
||||
@ -53,4 +45,12 @@ router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editPartRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.post('/:id/content', isAuthenticated, (req, res) => {
|
||||
uploadPartFileContentRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get('/:id/content', isAuthenticated, (req, res) => {
|
||||
getPartFileContentRouteHandler(req, res);
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
@ -11,7 +11,7 @@ import {
|
||||
|
||||
// list of users
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property } = req.query;
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = ['username', 'name', 'firstName', 'lastName'];
|
||||
|
||||
@ -26,7 +26,7 @@ router.get('/', isAuthenticated, (req, res) => {
|
||||
}
|
||||
}
|
||||
|
||||
listUsersRouteHandler(req, res, page, limit, property, filter);
|
||||
listUsersRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
router.get('/:id', isAuthenticated, (req, res) => {
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
import { parseFilter } from '../../utils.js';
|
||||
import { getFilter, convertPropertiesString } from '../../utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
@ -8,26 +8,23 @@ import {
|
||||
getVendorRouteHandler,
|
||||
editVendorRouteHandler,
|
||||
newVendorRouteHandler,
|
||||
deleteVendorRouteHandler,
|
||||
listVendorsByPropertiesRouteHandler,
|
||||
} from '../../services/management/vendors.js';
|
||||
|
||||
// list of vendors
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property } = req.query;
|
||||
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
const allowedFilters = ['country'];
|
||||
const filter = getFilter(req.query, allowedFilters);
|
||||
listVendorsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
var filter = {};
|
||||
|
||||
for (const [key, value] of Object.entries(req.query)) {
|
||||
for (var i = 0; i < allowedFilters.length; i++) {
|
||||
if (key == allowedFilters[i]) {
|
||||
const parsedFilter = parseFilter(key, value);
|
||||
filter = { ...filter, ...parsedFilter };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
listVendorsRouteHandler(req, res, page, limit, property, filter);
|
||||
router.get('/properties', isAuthenticated, (req, res) => {
|
||||
let properties = convertPropertiesString(req.query.properties);
|
||||
const allowedFilters = ['country'];
|
||||
const filter = getFilter(req.query, allowedFilters, false);
|
||||
listVendorsByPropertiesRouteHandler(req, res, properties, filter);
|
||||
});
|
||||
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
@ -38,9 +35,12 @@ router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getVendorRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// update printer info
|
||||
router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editVendorRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.delete('/:id', isAuthenticated, async (req, res) => {
|
||||
deleteVendorRouteHandler(req, res);
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import express from "express";
|
||||
import { isAuthenticated, keycloak } from "../../keycloak.js";
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
import {
|
||||
forgotPasswordRouteHandler,
|
||||
loginRouteHandler,
|
||||
@ -7,31 +7,48 @@ import {
|
||||
userRouteHandler,
|
||||
logoutRouteHandler,
|
||||
refreshTokenRouteHandler,
|
||||
} from "../../services/misc/auth.js";
|
||||
loginTokenRouteHandler,
|
||||
} from '../../services/misc/auth.js';
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
router.get("/login", async (req, res) => {
|
||||
router.get('/login', async (req, res) => {
|
||||
loginRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get("/callback", async (req, res) => {
|
||||
router.get('/app/login', async (req, res) => {
|
||||
loginRouteHandler(req, res, true);
|
||||
});
|
||||
|
||||
router.get('/callback', async (req, res) => {
|
||||
loginCallbackRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get("/refresh", async (req, res) => {
|
||||
router.get('/app/callback', async (req, res) => {
|
||||
loginCallbackRouteHandler(req, res, true);
|
||||
});
|
||||
|
||||
router.get('/token', async (req, res) => {
|
||||
loginTokenRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get('/app/token', async (req, res) => {
|
||||
loginTokenRouteHandler(req, res, true);
|
||||
});
|
||||
|
||||
router.get('/refresh', async (req, res) => {
|
||||
refreshTokenRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get("/user", isAuthenticated, async (req, res) => {
|
||||
router.get('/user', isAuthenticated, async (req, res) => {
|
||||
userRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get("/logout", (req, res) => {
|
||||
router.get('/logout', (req, res) => {
|
||||
logoutRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.post("/password-forgot", async (req, res) => {
|
||||
router.post('/password-forgot', async (req, res) => {
|
||||
const { email } = req.body;
|
||||
await forgotPasswordRouteHandler(req, res, email);
|
||||
});
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
import express from "express";
|
||||
import { isAuthenticated } from "../../keycloak.js";
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
|
||||
const router = express.Router();
|
||||
import { getSpotlightRouteHandler } from "../../services/misc/spotlight.js";
|
||||
import { getSpotlightRouteHandler } from '../../services/misc/spotlight.js';
|
||||
|
||||
router.get("/:query", isAuthenticated, (req, res) => {
|
||||
router.get('/:query', isAuthenticated, (req, res) => {
|
||||
getSpotlightRouteHandler(req, res);
|
||||
});
|
||||
|
||||
|
||||
@ -1,44 +1,35 @@
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
import { parseFilter } from '../../utils.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
listGCodeFilesRouteHandler,
|
||||
listGCodeFilesByPropertiesRouteHandler,
|
||||
getGCodeFileRouteHandler,
|
||||
editGCodeFileRouteHandler,
|
||||
newGCodeFileRouteHandler,
|
||||
parseGCodeFileHandler,
|
||||
uploadGCodeFileContentRouteHandler,
|
||||
getGCodeFileContentRouteHandler,
|
||||
deleteGCodeFileRouteHandler,
|
||||
} from '../../services/production/gcodefiles.js';
|
||||
import { convertPropertiesString, getFilter } from '../../utils.js';
|
||||
|
||||
// list of printers
|
||||
// list of vendors
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
|
||||
const allowedFilters = [
|
||||
'filament.type',
|
||||
'filament.vendor.name',
|
||||
'filament.diameter',
|
||||
'filament.color',
|
||||
];
|
||||
|
||||
var filter = {};
|
||||
|
||||
for (const [key, value] of Object.entries(req.query)) {
|
||||
for (var i = 0; i < allowedFilters.length; i++) {
|
||||
if (key == allowedFilters[i]) {
|
||||
const parsedFilter = parseFilter(key, value);
|
||||
filter = { ...filter, ...parsedFilter };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const allowedFilters = ['_id', 'filament'];
|
||||
const filter = getFilter(req.query, allowedFilters);
|
||||
listGCodeFilesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
// new pritner
|
||||
router.get('/properties', isAuthenticated, (req, res) => {
|
||||
let properties = convertPropertiesString(req.query.properties);
|
||||
const allowedFilters = ['filament'];
|
||||
const filter = getFilter(req.query, allowedFilters, false);
|
||||
listGCodeFilesByPropertiesRouteHandler(req, res, properties, filter);
|
||||
});
|
||||
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
newGCodeFileRouteHandler(req, res);
|
||||
});
|
||||
@ -47,11 +38,14 @@ router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getGCodeFileRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// update printer info
|
||||
router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editGCodeFileRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.delete('/:id', isAuthenticated, async (req, res) => {
|
||||
deleteGCodeFileRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.post('/:id/content', isAuthenticated, (req, res) => {
|
||||
uploadGCodeFileContentRouteHandler(req, res);
|
||||
});
|
||||
|
||||
@ -1,38 +1,39 @@
|
||||
import express from "express";
|
||||
import { isAuthenticated } from "../../keycloak.js";
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
listJobsRouteHandler,
|
||||
getJobRouteHandler,
|
||||
editJobRouteHandler,
|
||||
createJobRouteHandler,
|
||||
getJobStatsRouteHandler
|
||||
} from "../../services/production/jobs.js";
|
||||
newJobRouteHandler,
|
||||
deleteJobRouteHandler,
|
||||
getJobStatsRouteHandler,
|
||||
} from '../../services/production/jobs.js';
|
||||
import { getFilter } from '../../utils.js';
|
||||
|
||||
// list of print jobs
|
||||
router.get("/", isAuthenticated, (req, res) => {
|
||||
const { page, limit } = req.body;
|
||||
listJobsRouteHandler(req, res, page, limit);
|
||||
// list of jobs
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
const allowedFilters = ['country'];
|
||||
const filter = getFilter(req.query, allowedFilters);
|
||||
listJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
// get printer stats
|
||||
router.get("/stats", isAuthenticated, (req, res) => {
|
||||
getJobStatsRouteHandler(req, res);
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
newJobRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// create new print job
|
||||
router.post("/", isAuthenticated, (req, res) => {
|
||||
createJobRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get("/:id", isAuthenticated, (req, res) => {
|
||||
router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getJobRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// update job info
|
||||
router.put("/:id", isAuthenticated, async (req, res) => {
|
||||
editJobRouteHandler(req, res);
|
||||
router.delete('/:id', isAuthenticated, async (req, res) => {
|
||||
deleteJobRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// get printer stats
|
||||
router.get('/stats', isAuthenticated, (req, res) => {
|
||||
getJobStatsRouteHandler(req, res);
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
||||
@ -1,40 +1,49 @@
|
||||
import express from "express";
|
||||
import passport from "passport";
|
||||
import { keycloak, isAuthenticated } from "../../keycloak.js";
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
|
||||
const router = express.Router();
|
||||
import {
|
||||
listPrintersRouteHandler,
|
||||
editPrinterRouteHandler,
|
||||
getPrinterRouteHandler,
|
||||
createPrinterRouteHandler,
|
||||
newPrinterRouteHandler,
|
||||
getPrinterStatsRouteHandler,
|
||||
} from "../../services/production/printers.js";
|
||||
listPrintersByPropertiesRouteHandler,
|
||||
} from '../../services/production/printers.js';
|
||||
import { convertPropertiesString, getFilter } from '../../utils.js';
|
||||
|
||||
// list of printers
|
||||
router.get("/", isAuthenticated, (req, res) => {
|
||||
const { page, limit } = req.body;
|
||||
listPrintersRouteHandler(req, res, page, limit);
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
const allowedFilters = ['tags'];
|
||||
const filter = getFilter(req.query, allowedFilters);
|
||||
listPrintersRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
router.get('/properties', isAuthenticated, (req, res) => {
|
||||
let properties = convertPropertiesString(req.query.properties);
|
||||
const allowedFilters = ['tags'];
|
||||
const filter = getFilter(req.query, allowedFilters, false);
|
||||
listPrintersByPropertiesRouteHandler(req, res, properties, filter);
|
||||
});
|
||||
|
||||
// create new printer
|
||||
router.post("/", isAuthenticated, (req, res) => {
|
||||
createPrinterRouteHandler(req, res);
|
||||
router.post('/', isAuthenticated, (req, res) => {
|
||||
newPrinterRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// get printer stats
|
||||
router.get("/stats", isAuthenticated, (req, res) => {
|
||||
router.get('/stats', isAuthenticated, (req, res) => {
|
||||
getPrinterStatsRouteHandler(req, res);
|
||||
});
|
||||
|
||||
router.get("/:id", isAuthenticated, (req, res) => {
|
||||
router.get('/:id', isAuthenticated, (req, res) => {
|
||||
getPrinterRouteHandler(req, res);
|
||||
});
|
||||
|
||||
// update printer info
|
||||
router.put("/:id", isAuthenticated, async (req, res) => {
|
||||
router.put('/:id', isAuthenticated, async (req, res) => {
|
||||
editPrinterRouteHandler(req, res);
|
||||
});
|
||||
|
||||
|
||||
export default router;
|
||||
|
||||
27
src/routes/production/subjobs.js
Normal file
27
src/routes/production/subjobs.js
Normal file
@ -0,0 +1,27 @@
|
||||
import express from 'express';
|
||||
import { isAuthenticated } from '../../keycloak.js';
|
||||
|
||||
const router = express.Router();
|
||||
import { listSubJobsRouteHandler } from '../../services/production/subjobs.js';
|
||||
import { parseFilter } from '../../utils.js';
|
||||
|
||||
// list of print subjobs
|
||||
router.get('/', isAuthenticated, (req, res) => {
|
||||
const { page, limit, property, search, sort, order } = req.query;
|
||||
const allowedFilters = ['_id', 'job._id'];
|
||||
|
||||
var filter = {};
|
||||
|
||||
for (const [key, value] of Object.entries(req.query)) {
|
||||
for (var i = 0; i < allowedFilters.length; i++) {
|
||||
if (key == allowedFilters[i]) {
|
||||
const parsedFilter = parseFilter(key, value);
|
||||
filter = { ...filter, ...parsedFilter };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
listSubJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
|
||||
});
|
||||
|
||||
export default router;
|
||||
@ -8,12 +8,15 @@ const filamentStockSchema = new Schema(
|
||||
type: { type: String, required: true },
|
||||
percent: { type: String, required: true },
|
||||
},
|
||||
startingGrossWeight: { type: Number, required: true },
|
||||
startingNetWeight: { type: Number, required: true },
|
||||
currentGrossWeight: { type: Number, required: true },
|
||||
currentNetWeight: { type: Number, required: true },
|
||||
startingWeight: {
|
||||
net: { type: Number, required: true },
|
||||
gross: { type: Number, required: true },
|
||||
},
|
||||
currentWeight: {
|
||||
net: { type: Number, required: true },
|
||||
gross: { type: Number, required: true },
|
||||
},
|
||||
filament: { type: mongoose.Schema.Types.ObjectId, ref: 'filament' },
|
||||
stockEvents: [{ type: mongoose.Schema.Types.ObjectId, ref: 'stockEvent' }],
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
@ -3,12 +3,29 @@ const { Schema } = mongoose;
|
||||
|
||||
const stockEventSchema = new Schema(
|
||||
{
|
||||
type: { type: String, required: true },
|
||||
value: { type: Number, required: true },
|
||||
current: { type: Number, required: true },
|
||||
unit: { type: String, required: true },
|
||||
subJob: { type: Schema.Types.ObjectId, ref: 'subJob', required: false },
|
||||
job: { type: Schema.Types.ObjectId, ref: 'job', required: false },
|
||||
filamentStock: { type: Schema.Types.ObjectId, ref: 'filamentStock', required: true },
|
||||
parent: {
|
||||
type: Schema.Types.ObjectId,
|
||||
refPath: 'parentType',
|
||||
required: true,
|
||||
},
|
||||
parentType: {
|
||||
type: String,
|
||||
required: true,
|
||||
enum: ['filamentStock', 'partStock', 'productStock'], // Add other models as needed
|
||||
},
|
||||
owner: {
|
||||
type: Schema.Types.ObjectId,
|
||||
refPath: 'ownerType',
|
||||
required: true,
|
||||
},
|
||||
ownerType: {
|
||||
type: String,
|
||||
required: true,
|
||||
enum: ['user', 'subJob', 'stockAudit'],
|
||||
},
|
||||
timestamp: { type: Date, default: Date.now },
|
||||
},
|
||||
{ timestamps: true }
|
||||
|
||||
@ -3,14 +3,20 @@ const { Schema } = mongoose;
|
||||
|
||||
const auditLogSchema = new Schema(
|
||||
{
|
||||
oldValue: { type: Object, required: true },
|
||||
newValue: { type: Object, required: true },
|
||||
target: {
|
||||
type: Schema.Types.ObjectId,
|
||||
refPath: 'targetModel',
|
||||
changes: {
|
||||
old: { type: Object, required: false },
|
||||
new: { type: Object, required: false },
|
||||
},
|
||||
operation: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
targetModel: {
|
||||
parent: {
|
||||
type: Schema.Types.ObjectId,
|
||||
refPath: 'parentType',
|
||||
required: true,
|
||||
},
|
||||
parentType: {
|
||||
type: String,
|
||||
required: true,
|
||||
enum: [
|
||||
@ -21,6 +27,7 @@ const auditLogSchema = new Schema(
|
||||
'stockEvent',
|
||||
'vendor',
|
||||
'part',
|
||||
'host',
|
||||
'product',
|
||||
'material',
|
||||
'filament',
|
||||
@ -28,17 +35,20 @@ const auditLogSchema = new Schema(
|
||||
'noteType',
|
||||
'note',
|
||||
'user',
|
||||
'documentSize',
|
||||
'documentTemplate',
|
||||
'documentPrinter',
|
||||
], // Add other models as needed
|
||||
},
|
||||
owner: {
|
||||
type: Schema.Types.ObjectId,
|
||||
refPath: 'ownerModel',
|
||||
refPath: 'ownerType',
|
||||
required: true,
|
||||
},
|
||||
ownerModel: {
|
||||
ownerType: {
|
||||
type: String,
|
||||
required: true,
|
||||
enum: ['user', 'printer'],
|
||||
enum: ['user', 'printer', 'host'],
|
||||
},
|
||||
},
|
||||
{ timestamps: true }
|
||||
|
||||
31
src/schemas/management/documentprinter.schema.js
Normal file
31
src/schemas/management/documentprinter.schema.js
Normal file
@ -0,0 +1,31 @@
|
||||
import mongoose from 'mongoose';
|
||||
const { Schema } = mongoose;
|
||||
|
||||
const documentPrinterSchema = new Schema(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
tags: [{ type: String }],
|
||||
online: { type: Boolean, required: true, default: false },
|
||||
state: {
|
||||
type: { type: String, required: true, default: 'offline' },
|
||||
message: { type: String, required: false },
|
||||
percent: { type: Number, required: false },
|
||||
},
|
||||
connectedAt: { type: Date, default: null },
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
// Add virtual id getter
|
||||
documentPrinterSchema.virtual('id').get(function () {
|
||||
return this._id.toHexString();
|
||||
});
|
||||
|
||||
// Configure JSON serialization to include virtuals
|
||||
documentPrinterSchema.set('toJSON', { virtuals: true });
|
||||
|
||||
export const documentPrinterModel = mongoose.model('documentPrinter', documentPrinterSchema);
|
||||
33
src/schemas/management/documentsize.schema.js
Normal file
33
src/schemas/management/documentsize.schema.js
Normal file
@ -0,0 +1,33 @@
|
||||
import mongoose from 'mongoose';
|
||||
const { Schema } = mongoose;
|
||||
|
||||
const documentSizeSchema = new Schema(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
width: {
|
||||
type: Number,
|
||||
required: true,
|
||||
default: 0,
|
||||
},
|
||||
height: {
|
||||
type: Number,
|
||||
required: true,
|
||||
default: 0,
|
||||
},
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
// Add virtual id getter
|
||||
documentSizeSchema.virtual('id').get(function () {
|
||||
return this._id.toHexString();
|
||||
});
|
||||
|
||||
// Configure JSON serialization to include virtuals
|
||||
documentSizeSchema.set('toJSON', { virtuals: true });
|
||||
|
||||
export const documentSizeModel = mongoose.model('documentSize', documentSizeSchema);
|
||||
61
src/schemas/management/documenttemplate.schema.js
Normal file
61
src/schemas/management/documenttemplate.schema.js
Normal file
@ -0,0 +1,61 @@
|
||||
import mongoose from 'mongoose';
|
||||
const { Schema } = mongoose;
|
||||
|
||||
const documentTemplateSchema = new Schema(
|
||||
{
|
||||
name: {
|
||||
type: String,
|
||||
required: true,
|
||||
unique: true,
|
||||
},
|
||||
objectType: { type: String, required: false },
|
||||
tags: [{ type: String }],
|
||||
active: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
default: true,
|
||||
},
|
||||
global: {
|
||||
type: Boolean,
|
||||
required: true,
|
||||
default: false,
|
||||
},
|
||||
parent: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'documentTemplate',
|
||||
required: false,
|
||||
},
|
||||
documentSize: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'documentSize',
|
||||
required: true,
|
||||
},
|
||||
documentPrinters: [
|
||||
{
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'documentPrinter',
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
content: {
|
||||
type: String,
|
||||
required: false,
|
||||
default: '<Container></Container>',
|
||||
},
|
||||
testObject: {
|
||||
type: Schema.Types.Mixed,
|
||||
required: false,
|
||||
},
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
// Add virtual id getter
|
||||
documentTemplateSchema.virtual('id').get(function () {
|
||||
return this._id.toHexString();
|
||||
});
|
||||
|
||||
// Configure JSON serialization to include virtuals
|
||||
documentTemplateSchema.set('toJSON', { virtuals: true });
|
||||
|
||||
export const documentTemplateModel = mongoose.model('documentTemplate', documentTemplateSchema);
|
||||
@ -1,10 +1,58 @@
|
||||
import mongoose from 'mongoose';
|
||||
|
||||
// Define the device schema
|
||||
const deviceInfoSchema = new mongoose.Schema(
|
||||
{
|
||||
os: {
|
||||
platform: { type: String },
|
||||
type: { type: String },
|
||||
release: { type: String },
|
||||
arch: { type: String },
|
||||
hostname: { type: String },
|
||||
uptime: { type: Number },
|
||||
},
|
||||
cpu: {
|
||||
cores: { type: Number },
|
||||
model: { type: String },
|
||||
speedMHz: { type: Number },
|
||||
},
|
||||
memory: {
|
||||
totalGB: { type: String }, // stored as string from .toFixed(2), could also use Number
|
||||
freeGB: { type: String },
|
||||
},
|
||||
network: {
|
||||
type: mongoose.Schema.Types.Mixed, // since it's an object with dynamic interface names
|
||||
},
|
||||
user: {
|
||||
uid: { type: Number },
|
||||
gid: { type: Number },
|
||||
username: { type: String },
|
||||
homedir: { type: String },
|
||||
shell: { type: String },
|
||||
},
|
||||
process: {
|
||||
nodeVersion: { type: String },
|
||||
pid: { type: Number },
|
||||
cwd: { type: String },
|
||||
execPath: { type: String },
|
||||
},
|
||||
},
|
||||
{ _id: false }
|
||||
);
|
||||
|
||||
const hostSchema = new mongoose.Schema({
|
||||
online: { required: true, type: Boolean },
|
||||
hostId: { required: true, type: String },
|
||||
connectedAt: { required: true, type: Date },
|
||||
status: { type: { required: true, type: String } },
|
||||
name: { required: true, type: String },
|
||||
tags: [{ required: false, type: String }],
|
||||
online: { required: true, type: Boolean, default: false },
|
||||
state: {
|
||||
type: { type: String, required: true, default: 'offline' },
|
||||
message: { type: String, required: false },
|
||||
percent: { type: Number, required: false },
|
||||
},
|
||||
active: { required: true, type: Boolean, default: true },
|
||||
connectedAt: { required: false, type: Date },
|
||||
authCode: { type: { required: false, type: String } },
|
||||
deviceInfo: { deviceInfoSchema },
|
||||
});
|
||||
|
||||
hostSchema.virtual('id').get(function () {
|
||||
|
||||
@ -9,7 +9,8 @@ const partSchema = new Schema(
|
||||
product: { type: mongoose.Schema.Types.ObjectId, ref: 'product' },
|
||||
globalPricing: { type: Boolean, default: true },
|
||||
priceMode: { type: String, default: 'margin' },
|
||||
price: { type: Number, required: false },
|
||||
amount: { type: Number, required: false },
|
||||
margin: { type: Number, required: false },
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
@ -8,9 +8,9 @@ const productSchema = new Schema(
|
||||
tags: [{ type: String }],
|
||||
version: { type: String },
|
||||
priceMode: { type: String, default: 'margin' },
|
||||
price: { type: Number, required: false },
|
||||
margin: { type: Number, required: false },
|
||||
amount: { type: Number, required: false },
|
||||
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
|
||||
parts: [{ type: mongoose.Schema.Types.ObjectId, ref: 'part' }],
|
||||
},
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
@ -4,6 +4,11 @@ const { Schema } = mongoose;
|
||||
const noteSchema = new mongoose.Schema({
|
||||
parent: {
|
||||
type: Schema.Types.ObjectId,
|
||||
refPath: 'parentType',
|
||||
required: true,
|
||||
},
|
||||
parentType: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
content: {
|
||||
|
||||
@ -1,29 +1,32 @@
|
||||
import mongoose from 'mongoose';
|
||||
const { Schema } = mongoose;
|
||||
|
||||
const jobSchema = new mongoose.Schema({
|
||||
state: {
|
||||
type: { required: true, type: String },
|
||||
const jobSchema = new mongoose.Schema(
|
||||
{
|
||||
state: {
|
||||
type: { required: true, type: String },
|
||||
},
|
||||
printers: [{ type: Schema.Types.ObjectId, ref: 'printer', required: false }],
|
||||
createdAt: { required: true, type: Date },
|
||||
updatedAt: { required: true, type: Date },
|
||||
startedAt: { required: false, type: Date },
|
||||
finishedAt: { required: false, type: Date },
|
||||
gcodeFile: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'gcodeFile',
|
||||
required: false,
|
||||
},
|
||||
quantity: {
|
||||
type: Number,
|
||||
required: true,
|
||||
default: 1,
|
||||
min: 1,
|
||||
},
|
||||
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob', required: false }],
|
||||
notes: [{ type: Schema.Types.ObjectId, ref: 'note', required: false }],
|
||||
},
|
||||
printers: [{ type: Schema.Types.ObjectId, ref: 'printer', required: false }],
|
||||
createdAt: { required: true, type: Date },
|
||||
updatedAt: { required: true, type: Date },
|
||||
startedAt: { required: false, type: Date },
|
||||
finishedAt: { required: false, type: Date },
|
||||
gcodeFile: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: 'gcodeFile',
|
||||
required: false,
|
||||
},
|
||||
quantity: {
|
||||
type: Number,
|
||||
required: true,
|
||||
default: 1,
|
||||
min: 1,
|
||||
},
|
||||
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob', required: false }],
|
||||
notes: [{ type: Schema.Types.ObjectId, ref: 'note', required: false }],
|
||||
});
|
||||
{ timestamps: true }
|
||||
);
|
||||
|
||||
jobSchema.virtual('id').get(function () {
|
||||
return this._id.toHexString();
|
||||
|
||||
@ -44,6 +44,7 @@ const printerSchema = new Schema(
|
||||
currentFilamentStock: { type: Schema.Types.ObjectId, ref: 'filamentStock' },
|
||||
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob' }],
|
||||
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', default: null },
|
||||
host: { type: Schema.Types.ObjectId, ref: 'host', default: null },
|
||||
alerts: [alertSchema],
|
||||
},
|
||||
{ timestamps: true }
|
||||
|
||||
@ -14,7 +14,7 @@ const subJobSchema = new mongoose.Schema({
|
||||
},
|
||||
subJobId: {
|
||||
type: String,
|
||||
required: true,
|
||||
required: false,
|
||||
},
|
||||
gcodeFile: {
|
||||
type: Schema.Types.ObjectId,
|
||||
|
||||
@ -4,7 +4,7 @@ import { filamentModel } from '../../schemas/management/filament.schema.js';
|
||||
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import { getAuditLogs } from '../../utils.js';
|
||||
import { distributeNew, flatternObjectIds, getAuditLogs, newAuditLog } from '../../utils.js';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
@ -84,20 +84,7 @@ export const getFilamentStockRouteHandler = async (req, res) => {
|
||||
.findOne({
|
||||
_id: id,
|
||||
})
|
||||
.populate('filament')
|
||||
.populate({
|
||||
path: 'stockEvents',
|
||||
populate: [
|
||||
{
|
||||
path: 'subJob',
|
||||
select: 'number',
|
||||
},
|
||||
{
|
||||
path: 'job',
|
||||
select: 'startedAt',
|
||||
},
|
||||
],
|
||||
});
|
||||
.populate('filament');
|
||||
|
||||
if (!filamentStock) {
|
||||
logger.warn(`Filament stock not found with supplied id.`);
|
||||
@ -115,48 +102,6 @@ export const getFilamentStockRouteHandler = async (req, res) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const editFilamentStockRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
// Fetch the filamentStock with the given remote address
|
||||
const filamentStock = await filamentStockModel.findOne({ _id: id });
|
||||
|
||||
if (!filamentStock) {
|
||||
// Error handling
|
||||
logger.warn(`Filament stock not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Filament stock not found.' });
|
||||
}
|
||||
|
||||
logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
|
||||
|
||||
try {
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
contact: req.body.contact,
|
||||
country: req.body.country,
|
||||
name: req.body.name,
|
||||
website: req.body.website,
|
||||
phone: req.body.phone,
|
||||
email: req.body.email,
|
||||
};
|
||||
|
||||
const result = await filamentStockModel.updateOne({ _id: id }, { $set: updateData });
|
||||
if (result.nModified === 0) {
|
||||
logger.error('No filament stock updated.');
|
||||
res.status(500).send({ error: 'No filament stocks updated.' });
|
||||
}
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating filament stock:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
}
|
||||
res.send('OK');
|
||||
} catch (fetchError) {
|
||||
logger.error('Error fetching filament stock:', fetchError);
|
||||
res.status(500).send({ error: fetchError.message });
|
||||
}
|
||||
};
|
||||
|
||||
export const newFilamentStockRouteHandler = async (req, res) => {
|
||||
var filament = null;
|
||||
|
||||
@ -181,46 +126,61 @@ export const newFilamentStockRouteHandler = async (req, res) => {
|
||||
|
||||
try {
|
||||
logger.warn(req.body);
|
||||
const startingGrossWeight = req.body.startingGrossWeight;
|
||||
const startingWeight = req.body.startingWeight; // { net, gross }
|
||||
if (!startingWeight || typeof startingWeight.gross !== 'number') {
|
||||
return res.status(400).send({ error: 'startingWeight.gross is required' });
|
||||
}
|
||||
// Calculate net if not provided
|
||||
const net =
|
||||
typeof startingWeight.net === 'number'
|
||||
? startingWeight.net
|
||||
: startingWeight.gross - filament.emptySpoolWeight;
|
||||
const starting = {
|
||||
gross: startingWeight.gross,
|
||||
net: net,
|
||||
};
|
||||
const newFilamentStock = {
|
||||
startingGrossWeight: startingGrossWeight,
|
||||
startingNetWeight: startingGrossWeight - filament.emptySpoolWeight,
|
||||
currentGrossWeight: startingGrossWeight,
|
||||
currentNetWeight: startingGrossWeight - filament.emptySpoolWeight,
|
||||
filament: req.body.filament._id,
|
||||
startingWeight: starting,
|
||||
currentWeight: { ...starting },
|
||||
filament: req.body.filament,
|
||||
state: {
|
||||
type: 'unconsumed',
|
||||
percent: 0,
|
||||
percent: '0', // schema requires string
|
||||
},
|
||||
};
|
||||
|
||||
const result = await filamentStockModel.create(newFilamentStock);
|
||||
if (result.nCreated === 0) {
|
||||
const result = await filamentStockModel.create(flatternObjectIds(newFilamentStock));
|
||||
|
||||
if (!result) {
|
||||
logger.error('No filament stock created.');
|
||||
return res.status(500).send({ error: 'No filament stock created.' });
|
||||
}
|
||||
|
||||
// Create initial stock event
|
||||
await newAuditLog(newFilamentStock, result._id, 'filamentStock', req.user);
|
||||
await distributeNew(result._id, 'filamentStock');
|
||||
|
||||
console.log(result);
|
||||
|
||||
// Create initial stock event (optional, but keep logic if needed)
|
||||
const stockEvent = {
|
||||
type: 'initial',
|
||||
value: startingGrossWeight - filament.emptySpoolWeight,
|
||||
value: starting.net,
|
||||
current: starting.net,
|
||||
unit: 'g',
|
||||
filamentStock: result._id,
|
||||
parent: result,
|
||||
parentType: 'filamentStock',
|
||||
owner: req.user,
|
||||
ownerType: 'user',
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
const eventResult = await stockEventModel.create(stockEvent);
|
||||
const eventResult = await stockEventModel.create(flatternObjectIds(stockEvent));
|
||||
if (!eventResult) {
|
||||
logger.error('Failed to create initial stock event.');
|
||||
return res.status(500).send({ error: 'Failed to create initial stock event.' });
|
||||
}
|
||||
|
||||
// Update the filament stock with the stock event reference
|
||||
await filamentStockModel.updateOne(
|
||||
{ _id: result._id },
|
||||
{ $push: { stockEvents: eventResult._id } }
|
||||
);
|
||||
await newAuditLog(stockEvent, eventResult._id, 'stockEvent', req.user);
|
||||
|
||||
return res.send({ status: 'ok' });
|
||||
} catch (updateError) {
|
||||
|
||||
@ -12,75 +12,43 @@ export const listStockEventsRouteHandler = async (
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
sort = '',
|
||||
sort = 'createdAt',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
let stockEvents;
|
||||
let aggregateCommand = [];
|
||||
const sortOrder = order === 'descend' ? 1 : -1;
|
||||
|
||||
// Lookup filamentStock
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'filamentstocks',
|
||||
localField: 'filamentStock',
|
||||
foreignField: '_id',
|
||||
as: 'filamentStock',
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({ $unwind: '$filamentStock' });
|
||||
|
||||
// Conditionally lookup subJob only if it exists
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'subjobs',
|
||||
localField: 'subJob',
|
||||
foreignField: '_id',
|
||||
as: 'subJob',
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$addFields: {
|
||||
subJob: {
|
||||
$cond: {
|
||||
if: { $eq: [{ $size: '$subJob' }, 0] },
|
||||
then: null,
|
||||
else: { $arrayElemAt: ['$subJob', 0] },
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
if (filter != {}) {
|
||||
aggregateCommand.push({ $match: filter });
|
||||
if (!sort || sort != '') {
|
||||
sort = 'createdAt';
|
||||
}
|
||||
|
||||
if (property != '') {
|
||||
aggregateCommand.push({ $group: { _id: `$${property}` } });
|
||||
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
|
||||
// Translate parent._id to parent for Mongoose
|
||||
if (filter['parent._id']) {
|
||||
filter.parent = filter['parent._id'];
|
||||
delete filter['parent._id'];
|
||||
}
|
||||
|
||||
// Add sorting if sort parameter is provided
|
||||
if (sort) {
|
||||
const sortOrder = order === 'descend' ? -1 : 1;
|
||||
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
||||
// Translate owner._id to parent for Mongoose
|
||||
if (filter['owner._id']) {
|
||||
filter.owner = filter['owner._id'];
|
||||
delete filter['owner._id'];
|
||||
}
|
||||
|
||||
// Add pagination after sorting
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
console.log('Aggregation pipeline:', JSON.stringify(aggregateCommand, null, 2));
|
||||
|
||||
stockEvents = await stockEventModel.aggregate(aggregateCommand);
|
||||
// Use find with population and filter
|
||||
let query = stockEventModel
|
||||
.find(filter)
|
||||
.sort({ [sort]: sortOrder })
|
||||
.skip(skip)
|
||||
.limit(Number(limit))
|
||||
.populate('owner', 'name _id')
|
||||
.populate('parent', 'name _id');
|
||||
|
||||
const stockEvents = await query;
|
||||
logger.trace(
|
||||
`List of stock events (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
|
||||
`List of stock events (Page ${page}, Limit ${limit}, Sort ${sort}, Order ${order}):`,
|
||||
stockEvents
|
||||
);
|
||||
res.send(stockEvents);
|
||||
|
||||
@ -7,21 +7,56 @@ dotenv.config();
|
||||
const logger = log4js.getLogger('AuditLogs');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
export const listAuditLogsRouteHandler = async (req, res, page = 1, limit = 25, filter = {}) => {
|
||||
export const listAuditLogsRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
filter = {},
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
const sortOrder = order === 'descend' ? 1 : -1;
|
||||
|
||||
// Use find with population
|
||||
const auditLogs = await auditLogModel
|
||||
if (!sort || sort != '') {
|
||||
sort = 'createdAt';
|
||||
}
|
||||
// Translate parent._id to parent for Mongoose
|
||||
if (filter['parent._id']) {
|
||||
filter.parent = filter['parent._id'];
|
||||
delete filter['parent._id'];
|
||||
}
|
||||
|
||||
// Translate owner._id to parent for Mongoose
|
||||
if (filter['owner._id']) {
|
||||
filter.owner = filter['owner._id'];
|
||||
delete filter['owner._id'];
|
||||
}
|
||||
|
||||
console.log('sort: ', { [sort]: sortOrder });
|
||||
|
||||
// Use find with population and filter
|
||||
let query = auditLogModel
|
||||
.find(filter)
|
||||
.sort({ [sort]: sortOrder })
|
||||
.skip(skip)
|
||||
.limit(Number(limit))
|
||||
.sort({ createdAt: -1 })
|
||||
.populate('owner', 'name _id');
|
||||
|
||||
logger.trace(`List of audit logs (Page ${page}, Limit ${limit}):`, auditLogs);
|
||||
res.send(auditLogs);
|
||||
const auditLogs = await query;
|
||||
logger.trace(
|
||||
`List of audit logs (Page ${page}, Limit ${limit}, Sort ${sort}, Order ${order}):`,
|
||||
auditLogs
|
||||
);
|
||||
|
||||
const expandedIdAuditLogs = auditLogs.map((auditLog) => {
|
||||
const expendedAuditLog = { ...auditLog._doc, parent: { _id: auditLog.parent } };
|
||||
return expendedAuditLog;
|
||||
});
|
||||
res.send(expandedIdAuditLogs);
|
||||
} catch (error) {
|
||||
logger.error('Error listing audit logs:', error);
|
||||
res.status(500).send({ error: error });
|
||||
@ -39,7 +74,7 @@ export const getAuditLogRouteHandler = async (req, res) => {
|
||||
})
|
||||
.populate('printer')
|
||||
.populate('owner')
|
||||
.populate('target');
|
||||
.populate('parent');
|
||||
|
||||
if (!auditLog) {
|
||||
logger.warn(`Audit log not found with supplied id.`);
|
||||
|
||||
171
src/services/management/documentprinters.js
Normal file
171
src/services/management/documentprinters.js
Normal file
@ -0,0 +1,171 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { documentPrinterModel } from '../../schemas/management/documentprinter.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import {
|
||||
deleteObject,
|
||||
listObjects,
|
||||
getObject,
|
||||
editObject,
|
||||
newObject,
|
||||
listObjectsByProperties,
|
||||
} from '../../database/database.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Document Templates');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
export const listDocumentPrintersRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
const result = await listObjects({
|
||||
model: documentPrinterModel,
|
||||
page,
|
||||
limit,
|
||||
property,
|
||||
filter,
|
||||
search,
|
||||
sort,
|
||||
order,
|
||||
populate: ['documentSize'],
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing document templates.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`List of document templates (Page ${page}, Limit ${limit}). Count: ${result.length}`
|
||||
);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const listDocumentPrintersByPropertiesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
properties = '',
|
||||
filter = {}
|
||||
) => {
|
||||
const result = await listObjectsByProperties({
|
||||
model: documentPrinterModel,
|
||||
properties,
|
||||
filter,
|
||||
populate: ['documentSize'],
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing document templates.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of document templates. Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getDocumentPrinterRouteHandler = async (req, res) => {
|
||||
const id = req.params.id;
|
||||
const result = await getObject({
|
||||
model: documentPrinterModel,
|
||||
id,
|
||||
populate: ['documentSize'],
|
||||
});
|
||||
if (result?.error) {
|
||||
logger.warn(`Document Template not found with supplied id.`);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
logger.debug(`Retreived document template with ID: ${id}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const editDocumentPrinterRouteHandler = async (req, res) => {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
logger.trace(`Document Template with ID: ${id}`);
|
||||
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
tags: req.body.tags,
|
||||
active: req.body.active,
|
||||
global: req.body.global,
|
||||
parent: req.body.parent,
|
||||
documentSize: req.body.documentSize,
|
||||
documentPrinters: req.body.documentPrinters,
|
||||
};
|
||||
// Create audit log before updating
|
||||
const result = await editObject({
|
||||
model: documentPrinterModel,
|
||||
id,
|
||||
updateData,
|
||||
user: req.user,
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
logger.error('Error editing document template:', result.error);
|
||||
res.status(result).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`Edited document template with ID: ${id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const newDocumentPrinterRouteHandler = async (req, res) => {
|
||||
const newData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
tags: req.body.tags,
|
||||
active: req.body.active,
|
||||
isGlobal: req.body.isGlobal,
|
||||
globalDocumentPrinter: req.body.globalDocumentPrinter,
|
||||
documentSize: req.body.documentSize,
|
||||
documentPrinters: req.body.documentPrinters,
|
||||
};
|
||||
const result = await newObject({
|
||||
model: documentPrinterModel,
|
||||
newData,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No document template created:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`New document template with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const deleteDocumentPrinterRouteHandler = async (req, res) => {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
logger.trace(`Document Template with ID: ${id}`);
|
||||
|
||||
const result = await deleteObject({
|
||||
model: documentPrinterModel,
|
||||
id,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No document template deleted:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`Deleted document template with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
158
src/services/management/documentsizes.js
Normal file
158
src/services/management/documentsizes.js
Normal file
@ -0,0 +1,158 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { documentSizeModel } from '../../schemas/management/documentsize.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import {
|
||||
deleteObject,
|
||||
listObjects,
|
||||
getObject,
|
||||
editObject,
|
||||
newObject,
|
||||
listObjectsByProperties,
|
||||
} from '../../database/database.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Document Sizes');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
export const listDocumentSizesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
const result = await listObjects({
|
||||
model: documentSizeModel,
|
||||
page,
|
||||
limit,
|
||||
property,
|
||||
filter,
|
||||
search,
|
||||
sort,
|
||||
order,
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing document sizes.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of document sizes (Page ${page}, Limit ${limit}). Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const listDocumentSizesByPropertiesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
properties = '',
|
||||
filter = {}
|
||||
) => {
|
||||
const result = await listObjectsByProperties({
|
||||
model: documentSizeModel,
|
||||
properties,
|
||||
filter,
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing document sizes.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of document sizes. Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getDocumentSizeRouteHandler = async (req, res) => {
|
||||
const id = req.params.id;
|
||||
const result = await getObject({
|
||||
model: documentSizeModel,
|
||||
id,
|
||||
});
|
||||
if (result?.error) {
|
||||
logger.warn(`Document size not found with supplied id.`);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
logger.debug(`Retreived document size with ID: ${id}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const editDocumentSizeRouteHandler = async (req, res) => {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
logger.trace(`Document size with ID: ${id}`);
|
||||
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
width: req.body.width,
|
||||
height: req.body.height,
|
||||
};
|
||||
// Create audit log before updating
|
||||
const result = await editObject({
|
||||
model: documentSizeModel,
|
||||
id,
|
||||
updateData,
|
||||
user: req.user,
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
logger.error('Error editing document size:', result.error);
|
||||
res.status(result).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`Edited document size with ID: ${id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const newDocumentSizeRouteHandler = async (req, res) => {
|
||||
const newData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
width: req.body.width,
|
||||
height: req.body.height,
|
||||
};
|
||||
const result = await newObject({
|
||||
model: documentSizeModel,
|
||||
newData,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No document size created:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`New document size with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const deleteDocumentSizeRouteHandler = async (req, res) => {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
logger.trace(`Document size with ID: ${id}`);
|
||||
|
||||
const result = await deleteObject({
|
||||
model: documentSizeModel,
|
||||
id,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No document size deleted:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`Deleted document size with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
186
src/services/management/documenttemplates.js
Normal file
186
src/services/management/documenttemplates.js
Normal file
@ -0,0 +1,186 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { documentTemplateModel } from '../../schemas/management/documenttemplate.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import {
|
||||
deleteObject,
|
||||
listObjects,
|
||||
getObject,
|
||||
editObject,
|
||||
newObject,
|
||||
listObjectsByProperties,
|
||||
} from '../../database/database.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Document Templates');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
export const listDocumentTemplatesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
const result = await listObjects({
|
||||
model: documentTemplateModel,
|
||||
page,
|
||||
limit,
|
||||
property,
|
||||
filter,
|
||||
search,
|
||||
sort,
|
||||
order,
|
||||
populate: [
|
||||
{ path: 'documentSize' },
|
||||
{ path: 'parent' },
|
||||
{ path: 'documentPrinters', strictPopulate: false },
|
||||
],
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing document templates.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`List of document templates (Page ${page}, Limit ${limit}). Count: ${result.length}`
|
||||
);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const listDocumentTemplatesByPropertiesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
properties = '',
|
||||
filter = {},
|
||||
masterFilter = {}
|
||||
) => {
|
||||
const result = await listObjectsByProperties({
|
||||
model: documentTemplateModel,
|
||||
properties,
|
||||
filter,
|
||||
populate: ['documentSize'],
|
||||
masterFilter,
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing document templates.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of document templates. Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getDocumentTemplateRouteHandler = async (req, res) => {
|
||||
const id = req.params.id;
|
||||
const result = await getObject({
|
||||
model: documentTemplateModel,
|
||||
id,
|
||||
populate: [
|
||||
{ path: 'documentSize' },
|
||||
{ path: 'parent', strictPopulate: false },
|
||||
{ path: 'documentPrinters', strictPopulate: false },
|
||||
],
|
||||
});
|
||||
if (result?.error) {
|
||||
logger.warn(`Document Template not found with supplied id.`);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
logger.debug(`Retreived document template with ID: ${id}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const editDocumentTemplateRouteHandler = async (req, res) => {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
logger.trace(`Document Template with ID: ${id}`);
|
||||
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
tags: req.body.tags,
|
||||
active: req.body.active,
|
||||
global: req.body.global,
|
||||
parent: req.body.parent,
|
||||
objectType: req.body.objectType,
|
||||
documentSize: req.body.documentSize,
|
||||
documentPrinters: req.body.documentPrinters,
|
||||
content: req.body.content,
|
||||
testObject: req.body.testObject,
|
||||
};
|
||||
// Create audit log before updating
|
||||
const result = await editObject({
|
||||
model: documentTemplateModel,
|
||||
id,
|
||||
updateData,
|
||||
user: req.user,
|
||||
});
|
||||
|
||||
if (result.error) {
|
||||
logger.error('Error editing document template:', result.error);
|
||||
res.status(result).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`Edited document template with ID: ${id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const newDocumentTemplateRouteHandler = async (req, res) => {
|
||||
const newData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
tags: req.body.tags,
|
||||
active: req.body.active,
|
||||
global: req.body.global,
|
||||
parent: req.body.parent,
|
||||
objectType: req.body.objectType,
|
||||
documentSize: req.body.documentSize,
|
||||
documentPrinters: req.body.documentPrinters,
|
||||
content: req.body.content,
|
||||
};
|
||||
const result = await newObject({
|
||||
model: documentTemplateModel,
|
||||
newData,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No document template created:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`New document template with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
/**
 * DELETE handler: remove a document template by the ID in the route params.
 * Deletion (and its audit trail) is handled by the shared deleteObject helper.
 */
export const deleteDocumentTemplateRouteHandler = async (req, res) => {
  // Resolve the target ID from the route parameter.
  const id = new mongoose.Types.ObjectId(req.params.id);
  logger.trace(`Document Template with ID: ${id}`);

  const result = await deleteObject({
    model: documentTemplateModel,
    id,
    user: req.user,
  });

  if (result.error) {
    logger.error('No document template deleted:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`Deleted document template with ID: ${result._id}`);
  res.send(result);
};
|
||||
@ -2,7 +2,14 @@ import dotenv from 'dotenv';
|
||||
import { filamentModel } from '../../schemas/management/filament.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import { newAuditLog, getAuditLogs } from '../../utils.js';
|
||||
import {
|
||||
newAuditLog,
|
||||
editAuditLog,
|
||||
distributeUpdate,
|
||||
flatternObjectIds,
|
||||
distributeNew,
|
||||
} from '../../utils.js';
|
||||
import { listObjectsByProperties } from '../../database/database.js';
|
||||
|
||||
dotenv.config();
|
||||
const logger = log4js.getLogger('Filaments');
|
||||
@ -14,7 +21,10 @@ export const listFilamentsRouteHandler = async (
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {}
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
@ -23,6 +33,17 @@ export const listFilamentsRouteHandler = async (
|
||||
let filament;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (search) {
|
||||
// Add a text search match stage for name and brand fields
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$text: {
|
||||
$search: search,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'vendors', // The collection name (usually lowercase plural)
|
||||
@ -46,6 +67,12 @@ export const listFilamentsRouteHandler = async (
|
||||
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
||||
}
|
||||
|
||||
// Add sorting if sort parameter is provided
|
||||
if (sort) {
|
||||
const sortOrder = order === 'descend' ? -1 : 1;
|
||||
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
||||
}
|
||||
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
@ -64,6 +91,29 @@ export const listFilamentsRouteHandler = async (
|
||||
}
|
||||
};
|
||||
|
||||
export const listFilamentsByPropertiesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
properties = [],
|
||||
filter = {}
|
||||
) => {
|
||||
const result = await listObjectsByProperties({
|
||||
model: filamentModel,
|
||||
properties,
|
||||
filter,
|
||||
populate: 'vendor',
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing filaments.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of vendors. Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getFilamentRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
@ -82,9 +132,7 @@ export const getFilamentRouteHandler = async (req, res) => {
|
||||
|
||||
logger.trace(`Filament with ID: ${id}:`, filament);
|
||||
|
||||
const auditLogs = await getAuditLogs(id);
|
||||
|
||||
res.send({ ...filament._doc, auditLogs: auditLogs });
|
||||
res.send({ ...filament._doc });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching Filament:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
@ -114,26 +162,32 @@ export const editFilamentRouteHandler = async (req, res) => {
|
||||
url: req.body.url,
|
||||
image: req.body.image,
|
||||
color: req.body.color,
|
||||
vendor: req.body.vendor._id,
|
||||
vendor: req.body.vendor,
|
||||
type: req.body.type,
|
||||
price: req.body.price,
|
||||
cost: req.body.cost,
|
||||
diameter: req.body.diameter,
|
||||
density: req.body.density,
|
||||
emptySpoolWeight: req.body.emptySpoolWeight,
|
||||
};
|
||||
|
||||
// Create audit log before updating
|
||||
await newAuditLog(filament.toObject(), updateData, id, 'filament', req.user._id, 'user');
|
||||
await editAuditLog(filament.toObject(), updateData, id, 'filament', req.user);
|
||||
|
||||
const result = await filamentModel.updateOne({ _id: id }, { $set: updateData });
|
||||
const result = await filamentModel.updateOne(
|
||||
{ _id: id },
|
||||
{ $set: flatternObjectIds(updateData) }
|
||||
);
|
||||
if (result.nModified === 0) {
|
||||
logger.error('No Filament updated.');
|
||||
return res.status(500).send({ error: 'No filaments updated.' });
|
||||
}
|
||||
|
||||
await distributeUpdate(updateData, id, 'filament');
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating filament:', updateError);
|
||||
return res.status(500).send({ error: updateError.message });
|
||||
}
|
||||
|
||||
return res.send('OK');
|
||||
} catch (fetchError) {
|
||||
logger.error('Error fetching filament:', fetchError);
|
||||
@ -151,7 +205,7 @@ export const newFilamentRouteHandler = async (req, res) => {
|
||||
url: req.body.url,
|
||||
image: req.body.image,
|
||||
color: req.body.color,
|
||||
vendor: req.body.vendor._id,
|
||||
vendor: req.body.vendor,
|
||||
type: req.body.type,
|
||||
cost: req.body.cost,
|
||||
diameter: req.body.diameter,
|
||||
@ -159,7 +213,7 @@ export const newFilamentRouteHandler = async (req, res) => {
|
||||
emptySpoolWeight: req.body.emptySpoolWeight,
|
||||
};
|
||||
|
||||
const result = await filamentModel.create(newFilament);
|
||||
const result = await filamentModel.create(flatternObjectIds(newFilament));
|
||||
|
||||
if (result.nCreated === 0) {
|
||||
logger.error('No filament created.');
|
||||
@ -167,7 +221,8 @@ export const newFilamentRouteHandler = async (req, res) => {
|
||||
}
|
||||
|
||||
// Create audit log for new filament
|
||||
await newAuditLog({}, newFilament, result._id, 'filament', req.user._id, 'user');
|
||||
await newAuditLog(newFilament, result._id, 'filament', req.user);
|
||||
await distributeNew(result._id, 'filament');
|
||||
|
||||
res.status(200).send({ status: 'ok' });
|
||||
} catch (updateError) {
|
||||
|
||||
151
src/services/management/hosts.js
Normal file
151
src/services/management/hosts.js
Normal file
@ -0,0 +1,151 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { hostModel } from '../../schemas/management/host.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import {
|
||||
deleteObject,
|
||||
listObjects,
|
||||
getObject,
|
||||
editObject,
|
||||
newObject,
|
||||
listObjectsByProperties,
|
||||
} from '../../database/database.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Hosts');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
/**
 * GET handler: paginated, filterable, sortable host listing.
 * All query mechanics are delegated to the generic listObjects helper.
 */
export const listHostsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = '',
  filter = {},
  search = '',
  sort = '',
  order = 'ascend'
) => {
  const query = {
    model: hostModel,
    page,
    limit,
    property,
    filter,
    search,
    sort,
    order,
  };

  const result = await listObjects(query);

  if (result?.error) {
    logger.error('Error listing hosts.');
    return res.status(result.code).send(result);
  }

  logger.debug(`List of hosts (Page ${page}, Limit ${limit}). Count: ${result.length}`);
  res.send(result);
};
|
||||
|
||||
/**
 * GET handler: list hosts projected to a set of properties, optionally filtered.
 * Delegates to the shared listObjectsByProperties helper.
 */
export const listHostsByPropertiesRouteHandler = async (req, res, properties = '', filter = {}) => {
  const result = await listObjectsByProperties({ model: hostModel, properties, filter });

  if (result?.error) {
    logger.error('Error listing hosts.');
    return res.status(result.code).send(result);
  }

  logger.debug(`List of hosts. Count: ${result.length}`);
  res.send(result);
};
|
||||
|
||||
/**
 * GET handler: fetch a single host by the ID in the route params.
 * Lookup is delegated to the shared getObject helper.
 */
export const getHostRouteHandler = async (req, res) => {
  const { id } = req.params;

  const result = await getObject({ model: hostModel, id });

  if (result?.error) {
    logger.warn(`Host not found with supplied id.`);
    return res.status(result.code).send(result);
  }

  logger.debug(`Retreived host with ID: ${id}`);
  res.send(result);
};
|
||||
|
||||
/**
 * PUT handler: update an existing host.
 *
 * Reads the host ID from `req.params.id` and the editable fields (`name`,
 * `active`) from `req.body`, then delegates persistence — including the
 * audit log — to the shared `editObject` helper.
 *
 * Responds with the helper's result on success, or with the helper's error
 * payload and HTTP code on failure.
 */
export const editHostRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`Host with ID: ${id}`);

  const updateData = {
    updatedAt: new Date(),
    name: req.body.name,
    active: req.body.active,
  };

  // editObject creates the audit log before applying the update.
  const result = await editObject({
    model: hostModel,
    id,
    updateData,
    user: req.user,
  });

  if (result.error) {
    logger.error('Error editing host:', result.error);
    // FIX: pass the numeric status code (result.code), not the whole result
    // object, to res.status() — matches the sibling new/delete handlers.
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited host with ID: ${id}`);

  res.send(result);
};
|
||||
|
||||
/**
 * POST handler: create a new host from the request body.
 * Persistence and audit logging are delegated to the shared newObject helper.
 */
export const newHostRouteHandler = async (req, res) => {
  const { body, user } = req;

  // Only name and active are accepted from the payload.
  const newData = {
    updatedAt: new Date(),
    name: body.name,
    active: body.active,
  };

  const result = await newObject({ model: hostModel, newData, user });

  if (result.error) {
    logger.error('No host created:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`New host with ID: ${result._id}`);
  res.send(result);
};
|
||||
|
||||
/**
 * DELETE handler: remove a host by the ID in the route params.
 * Deletion (and its audit trail) is handled by the shared deleteObject helper.
 */
export const deleteHostRouteHandler = async (req, res) => {
  // Resolve the target ID from the route parameter.
  const id = new mongoose.Types.ObjectId(req.params.id);
  logger.trace(`Host with ID: ${id}`);

  const result = await deleteObject({ model: hostModel, id, user: req.user });

  if (result.error) {
    logger.error('No host deleted:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`Deleted host with ID: ${result._id}`);
  res.send(result);
};
|
||||
@ -2,7 +2,7 @@ import dotenv from 'dotenv';
|
||||
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import { newAuditLog, getAuditLogs } from '../../utils.js';
|
||||
import { distributeUpdate, newAuditLog, editAuditLog, distributeNew } from '../../utils.js';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
@ -15,14 +15,29 @@ export const listNoteTypesRouteHandler = async (
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {}
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
try {
|
||||
const skip = (page - 1) * limit;
|
||||
let noteTypes;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (Object.keys(filter).length > 0) {
|
||||
if (search) {
|
||||
// Add a text search match stage for name and brand fields
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$text: {
|
||||
$search: search,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
}
|
||||
|
||||
@ -31,6 +46,12 @@ export const listNoteTypesRouteHandler = async (
|
||||
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
|
||||
}
|
||||
|
||||
// Add sorting if sort parameter is provided
|
||||
if (sort) {
|
||||
const sortOrder = order === 'descend' ? -1 : 1;
|
||||
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
||||
}
|
||||
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
@ -63,9 +84,7 @@ export const getNoteTypeRouteHandler = async (req, res) => {
|
||||
|
||||
logger.trace(`Note type with ID: ${id}:`, noteType);
|
||||
|
||||
const auditLogs = await getAuditLogs(id);
|
||||
|
||||
res.send({ ...noteType._doc, auditLogs: auditLogs });
|
||||
res.send({ ...noteType._doc });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching note type:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
@ -93,7 +112,8 @@ export const editNoteTypeRouteHandler = async (req, res) => {
|
||||
};
|
||||
|
||||
// Create audit log before updating
|
||||
await newAuditLog(noteType.toObject(), updateData, id, 'noteType', req.user._id, 'user');
|
||||
await editAuditLog(noteType.toObject(), updateData, id, 'noteType', req.user._id, 'user');
|
||||
await distributeUpdate(updateData, id, 'noteType');
|
||||
|
||||
const result = await noteTypeModel.updateOne({ _id: id }, { $set: updateData });
|
||||
if (result.nModified === 0) {
|
||||
@ -123,7 +143,8 @@ export const newNoteTypeRouteHandler = async (req, res) => {
|
||||
}
|
||||
|
||||
// Create audit log for new note type
|
||||
await newAuditLog({}, newNoteType, result._id, 'noteType', req.user._id, 'user');
|
||||
await newAuditLog(newNoteType, result._id, 'noteType', req.user);
|
||||
await distributeNew(result._id, 'filament');
|
||||
|
||||
res.status(200).send({ status: 'ok' });
|
||||
} catch (updateError) {
|
||||
|
||||
@ -5,7 +5,7 @@ import mongoose from 'mongoose';
|
||||
import multer from 'multer';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { newAuditLog, getAuditLogs } from '../../utils.js';
|
||||
import { distributeNew, distributeUpdate, editAuditLog, newAuditLog } from '../../utils.js';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
@ -68,22 +68,6 @@ export const listPartsRouteHandler = async (
|
||||
let part;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (search) {
|
||||
// Add a text search match stage for name and brand fields
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$text: {
|
||||
$search: search,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
}
|
||||
|
||||
if (property != '') {
|
||||
logger.error(property);
|
||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||
@ -101,6 +85,7 @@ export const listPartsRouteHandler = async (
|
||||
aggregateCommand.push({
|
||||
$project: {
|
||||
name: 1,
|
||||
globalPricing: 1,
|
||||
_id: 1,
|
||||
createdAt: 1,
|
||||
updatedAt: 1,
|
||||
@ -110,6 +95,22 @@ export const listPartsRouteHandler = async (
|
||||
});
|
||||
}
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
}
|
||||
|
||||
if (search) {
|
||||
// Add a text search match stage for name and brand fields
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$text: {
|
||||
$search: search,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Add sorting if sort parameter is provided
|
||||
if (sort) {
|
||||
const sortOrder = order === 'descend' ? -1 : 1;
|
||||
@ -152,9 +153,7 @@ export const getPartRouteHandler = async (req, res) => {
|
||||
|
||||
logger.trace(`Part with ID: ${id}:`, part);
|
||||
|
||||
const auditLogs = await getAuditLogs(id);
|
||||
|
||||
res.send({ ...part._doc, auditLogs: auditLogs });
|
||||
res.send({ ...part._doc });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching Part:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
@ -180,7 +179,8 @@ export const editPartRouteHandler = async (req, res) => {
|
||||
const updateData = req.body;
|
||||
|
||||
// Create audit log before updating
|
||||
await newAuditLog(part.toObject(), updateData, id, 'Part', req.user._id, 'user');
|
||||
await editAuditLog(part.toObject(), updateData, id, 'part', req.user._id, 'user');
|
||||
await distributeUpdate(updateData, id, 'part');
|
||||
|
||||
const result = await partModel.updateOne({ _id: id }, { $set: updateData });
|
||||
if (result.nModified === 0) {
|
||||
@ -218,9 +218,9 @@ export const newPartRouteHandler = async (req, res) => {
|
||||
|
||||
// Create audit logs for each new part
|
||||
for (const result of results) {
|
||||
await newAuditLog({}, result.toObject(), result._id, 'Part', req.user._id, 'user');
|
||||
await newAuditLog(result.toObject(), result._id, 'part', req.user);
|
||||
}
|
||||
|
||||
await distributeNew(null, 'part');
|
||||
return res.status(200).send(results);
|
||||
} else {
|
||||
// Handle single part
|
||||
@ -234,8 +234,8 @@ export const newPartRouteHandler = async (req, res) => {
|
||||
const result = await partModel.create(newPart);
|
||||
|
||||
// Create audit log for new part
|
||||
await newAuditLog({}, newPart, result._id, 'Part', req.user._id, 'user');
|
||||
|
||||
await newAuditLog(newPart, result._id, 'part', req.user);
|
||||
await distributeNew(result._id, 'part');
|
||||
return res.status(200).send(result);
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@ -3,7 +3,13 @@ import { productModel } from '../../schemas/management/product.schema.js';
|
||||
import { partModel } from '../../schemas/management/part.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import { newAuditLog, getAuditLogs } from '../../utils.js';
|
||||
import {
|
||||
editAuditLog,
|
||||
flatternObjectIds,
|
||||
distributeUpdate,
|
||||
newAuditLog,
|
||||
distributeNew,
|
||||
} from '../../utils.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Products');
|
||||
@ -44,6 +50,27 @@ export const listProductsRouteHandler = async (
|
||||
});
|
||||
aggregateCommand.push({ $project: { _id: 1, [property]: 1 } });
|
||||
} else {
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'vendors', // The name of the Filament collection
|
||||
localField: 'vendor',
|
||||
foreignField: '_id',
|
||||
as: 'vendor',
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$unwind: {
|
||||
path: '$vendor',
|
||||
preserveNullAndEmptyArrays: true, // Keep documents without a matching vendor
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$addFields: {
|
||||
vendor: '$vendor',
|
||||
},
|
||||
});
|
||||
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
||||
}
|
||||
|
||||
@ -71,8 +98,7 @@ export const getProductRouteHandler = async (req, res) => {
|
||||
.findOne({
|
||||
_id: id,
|
||||
})
|
||||
.populate('vendor')
|
||||
.populate('parts');
|
||||
.populate('vendor');
|
||||
|
||||
if (!product) {
|
||||
logger.warn(`Product not found with supplied id.`);
|
||||
@ -81,9 +107,7 @@ export const getProductRouteHandler = async (req, res) => {
|
||||
|
||||
logger.trace(`Product with ID: ${id}:`, product);
|
||||
|
||||
const auditLogs = await getAuditLogs(id);
|
||||
|
||||
res.send({ ...product._doc, auditLogs: auditLogs });
|
||||
res.send({ ...product._doc });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching Product:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
@ -120,18 +144,20 @@ export const editProductRouteHandler = async (req, res) => {
|
||||
version: req.body?.version,
|
||||
parts: req.body?.parts,
|
||||
margin: req.body.margin,
|
||||
price: req.body.price,
|
||||
marginOrPrice: req.body.marginOrPrice,
|
||||
amount: req.body.amount,
|
||||
priceMode: req.body.priceMode,
|
||||
};
|
||||
|
||||
// Create audit log before updating
|
||||
await newAuditLog(product.toObject(), updateData, id, 'product', req.user._id, 'user');
|
||||
await editAuditLog(product.toObject(), updateData, id, 'product', req.user);
|
||||
|
||||
const result = await productModel.updateOne({ _id: id }, { $set: updateData });
|
||||
if (result.nModified === 0) {
|
||||
logger.error('No Product updated.');
|
||||
res.status(500).send({ error: 'No products updated.' });
|
||||
}
|
||||
|
||||
await distributeUpdate(updateData, id, 'product');
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating product:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
@ -145,25 +171,21 @@ export const newProductRouteHandler = async (req, res) => {
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
vendor: req.body.vendor.id,
|
||||
vendor: req.body.vendor,
|
||||
parts: partIds,
|
||||
margin: req.body.margin,
|
||||
price: req.body.price,
|
||||
marginOrPrice: req.body.marginOrPrice,
|
||||
amount: req.body.amount,
|
||||
priceMode: req.body.priceMode,
|
||||
};
|
||||
|
||||
const newProductResult = await productModel.create(newProduct);
|
||||
const newProductResult = await productModel.create(flatternObjectIds(newProduct));
|
||||
|
||||
if (newProductResult.nCreated === 0) {
|
||||
logger.error('No product created.');
|
||||
res.status(500).send({ error: 'No product created.' });
|
||||
}
|
||||
|
||||
// Create audit log for new product
|
||||
await newAuditLog({}, newProduct, newProductResult._id, 'product', req.user._id, 'user');
|
||||
|
||||
const parts = req.body.parts || [];
|
||||
const productId = newProductResult._id;
|
||||
|
||||
var partIds = [];
|
||||
|
||||
@ -172,10 +194,10 @@ export const newProductRouteHandler = async (req, res) => {
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
name: part.name,
|
||||
product: productId,
|
||||
product: { _id: newProductResult._id },
|
||||
};
|
||||
|
||||
const newPartResult = await partModel.create(newPart);
|
||||
const newPartResult = await partModel.create(flatternObjectIds(newPart));
|
||||
if (newPartResult.nCreated === 0) {
|
||||
logger.error('No parts created.');
|
||||
res.status(500).send({ error: 'No parts created.' });
|
||||
@ -183,20 +205,14 @@ export const newProductRouteHandler = async (req, res) => {
|
||||
partIds.push(newPartResult._id);
|
||||
|
||||
// Create audit log for each new part
|
||||
await newAuditLog({}, newPart, newPartResult._id, 'Part', req.user._id, 'user');
|
||||
await newAuditLog(newPart, newPartResult._id, 'part', req.user);
|
||||
}
|
||||
|
||||
const editProductResult = await productModel.updateOne(
|
||||
{ _id: productId },
|
||||
{ $set: { parts: partIds } }
|
||||
);
|
||||
// Create audit log for new product
|
||||
await newAuditLog(newProduct, newProductResult._id, 'product', req.user);
|
||||
await distributeNew(newProductResult._id, 'product');
|
||||
|
||||
if (editProductResult.nModified === 0) {
|
||||
logger.error('No product updated.');
|
||||
res.status(500).send({ error: 'No products updated.' });
|
||||
}
|
||||
|
||||
res.status(200).send({ ...newProductResult, parts: partIds });
|
||||
res.status(200).send({ ...newProductResult });
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating product:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
|
||||
@ -2,8 +2,7 @@ import dotenv from 'dotenv';
|
||||
import { userModel } from '../../schemas/management/user.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import { newAuditLog } from '../../utils.js';
|
||||
import { getAuditLogs } from '../../utils.js';
|
||||
import { distributeUpdate, editAuditLog } from '../../utils.js';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
@ -16,7 +15,10 @@ export const listUsersRouteHandler = async (
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {}
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
@ -25,6 +27,17 @@ export const listUsersRouteHandler = async (
|
||||
let user;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (search) {
|
||||
// Add a text search match stage for name and brand fields
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$text: {
|
||||
$search: search,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
@ -35,6 +48,12 @@ export const listUsersRouteHandler = async (
|
||||
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||
}
|
||||
|
||||
// Add sorting if sort parameter is provided
|
||||
if (sort) {
|
||||
const sortOrder = order === 'descend' ? -1 : 1;
|
||||
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
||||
}
|
||||
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
@ -66,9 +85,7 @@ export const getUserRouteHandler = async (req, res) => {
|
||||
|
||||
logger.trace(`User with ID: ${id}:`, user);
|
||||
|
||||
const auditLogs = await getAuditLogs(id);
|
||||
|
||||
res.send({ ...user._doc, auditLogs: auditLogs });
|
||||
res.send({ ...user._doc });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching User:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
@ -103,13 +120,15 @@ export const editUserRouteHandler = async (req, res) => {
|
||||
console.log(req.user);
|
||||
|
||||
// Create audit log before updating
|
||||
await newAuditLog(user.toObject(), updateData, id, 'user', req.user._id, 'user');
|
||||
await editAuditLog(user.toObject(), updateData, id, 'user', req.user);
|
||||
|
||||
const result = await userModel.updateOne({ _id: id }, { $set: updateData });
|
||||
if (result.nModified === 0) {
|
||||
logger.error('No User updated.');
|
||||
res.status(500).send({ error: 'No users updated.' });
|
||||
}
|
||||
|
||||
await distributeUpdate(updateData, id, 'user');
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating user:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
|
||||
@ -2,8 +2,14 @@ import dotenv from 'dotenv';
|
||||
import { vendorModel } from '../../schemas/management/vendor.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import { newAuditLog } from '../../utils.js';
|
||||
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
|
||||
import {
|
||||
deleteObject,
|
||||
listObjects,
|
||||
getObject,
|
||||
editObject,
|
||||
newObject,
|
||||
listObjectsByProperties,
|
||||
} from '../../database/database.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Vendors');
|
||||
@ -15,142 +21,144 @@ export const listVendorsRouteHandler = async (
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
const result = await listObjects({
|
||||
model: vendorModel,
|
||||
page,
|
||||
limit,
|
||||
property,
|
||||
filter,
|
||||
search,
|
||||
sort,
|
||||
order,
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing vendors.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of vendors (Page ${page}, Limit ${limit}). Count: ${result.length}.`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const listVendorsByPropertiesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
properties = '',
|
||||
filter = {}
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
const result = await listObjectsByProperties({
|
||||
model: vendorModel,
|
||||
properties,
|
||||
filter,
|
||||
});
|
||||
|
||||
let vendor;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
}
|
||||
|
||||
if (property != '') {
|
||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||
} else {
|
||||
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
||||
}
|
||||
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
console.log(aggregateCommand);
|
||||
|
||||
vendor = await vendorModel.aggregate(aggregateCommand);
|
||||
|
||||
logger.trace(`List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`, vendor);
|
||||
res.send(vendor);
|
||||
} catch (error) {
|
||||
logger.error('Error listing vendors:', error);
|
||||
res.status(500).send({ error: error });
|
||||
if (result?.error) {
|
||||
logger.error('Error listing vendors.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of vendors. Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getVendorRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
// Fetch the vendor with the given remote address
|
||||
const vendor = await vendorModel.findOne({
|
||||
_id: id,
|
||||
});
|
||||
|
||||
if (!vendor) {
|
||||
logger.warn(`Vendor not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Print job not found.' });
|
||||
}
|
||||
|
||||
logger.trace(`Vendor with ID: ${id}:`, vendor);
|
||||
|
||||
const auditLogs = await auditLogModel
|
||||
.find({
|
||||
target: id,
|
||||
})
|
||||
.populate('owner');
|
||||
|
||||
res.send({ ...vendor._doc, auditLogs: auditLogs });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching Vendor:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
const id = req.params.id;
|
||||
const result = await getObject({
|
||||
model: vendorModel,
|
||||
id,
|
||||
});
|
||||
if (result?.error) {
|
||||
logger.warn(`Vendor not found with supplied id.`);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
logger.debug(`Retreived vendor with ID: ${id}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
/**
 * Edit an existing vendor.
 *
 * Route handler: PUT/PATCH /vendors/:id
 * Builds the update payload from the request body and delegates to the shared
 * editObject() helper, which writes the audit log before applying the update.
 *
 * NOTE(review): this block was a merged old/new diff residue (duplicate
 * `const id` declarations, a second manual update path after the catch, and
 * `res.status(result)` being handed the whole result object). It has been
 * consolidated to the editObject-based path; the numeric `result.code` is now
 * used as the HTTP status.
 */
export const editVendorRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`Vendor with ID: ${id}`);

  const updateData = {
    updatedAt: new Date(),
    contact: req.body.contact,
    country: req.body.country,
    name: req.body.name,
    website: req.body.website,
    phone: req.body.phone,
    email: req.body.email,
  };

  // editObject creates the audit log before updating the document
  const result = await editObject({
    model: vendorModel,
    id,
    updateData,
    user: req.user,
  });

  if (result.error) {
    logger.error('Error editing vendor:', result.error);
    // Bug fix: use the numeric error code, not the result object, as status
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited vendor with ID: ${id}`);

  res.send(result);
};
|
||||
|
||||
/**
 * Create a new vendor.
 *
 * Route handler: POST /vendors
 * Builds the new-vendor payload from the request body and delegates to the
 * shared newObject() helper, which handles creation and audit logging.
 *
 * NOTE(review): this block was a merged old/new diff residue (a direct
 * vendorModel.create() path plus a second newObject() path placed after the
 * catch). It has been consolidated to the newObject-based path.
 */
export const newVendorRouteHandler = async (req, res) => {
  const newData = {
    updatedAt: new Date(),
    contact: req.body.contact,
    country: req.body.country,
    name: req.body.name,
    website: req.body.website,
    phone: req.body.phone,
    email: req.body.email,
  };

  const result = await newObject({
    model: vendorModel,
    newData,
    user: req.user,
  });

  if (result.error) {
    logger.error('No vendor created:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`New vendor with ID: ${result._id}`);

  res.send(result);
};
|
||||
|
||||
/**
 * Delete a vendor by its route :id parameter.
 *
 * Delegates to the shared deleteObject() helper (which also handles audit
 * logging) and relays either the helper's error payload with its numeric
 * status code, or the successful result, back to the client.
 */
export const deleteVendorRouteHandler = async (req, res) => {
  const vendorId = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`Vendor with ID: ${vendorId}`);

  const outcome = await deleteObject({
    model: vendorModel,
    id: vendorId,
    user: req.user,
  });

  // Helper reported a failure — forward its status code and payload as-is.
  if (outcome.error) {
    logger.error('No vendor deleted:', outcome.error);
    return res.status(outcome.code).send(outcome);
  }

  logger.debug(`Deleted vendor with ID: ${outcome._id}`);

  res.send(outcome);
};
|
||||
|
||||
24
src/services/misc/applaunch.html
Normal file
24
src/services/misc/applaunch.html
Normal file
@ -0,0 +1,24 @@
|
||||
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <title>Redirecting...</title>
    <!--
      App-launch shim: on load, navigate to the (server-injected) deep link.
      The __REDIRECT_URI__ placeholder is replaced server-side before serving.
      When the OS prompt steals focus (blur) and the user returns (focus),
      this helper tab closes itself.
    -->
    <script>
      window.onload = function () {
        window.location.href = '__REDIRECT_URI__';
        // Blur fires when the native "open app?" prompt takes focus.
        window.addEventListener('blur', function onBlur() {
          // Once focus comes back, the prompt was dismissed — close the tab.
          window.addEventListener('focus', function onFocus() {
            window.removeEventListener('focus', onFocus);
            window.close();
          });
          window.removeEventListener('blur', onBlur);
        });
      };
    </script>
  </head>
  <body>
    <p>Redirecting to the app...</p>
  </body>
</html>
|
||||
@ -3,19 +3,90 @@ import { keycloak } from '../../keycloak.js';
|
||||
import log4js from 'log4js';
|
||||
import axios from 'axios';
|
||||
import { userModel } from '../../schemas/management/user.schema.js';
|
||||
import { readFileSync } from 'fs';
|
||||
import { resolve } from 'path';
|
||||
import NodeCache from 'node-cache';
|
||||
import jwt from 'jsonwebtoken';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Auth');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
// Initialize NodeCache with 5-minute TTL for token-based user lookup
|
||||
const tokenUserCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
|
||||
|
||||
// Cache event listeners for monitoring
|
||||
tokenUserCache.on('expired', (key, value) => {
|
||||
logger.debug(`Token user cache entry expired: ${key.substring(0, 20)}...`);
|
||||
});
|
||||
|
||||
tokenUserCache.on('flush', () => {
|
||||
logger.info('Token user cache flushed');
|
||||
});
|
||||
|
||||
const loginTokenRequests = new Map();
|
||||
|
||||
// Token-based user lookup function with caching
|
||||
/**
 * Resolve an application user from a bearer token, with caching.
 *
 * Flow: cache hit → return cached user; otherwise decode the JWT (without
 * signature verification — jwt.decode only), look the user up by
 * `preferred_username`, cache on success. Returns `null` on any failure
 * (bad token, missing claim, unknown user, or thrown error).
 *
 * @param {string} token - Raw bearer token (JWT).
 * @returns {Promise<object|null>} Matching user document, or null.
 */
const lookupUserByToken = async (token) => {
  try {
    // Fast path: previously resolved token.
    const hit = tokenUserCache.get(token);
    if (hit) {
      logger.debug(`User found in token cache for token: ${token.substring(0, 20)}...`);
      return hit;
    }

    logger.debug(`User not in token cache, decoding token: ${token.substring(0, 20)}...`);
    const claims = jwt.decode(token);

    // Guard: token must decode and carry the username claim.
    if (!claims?.preferred_username) {
      logger.warn('Invalid token or missing preferred_username');
      return null;
    }

    const user = await userModel.findOne({ username: claims.preferred_username });
    if (!user) {
      logger.warn(`User not found in database for username: ${claims.preferred_username}`);
      return null;
    }

    // Cache keyed by the raw token so repeat requests skip the DB.
    tokenUserCache.set(token, user);
    logger.debug(`User stored in token cache for token: ${token.substring(0, 20)}...`);
    return user;
  } catch (error) {
    logger.error(`Error looking up user by token:`, error.message);
    return null;
  }
};
|
||||
|
||||
// Cache management utility functions
|
||||
/** Flush every entry from the token→user cache. */
const clearTokenUserCache = () => {
  tokenUserCache.flushAll();
  logger.info('Token user cache cleared');
};

/**
 * Cache hit/miss statistics as reported by node-cache.
 * @returns {object} node-cache stats object.
 */
const getTokenUserCacheStats = () => tokenUserCache.getStats();

/**
 * Evict a single token's cached user (e.g. on logout or token revocation).
 * @param {string} token - Raw bearer token used as the cache key.
 */
const removeUserFromTokenCache = (token) => {
  tokenUserCache.del(token);
  logger.debug(`User removed from token cache for token: ${token.substring(0, 20)}...`);
};
|
||||
|
||||
// Login handler
|
||||
export const loginRouteHandler = (req, res) => {
|
||||
export const loginRouteHandler = (req, res, isApp = false) => {
|
||||
// Get the redirect URL from form data or default to production overview
|
||||
const redirectUrl = req.query.redirect_uri || '/production/overview';
|
||||
|
||||
// Store the original URL to redirect after login
|
||||
const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
|
||||
const callbackUrl = encodeURIComponent(`${process.env.APP_URL_API}/auth/callback`);
|
||||
const callBackState = isApp ? '/auth/app/callback' : '/auth/callback';
|
||||
const callbackUrl = encodeURIComponent(`${process.env.APP_URL_API}${callBackState}`);
|
||||
const state = encodeURIComponent(redirectUrl);
|
||||
|
||||
logger.warn(req.query.redirect_uri);
|
||||
@ -44,8 +115,6 @@ const fetchAndStoreUser = async (req, token) => {
|
||||
);
|
||||
|
||||
const userInfo = {
|
||||
access_token: token.access_token,
|
||||
expires_at: token.expires_at,
|
||||
roles: token.realm_access?.roles || [],
|
||||
username: response.data.preferred_username,
|
||||
email: response.data.email,
|
||||
@ -68,8 +137,66 @@ const fetchAndStoreUser = async (req, token) => {
|
||||
}
|
||||
};
|
||||
|
||||
// Function to exchange authorization code for tokens, fetch user, and set session
|
||||
/**
 * Exchange a Keycloak authorization code for tokens, fetch the user, and
 * populate the session.
 *
 * Concurrent requests carrying the same code are deduplicated through the
 * `loginTokenRequests` Map: only the first request performs the token
 * exchange; the rest await the same in-flight promise.
 *
 * Bug fix: entries are now removed from `loginTokenRequests` once the
 * exchange settles. Previously a settled (especially rejected) promise stayed
 * in the Map forever, so retries with the same code always replayed the old
 * failure and the Map grew without bound.
 *
 * @param {import('express').Request} req - Must carry `?code=` from Keycloak.
 * @param {import('express').Response} res
 * @param {boolean} [isApp=false] - Selects the app vs. web callback URI.
 */
export const loginTokenRouteHandler = async (req, res, isApp = false) => {
  const code = req.query.code;
  if (!code) {
    return res.status(400).json({ error: 'Authorization code missing' });
  }

  try {
    // If a request for this code is already in progress, wait for it
    if (loginTokenRequests.has(code)) {
      const tokenData = await loginTokenRequests.get(code);
      return res.status(200).json(tokenData);
    }

    // Otherwise, start the request and store the promise
    const tokenPromise = (async () => {
      const callBackState = isApp ? '/auth/app/callback' : '/auth/callback';
      const callbackUrl = `${process.env.APP_URL_API}${callBackState}`;
      const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;

      const response = await axios.post(
        tokenUrl,
        new URLSearchParams({
          grant_type: 'authorization_code',
          client_id: process.env.KEYCLOAK_CLIENT_ID,
          client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
          code: code,
          redirect_uri: callbackUrl,
        }).toString(),
        {
          headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
          },
        }
      );
      const tokenData = {
        access_token: response.data.access_token,
        refresh_token: response.data.refresh_token,
        id_token: response.data.id_token,
        expires_at: new Date().getTime() + response.data.expires_in * 1000,
      };

      req.session['keycloak-token'] = tokenData;
      // Fetch and store user data, set session
      const userData = await fetchAndStoreUser(req, tokenData);
      return { ...tokenData, ...userData };
    })();

    loginTokenRequests.set(code, tokenPromise);
    try {
      const userAndTokenData = await tokenPromise;
      res.status(200).json(userAndTokenData);
    } finally {
      // Authorization codes are single-use: drop the settled entry so the
      // Map does not leak and a failed exchange is not cached for retries.
      loginTokenRequests.delete(code);
    }
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
};
|
||||
|
||||
// Login callback handler
|
||||
export const loginCallbackRouteHandler = (req, res) => {
|
||||
export const loginCallbackRouteHandler = async (req, res, isApp = false) => {
|
||||
// Don't use keycloak.protect() here as it expects an already authenticated session
|
||||
|
||||
// Extract the code and state from the query parameters
|
||||
@ -80,54 +207,36 @@ export const loginCallbackRouteHandler = (req, res) => {
|
||||
return res.status(400).send('Authorization code missing');
|
||||
}
|
||||
|
||||
// Exchange the code for tokens manually
|
||||
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
|
||||
const redirectUri = `${process.env.APP_URL_API || 'http://localhost:8080'}/auth/callback`;
|
||||
|
||||
// Make a POST request to exchange the code for tokens
|
||||
axios
|
||||
.post(
|
||||
tokenUrl,
|
||||
new URLSearchParams({
|
||||
grant_type: 'authorization_code',
|
||||
client_id: process.env.KEYCLOAK_CLIENT_ID,
|
||||
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
|
||||
code: code,
|
||||
redirect_uri: redirectUri,
|
||||
}).toString(),
|
||||
{
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded',
|
||||
},
|
||||
}
|
||||
)
|
||||
.then(async (response) => {
|
||||
// Store tokens in session
|
||||
const tokenData = {
|
||||
access_token: response.data.access_token,
|
||||
refresh_token: response.data.refresh_token,
|
||||
id_token: response.data.id_token,
|
||||
expires_at: new Date().getTime() + response.data.expires_in * 1000,
|
||||
};
|
||||
req.session['keycloak-token'] = tokenData;
|
||||
|
||||
try {
|
||||
// Fetch and store user data
|
||||
await fetchAndStoreUser(req, tokenData);
|
||||
|
||||
// Save session and redirect to the original URL
|
||||
req.session.save(() => {
|
||||
res.redirect((process.env.APP_URL_CLIENT || 'http://localhost:3000') + state);
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Error during user setup:', error);
|
||||
res.status(500).send('Error setting up user session');
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error('Token exchange error:', error.response?.data || error.message);
|
||||
res.status(500).send('Authentication failed');
|
||||
});
|
||||
const appUrl = isApp
|
||||
? 'farmcontrol://app'
|
||||
: process.env.APP_URL_CLIENT || 'http://localhost:3000';
|
||||
const redirectUriRaw = `${appUrl}${state}`;
|
||||
let redirectUri;
|
||||
try {
|
||||
// Try to parse as a URL (works for http/https)
|
||||
const url = new URL(redirectUriRaw);
|
||||
url.searchParams.set('authCode', code);
|
||||
redirectUri = url.toString();
|
||||
} catch (e) {
|
||||
// Fallback for custom schemes (e.g., farmcontrol://app)
|
||||
if (redirectUriRaw.includes('?')) {
|
||||
redirectUri = `${redirectUriRaw}&authCode=${encodeURIComponent(code)}`;
|
||||
} else {
|
||||
redirectUri = `${redirectUriRaw}?authCode=${encodeURIComponent(code)}`;
|
||||
}
|
||||
}
|
||||
// Save session and redirect to the original URL
|
||||
req.session.save(async () => {
|
||||
if (isApp) {
|
||||
// Read HTML template and inject redirectUri
|
||||
const templatePath = resolve(process.cwd(), 'src/services/misc/applaunch.html');
|
||||
let html = readFileSync(templatePath, 'utf8');
|
||||
html = html.replace('__REDIRECT_URI__', redirectUri);
|
||||
res.send(html);
|
||||
} else {
|
||||
res.redirect(redirectUri);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// Function to create or update user
|
||||
@ -318,8 +427,27 @@ export const refreshTokenRouteHandler = (req, res) => {
|
||||
});
|
||||
};
|
||||
|
||||
// Middleware to populate req.user from session
|
||||
export const populateUserMiddleware = (req, res, next) => {
|
||||
// Middleware to populate req.user from session or token
|
||||
export const populateUserMiddleware = async (req, res, next) => {
|
||||
const authHeader = req.headers.authorization || req.headers.Authorization;
|
||||
if (authHeader && authHeader.startsWith('Bearer ')) {
|
||||
const token = authHeader.substring(7);
|
||||
|
||||
try {
|
||||
// Use token-based cache to lookup user
|
||||
const user = await lookupUserByToken(token);
|
||||
if (user) {
|
||||
req.user = user;
|
||||
// Also set session user for compatibility
|
||||
req.session.user = user;
|
||||
return next();
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error in token-based user lookup:', error.message);
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to session-based authentication
|
||||
if (req.session && req.session.user) {
|
||||
req.user = req.session.user;
|
||||
} else {
|
||||
@ -328,6 +456,9 @@ export const populateUserMiddleware = (req, res, next) => {
|
||||
next();
|
||||
};
|
||||
|
||||
// Export cache management functions
|
||||
export { lookupUserByToken, clearTokenUserCache, getTokenUserCacheStats, removeUserFromTokenCache };
|
||||
|
||||
// Example of how to set up your routes in Express
|
||||
/*
|
||||
import express from "express";
|
||||
|
||||
@ -2,6 +2,13 @@ import dotenv from 'dotenv';
|
||||
import { noteModel } from '../../schemas/misc/note.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import {
|
||||
deleteAuditLog,
|
||||
editAuditLog,
|
||||
expandObjectIds,
|
||||
flatternObjectIds,
|
||||
newAuditLog,
|
||||
} from '../../utils.js';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
@ -118,6 +125,8 @@ export const editNoteRouteHandler = async (req, res) => {
|
||||
logger.error('No note updated.');
|
||||
res.status(500).send({ error: 'No notes updated.' });
|
||||
}
|
||||
|
||||
await editAuditLog(note.toObject(), updateData, id, 'note', req.user);
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating note:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
@ -132,14 +141,16 @@ export const editNoteRouteHandler = async (req, res) => {
|
||||
export const newNoteRouteHandler = async (req, res) => {
|
||||
try {
|
||||
let { ...newNote } = req.body;
|
||||
newNote = { ...newNote, createdAt: new Date(), updatedAt: new Date(), user: req.user._id };
|
||||
newNote = { ...newNote, createdAt: new Date(), updatedAt: new Date(), user: req.user };
|
||||
|
||||
const result = await noteModel.create(newNote);
|
||||
const result = await noteModel.create(flatternObjectIds(newNote));
|
||||
if (result.nCreated === 0) {
|
||||
logger.error('No note created.');
|
||||
res.status(500).send({ error: 'No note created.' });
|
||||
}
|
||||
|
||||
await newAuditLog(expandObjectIds(newNote), result._id, 'note', req.user);
|
||||
|
||||
res.status(200).send({ status: 'ok' });
|
||||
} catch (updateError) {
|
||||
logger.error('Error creating note:', updateError);
|
||||
@ -166,7 +177,7 @@ export const deleteNoteRouteHandler = async (req, res) => {
|
||||
logger.trace(`Deleting note with ID: ${id} and all its children`);
|
||||
|
||||
// Recursively find and delete all child notes
|
||||
const deletedNoteIds = await recursivelyDeleteNotes(id);
|
||||
const deletedNoteIds = await recursivelyDeleteNotes(id, req.user);
|
||||
|
||||
logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
|
||||
res.send({
|
||||
@ -181,7 +192,7 @@ export const deleteNoteRouteHandler = async (req, res) => {
|
||||
};
|
||||
|
||||
// Helper function to recursively delete notes and their children
|
||||
const recursivelyDeleteNotes = async (noteId) => {
|
||||
const recursivelyDeleteNotes = async (noteId, user) => {
|
||||
const deletedIds = [];
|
||||
|
||||
// Find all notes that have this note as their parent
|
||||
@ -189,11 +200,16 @@ const recursivelyDeleteNotes = async (noteId) => {
|
||||
|
||||
// Recursively delete all children first
|
||||
for (const childNote of childNotes) {
|
||||
const childDeletedIds = await recursivelyDeleteNotes(childNote._id);
|
||||
const childDeletedIds = await recursivelyDeleteNotes(childNote._id, user);
|
||||
deletedIds.push(...childDeletedIds);
|
||||
}
|
||||
|
||||
// Delete the current note
|
||||
|
||||
const note = await noteModel.findOne({ _id: noteId }).populate('user').populate('parent');
|
||||
|
||||
await deleteAuditLog(expandObjectIds(note.toObject()), noteId, 'note', user);
|
||||
|
||||
await noteModel.deleteOne({ _id: noteId });
|
||||
deletedIds.push(noteId);
|
||||
|
||||
|
||||
@ -17,6 +17,9 @@ import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
|
||||
import { userModel } from '../../schemas/management/user.schema.js';
|
||||
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
|
||||
import { noteModel } from '../../schemas/misc/note.schema.js';
|
||||
import { documentSizeModel } from '../../schemas/management/documentsize.schema.js';
|
||||
import { documentTemplateModel } from '../../schemas/management/documenttemplate.schema.js';
|
||||
import { hostModel } from '../../schemas/management/host.schema.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Jobs');
|
||||
@ -26,22 +29,24 @@ logger.level = process.env.LOG_LEVEL;
|
||||
const PREFIX_MODEL_MAP = {
|
||||
PRN: { model: printerModel, idField: '_id', type: 'printer' },
|
||||
FIL: { model: filamentModel, idField: '_id', type: 'filament' },
|
||||
SPL: { model: null, idField: '_id', type: 'spool' }, // No spool model found
|
||||
GCF: { model: gcodeFileModel, idField: '_id', type: 'gcodefile' },
|
||||
GCF: { model: gcodeFileModel, idField: '_id', type: 'gcodeFile' },
|
||||
JOB: { model: jobModel, idField: '_id', type: 'job' },
|
||||
PRT: { model: partModel, idField: '_id', type: 'part' },
|
||||
PRD: { model: productModel, idField: '_id', type: 'product' },
|
||||
VEN: { model: vendorModel, idField: '_id', type: 'vendor' },
|
||||
SJB: { model: subJobModel, idField: '_id', type: 'subjob' },
|
||||
FLS: { model: filamentStockModel, idField: '_id', type: 'filamentstock' },
|
||||
SEV: { model: stockEventModel, idField: '_id', type: 'stockevent' },
|
||||
SAU: { model: stockAuditModel, idField: '_id', type: 'stockaudit' },
|
||||
PTS: { model: partStockModel, idField: '_id', type: 'partstock' },
|
||||
PDS: { model: null, idField: '_id', type: 'productstock' }, // No productStockModel found
|
||||
ADL: { model: auditLogModel, idField: '_id', type: 'auditlog' },
|
||||
SJB: { model: subJobModel, idField: '_id', type: 'subJob' },
|
||||
FLS: { model: filamentStockModel, idField: '_id', type: 'filamentStock' },
|
||||
SEV: { model: stockEventModel, idField: '_id', type: 'stockEvent' },
|
||||
SAU: { model: stockAuditModel, idField: '_id', type: 'stockAudit' },
|
||||
PTS: { model: partStockModel, idField: '_id', type: 'partStock' },
|
||||
PDS: { model: null, idField: '_id', type: 'productStock' }, // No productStockModel found
|
||||
ADL: { model: auditLogModel, idField: '_id', type: 'auditLog' },
|
||||
USR: { model: userModel, idField: '_id', type: 'user' },
|
||||
NTY: { model: noteTypeModel, idField: '_id', type: 'notetype' },
|
||||
NTY: { model: noteTypeModel, idField: '_id', type: 'noteType' },
|
||||
NTE: { model: noteModel, idField: '_id', type: 'note' },
|
||||
DSZ: { model: documentSizeModel, idField: '_id', type: 'documentSize' },
|
||||
DTP: { model: documentTemplateModel, idField: '_id', type: 'documentTemplate' },
|
||||
HST: { model: hostModel, idField: '_id', type: 'host' },
|
||||
};
|
||||
|
||||
// Helper function to build search filter from query parameters
|
||||
@ -75,7 +80,7 @@ const buildSearchFilter = (params) => {
|
||||
return filter;
|
||||
};
|
||||
|
||||
const trimSpotlightObject = (object) => {
|
||||
const trimSpotlightObject = (object, objectType) => {
|
||||
return {
|
||||
_id: object._id,
|
||||
name: object.name || undefined,
|
||||
@ -84,6 +89,7 @@ const trimSpotlightObject = (object) => {
|
||||
email: object.email || undefined,
|
||||
color: object.color || undefined,
|
||||
updatedAt: object.updatedAt || undefined,
|
||||
objectType: objectType || undefined,
|
||||
};
|
||||
};
|
||||
|
||||
@ -105,11 +111,11 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
||||
res.status(400).send({ error: 'Invalid or unsupported prefix' });
|
||||
return;
|
||||
}
|
||||
const { model, idField } = prefixEntry;
|
||||
const { model, idField, type } = prefixEntry;
|
||||
|
||||
// Validate ObjectId if the idField is '_id'
|
||||
if (idField === '_id' && !mongoose.Types.ObjectId.isValid(suffix)) {
|
||||
res.status(404).send({ error: `${prefix} not found` });
|
||||
res.status(200).send([]);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -118,11 +124,11 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
||||
queryObj[idField] = suffix.toLowerCase();
|
||||
let doc = await model.findOne(queryObj).lean();
|
||||
if (!doc) {
|
||||
res.status(404).send({ error: `${prefix} not found` });
|
||||
res.status(200).send([]);
|
||||
return;
|
||||
}
|
||||
// Build the response with only the required fields
|
||||
const response = trimSpotlightObject(doc);
|
||||
const response = trimSpotlightObject(doc, type);
|
||||
res.status(200).send(response);
|
||||
return;
|
||||
}
|
||||
@ -136,7 +142,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
||||
res.status(400).send({ error: 'Invalid or unsupported prefix' });
|
||||
return;
|
||||
}
|
||||
const { model } = prefixEntry;
|
||||
const { model, type } = prefixEntry;
|
||||
|
||||
// Use req.query for search parameters
|
||||
|
||||
@ -153,11 +159,50 @@ export const getSpotlightRouteHandler = async (req, res) => {
|
||||
const docs = await model.find(searchFilter).limit(limit).sort({ updatedAt: -1 }).lean();
|
||||
|
||||
// Format response
|
||||
const response = docs.map((doc) => trimSpotlightObject(doc));
|
||||
const response = docs.map((doc) => trimSpotlightObject(doc, type));
|
||||
|
||||
res.status(200).send(response);
|
||||
return;
|
||||
}
|
||||
|
||||
// If no query params and no prefix, search all models
|
||||
if (Object.keys(queryParams).length === 0 && (!prefix || !PREFIX_MODEL_MAP[prefix])) {
|
||||
// Search all models for the query string in the 'name' field
|
||||
const searchTerm = query;
|
||||
if (!searchTerm || searchTerm.length < 3) {
|
||||
res.status(200).send([]);
|
||||
return;
|
||||
}
|
||||
// Only use models that are not null
|
||||
const allModelEntries = Object.values(PREFIX_MODEL_MAP).filter((entry) => entry.model);
|
||||
// Run all searches in parallel
|
||||
const searchPromises = allModelEntries.map(async (entry) => {
|
||||
try {
|
||||
const docs = await entry.model
|
||||
.find({ name: { $regex: searchTerm, $options: 'i' } })
|
||||
.limit(5)
|
||||
.sort({ updatedAt: -1 })
|
||||
.lean();
|
||||
return docs.map((doc) => trimSpotlightObject(doc, entry.type));
|
||||
} catch (e) {
|
||||
return [];
|
||||
}
|
||||
});
|
||||
let results = await Promise.all(searchPromises);
|
||||
// Flatten and deduplicate by _id
|
||||
let flatResults = results.flat();
|
||||
const seen = new Set();
|
||||
const deduped = [];
|
||||
for (const obj of flatResults) {
|
||||
if (!seen.has(String(obj._id))) {
|
||||
seen.add(String(obj._id));
|
||||
deduped.push(obj);
|
||||
}
|
||||
if (deduped.length >= 10) break;
|
||||
}
|
||||
res.status(200).send(deduped);
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error in spotlight lookup:', error);
|
||||
res.status(500).send({ error: error });
|
||||
|
||||
@ -1,13 +1,19 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
|
||||
import { filamentModel } from '../../schemas/management/filament.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import multer from 'multer';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import mongoose from 'mongoose';
|
||||
import { newAuditLog, getAuditLogs } from '../../utils.js';
|
||||
import { extractConfigBlock } from '../../utils.js';
|
||||
import {
|
||||
deleteObject,
|
||||
editObject,
|
||||
getObject,
|
||||
listObjects,
|
||||
listObjectsByProperties,
|
||||
newObject,
|
||||
} from '../../database/database.js';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
@ -38,6 +44,156 @@ const gcodeUpload = multer({
|
||||
},
|
||||
}).single('gcodeFile'); // The name attribute of the file input in the HTML form
|
||||
|
||||
/**
 * List gcode files with pagination, filtering, search, and sorting.
 *
 * Thin wrapper over the shared listObjects() helper; populates the linked
 * `filament` reference on each document.
 *
 * Bug fix: removed a leftover `console.log('req.user', req.user)` debug
 * statement that dumped the authenticated user on every request.
 *
 * @param {import('express').Request} req
 * @param {import('express').Response} res
 * @param {number} [page=1] - 1-based page number.
 * @param {number} [limit=25] - Page size.
 * @param {string} [property=''] - When set, return distinct values of this property.
 * @param {object} [filter={}] - Mongo-style filter.
 * @param {string} [search=''] - Free-text search term.
 * @param {string} [sort=''] - Field to sort by.
 * @param {string} [order='ascend'] - 'ascend' or 'descend'.
 */
export const listGCodeFilesRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = '',
  filter = {},
  search = '',
  sort = '',
  order = 'ascend'
) => {
  const result = await listObjects({
    model: gcodeFileModel,
    page,
    limit,
    property,
    filter,
    search,
    sort,
    order,
    populate: 'filament',
  });

  if (result?.error) {
    logger.error('Error listing gcodefiles.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of gcodefiles (Page ${page}, Limit ${limit}). Count: ${result.length}`);
  res.send(result);
};
|
||||
|
||||
/**
 * List gcode files projected to a set of properties, optionally filtered.
 *
 * Delegates to the shared listObjectsByProperties() helper, populating the
 * linked `filament` reference, and relays either the helper's error payload
 * (with its numeric status code) or the result list.
 *
 * @param {import('express').Request} req
 * @param {import('express').Response} res
 * @param {string} [properties=''] - Properties to project.
 * @param {object} [filter={}] - Mongo-style filter.
 */
export const listGCodeFilesByPropertiesRouteHandler = async (
  req,
  res,
  properties = '',
  filter = {}
) => {
  const outcome = await listObjectsByProperties({
    model: gcodeFileModel,
    properties,
    filter,
    populate: 'filament',
  });

  // Forward helper-level failures verbatim with their status code.
  if (outcome?.error) {
    logger.error('Error listing gcodefiles.');
    res.status(outcome.code).send(outcome);
    return;
  }

  logger.debug(`List of gcodefiles. Count: ${outcome.length}`);
  res.send(outcome);
};
|
||||
|
||||
/**
 * Fetch a single gcode file by its route :id parameter.
 *
 * Delegates to the shared getObject() helper with the `filament` reference
 * populated; 404-style failures are produced by the helper and relayed with
 * their numeric status code.
 */
export const getGCodeFileRouteHandler = async (req, res) => {
  const { id } = req.params;

  const outcome = await getObject({
    model: gcodeFileModel,
    id,
    populate: 'filament',
  });

  if (outcome?.error) {
    logger.warn(`GCode file not found with supplied id.`);
    return res.status(outcome.code).send(outcome);
  }

  logger.debug(`Retreived gcodefile with ID: ${id}`);
  res.send(outcome);
};
|
||||
|
||||
/**
 * Edit an existing gcode file.
 *
 * Builds the update payload from the request body and delegates to the shared
 * editObject() helper, which writes the audit log before applying the update.
 *
 * Bug fix: on error the handler previously called `res.status(result)`,
 * passing the whole result object where Express expects a numeric status
 * code; it now uses `result.code`.
 */
export const editGCodeFileRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`GCode file with ID: ${id}`);

  const updateData = {
    updatedAt: new Date(),
    name: req.body.name,
    moonraker: req.body.moonraker,
    tags: req.body.tags,
    vendor: req.body.vendor,
  };

  // editObject creates the audit log before updating the document
  const result = await editObject({
    model: gcodeFileModel,
    id,
    updateData,
    user: req.user,
  });

  if (result.error) {
    logger.error('Error editing gcode file:', result.error);
    // Bug fix: numeric status code, not the result object
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited gcode file with ID: ${id}`);

  res.send(result);
};
|
||||
|
||||
/**
 * Create a new gcode file record via the shared newObject() helper.
 *
 * NOTE(review): the payload fields (contact, country, website, phone, email)
 * look copy-pasted from the vendor handler rather than gcode-file fields
 * (cf. editGCodeFileRouteHandler which uses name/moonraker/tags/vendor) —
 * confirm against the gcodeFile schema before relying on this handler.
 */
export const newGCodeFileRouteHandler = async (req, res) => {
  const { contact, country, name, website, phone, email } = req.body;

  const payload = {
    updatedAt: new Date(),
    contact,
    country,
    name,
    website,
    phone,
    email,
  };

  const outcome = await newObject({
    model: gcodeFileModel,
    newData: payload,
    user: req.user,
  });

  // Forward helper-level failures verbatim with their status code.
  if (outcome.error) {
    logger.error('No gcode file created:', outcome.error);
    return res.status(outcome.code).send(outcome);
  }

  logger.debug(`New gcode file with ID: ${outcome._id}`);

  res.send(outcome);
};
|
||||
|
||||
/**
 * Delete a gcode file by its route :id parameter.
 *
 * Delegates to the shared deleteObject() helper (which also handles audit
 * logging) and relays either the helper's error payload with its numeric
 * status code, or the successful result, back to the client.
 */
export const deleteGCodeFileRouteHandler = async (req, res) => {
  const fileId = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`GCode file with ID: ${fileId}`);

  const outcome = await deleteObject({
    model: gcodeFileModel,
    id: fileId,
    user: req.user,
  });

  // Helper reported a failure — forward its status code and payload as-is.
  if (outcome.error) {
    logger.error('No gcode file deleted:', outcome.error);
    return res.status(outcome.code).send(outcome);
  }

  logger.debug(`Deleted gcode file with ID: ${outcome._id}`);

  res.send(outcome);
};
|
||||
|
||||
// Check file type
|
||||
function checkFileType(file, cb) {
|
||||
// Allowed ext
|
||||
@ -53,112 +209,6 @@ function checkFileType(file, cb) {
|
||||
}
|
||||
}
|
||||
|
||||
export const listGCodeFilesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
|
||||
let gcodeFile;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (search) {
|
||||
// Add a text search match stage for name and brand fields
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$text: {
|
||||
$search: search,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'filaments', // The name of the Filament collection
|
||||
localField: 'filament',
|
||||
foreignField: '_id',
|
||||
as: 'filament',
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$unwind: {
|
||||
path: '$filament',
|
||||
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$addFields: {
|
||||
filament: '$filament',
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'vendors', // The collection name (usually lowercase plural)
|
||||
localField: 'filament.vendor', // The field in your current model
|
||||
foreignField: '_id', // The field in the products collection
|
||||
as: 'filament.vendor', // The output field name
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({ $unwind: '$filament.vendor' });
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
}
|
||||
|
||||
if (property != '') {
|
||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||
} else {
|
||||
aggregateCommand.push({
|
||||
$project: {
|
||||
'filament.gcodeFileInfo.estimatedPrintingTimeNormalMode': 0,
|
||||
url: 0,
|
||||
'filament.image': 0,
|
||||
'filament.createdAt': 0,
|
||||
'filament.updatedAt': 0,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Add sorting if sort parameter is provided
|
||||
if (sort) {
|
||||
const sortOrder = order === 'descend' ? -1 : 1;
|
||||
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
||||
}
|
||||
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
console.log(aggregateCommand);
|
||||
|
||||
gcodeFile = await gcodeFileModel.aggregate(aggregateCommand);
|
||||
|
||||
logger.trace(
|
||||
`List of gcode files (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
|
||||
gcodeFile
|
||||
);
|
||||
res.send(gcodeFile);
|
||||
} catch (error) {
|
||||
logger.error('Error listing gcode files:', error);
|
||||
res.status(500).send({ error: error });
|
||||
}
|
||||
};
|
||||
|
||||
export const getGCodeFileContentRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
@ -198,94 +248,6 @@ export const getGCodeFileContentRouteHandler = async (req, res) => {
|
||||
}
|
||||
};
|
||||
|
||||
export const editGCodeFileRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
// Fetch the gcodeFile with the given remote address
|
||||
const gcodeFile = await gcodeFileModel.findOne({ _id: id });
|
||||
|
||||
if (!gcodeFile) {
|
||||
// Error handling
|
||||
logger.warn(`GCodeFile not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Print job not found.' });
|
||||
}
|
||||
|
||||
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
|
||||
|
||||
try {
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
filament: req.body?.filament?._id,
|
||||
};
|
||||
|
||||
// Create audit log before updating
|
||||
await newAuditLog(gcodeFile.toObject(), updateData, id, 'gcodeFile', req.user._id, 'user');
|
||||
|
||||
const result = await gcodeFileModel.updateOne({ _id: id }, { $set: updateData });
|
||||
if (result.nModified === 0) {
|
||||
logger.error('No gcodeFile updated.');
|
||||
res.status(500).send({ error: 'No gcodeFiles updated.' });
|
||||
}
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating gcodeFile:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
}
|
||||
res.send('OK');
|
||||
} catch (fetchError) {
|
||||
logger.error('Error fetching gcodeFile:', fetchError);
|
||||
//res.status(500).send({ error: fetchError.message });
|
||||
}
|
||||
};
|
||||
|
||||
export const newGCodeFileRouteHandler = async (req, res) => {
|
||||
var filament = null;
|
||||
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.body.filament._id);
|
||||
// Fetch the filament with the given remote address
|
||||
filament = await filamentModel.findOne({
|
||||
_id: id,
|
||||
});
|
||||
|
||||
if (!filament) {
|
||||
logger.warn(`Filament not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Filament not found.' });
|
||||
}
|
||||
logger.trace(`Filament with ID: ${id}:`, filament);
|
||||
} catch (error) {
|
||||
logger.error('Error fetching filament:', error);
|
||||
return res.status(500).send({ error: error.message });
|
||||
}
|
||||
|
||||
try {
|
||||
const newGCodeFile = {
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
gcodeFileInfo: req.body.gcodeFileInfo,
|
||||
filament: req.body.filament._id,
|
||||
name: req.body.name,
|
||||
cost: (filament.cost / 1000) * req.body.gcodeFileInfo.filamentUsedG,
|
||||
};
|
||||
|
||||
const result = await gcodeFileModel.create(newGCodeFile);
|
||||
if (result.nCreated === 0) {
|
||||
logger.error('No gcode file created.');
|
||||
res.status(500).send({ error: 'No gcode file created.' });
|
||||
}
|
||||
|
||||
// Create audit log for new gcodefile
|
||||
await newAuditLog({}, newGCodeFile, result._id, 'gcodeFile', req.user._id, 'user');
|
||||
|
||||
res.status(200).send({ status: 'ok' });
|
||||
} catch (updateError) {
|
||||
logger.error('Error creating gcode file:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
}
|
||||
};
|
||||
|
||||
export const parseGCodeFileHandler = async (req, res) => {
|
||||
try {
|
||||
// Use the same upload middleware as the uploadGCodeFileContentRouteHandler
|
||||
@ -383,30 +345,3 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
|
||||
res.status(500).send({ error: fetchError.message });
|
||||
}
|
||||
};
|
||||
|
||||
export const getGCodeFileRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
// Fetch the gcodeFile with the given remote address
|
||||
const gcodeFile = await gcodeFileModel
|
||||
.findOne({
|
||||
_id: id,
|
||||
})
|
||||
.populate('filament');
|
||||
|
||||
if (!gcodeFile) {
|
||||
logger.warn(`GCodeFile not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Print job not found.' });
|
||||
}
|
||||
|
||||
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
|
||||
|
||||
const auditLogs = await getAuditLogs(id);
|
||||
|
||||
res.send({ ...gcodeFile._doc, auditLogs: auditLogs });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching GCodeFile:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
}
|
||||
};
|
||||
|
||||
@ -1,151 +1,133 @@
|
||||
import dotenv from 'dotenv';
|
||||
import mongoose from 'mongoose';
|
||||
import { jobModel } from '../../schemas/production/job.schema.js';
|
||||
import { subJobModel } from '../../schemas/production/subjob.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import { getAuditLogs } from '../../utils.js';
|
||||
import { deleteObject, getObject, listObjects, newObject } from '../../database/database.js';
|
||||
import { subJobModel } from '../../schemas/production/subjob.schema.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Jobs');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
export const listJobsRouteHandler = async (req, res, page = 1, limit = 25) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
export const listJobsRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
const result = await listObjects({
|
||||
model: jobModel,
|
||||
page,
|
||||
limit,
|
||||
property,
|
||||
filter,
|
||||
search,
|
||||
sort,
|
||||
order,
|
||||
populate: ['gcodeFile'],
|
||||
});
|
||||
|
||||
// Fetch users with pagination
|
||||
const jobs = await jobModel
|
||||
.find()
|
||||
.sort({ createdAt: -1 })
|
||||
.skip(skip)
|
||||
.limit(limit)
|
||||
.populate('subJobs', 'state')
|
||||
.populate('gcodeFile', 'name');
|
||||
|
||||
logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
|
||||
res.send(jobs);
|
||||
} catch (error) {
|
||||
logger.error('Error listing print jobs:', error);
|
||||
res.status(500).send({ error: error });
|
||||
if (result?.error) {
|
||||
logger.error('Error listing jobs.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of jobs (Page ${page}, Limit ${limit}). Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getJobRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
// Fetch the job with the given remote address
|
||||
const job = await jobModel
|
||||
.findOne({
|
||||
_id: id,
|
||||
})
|
||||
.populate('printers', 'name state')
|
||||
.populate('gcodeFile')
|
||||
.populate('subJobs')
|
||||
.populate('notes');
|
||||
|
||||
if (!job) {
|
||||
logger.warn(`Job not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Print job not found.' });
|
||||
}
|
||||
|
||||
logger.trace(`Job with ID: ${id}:`, job);
|
||||
|
||||
const targetIds = [id, ...job.subJobs.map((subJob) => subJob._id)];
|
||||
const auditLogs = await getAuditLogs(targetIds.map((id) => new mongoose.Types.ObjectId(id)));
|
||||
|
||||
res.send({ ...job._doc, auditLogs: auditLogs });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching job:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
const id = req.params.id;
|
||||
const result = await getObject({
|
||||
model: jobModel,
|
||||
id,
|
||||
populate: ['gcodeFile', 'printers'],
|
||||
});
|
||||
if (result?.error) {
|
||||
logger.warn(`Job not found with supplied id.`);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
logger.debug(`Retreived job with ID: ${id}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const editJobRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
// Fetch the job with the given remote address
|
||||
const job = await jobModel.findOne({ _id: id });
|
||||
|
||||
if (!job) {
|
||||
logger.warn(`Job not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Print job not found.' });
|
||||
}
|
||||
|
||||
logger.trace(`Job with ID: ${id}:`, job);
|
||||
|
||||
const updateData = req.body;
|
||||
|
||||
const result = await jobModel.updateOne({ _id: id }, { $set: updateData });
|
||||
|
||||
if (result.nModified === 0) {
|
||||
logger.warn('No jobs updated.');
|
||||
return res.status(400).send({ error: 'No jobs updated.' });
|
||||
}
|
||||
|
||||
res.send({ message: 'Print job updated successfully' });
|
||||
} catch (error) {
|
||||
logger.error('Error updating job:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
export const newJobRouteHandler = async (req, res) => {
|
||||
const newData = {
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
quantity: req.body.quantity,
|
||||
printers: req.body.printers,
|
||||
gcodeFile: req.body.gcodeFile,
|
||||
state: { type: 'draft' },
|
||||
};
|
||||
const result = await newObject({
|
||||
model: jobModel,
|
||||
newData,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No job created:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
};
|
||||
|
||||
export const createJobRouteHandler = async (req, res) => {
|
||||
try {
|
||||
const { gcodeFile, printers, quantity = 1 } = req.body;
|
||||
logger.debug(`New job with ID: ${result._id}`);
|
||||
|
||||
if (!printers || printers.length === 0) {
|
||||
return res.status(400).send({ error: 'At least one printer must be specified' });
|
||||
}
|
||||
var printerCount = 0;
|
||||
|
||||
// Convert printer IDs to ObjectIds
|
||||
const printerIds = printers.map((id) => new mongoose.Types.ObjectId(id));
|
||||
|
||||
// Create new print job
|
||||
const newJob = new jobModel({
|
||||
state: { type: 'draft' },
|
||||
printers: printerIds,
|
||||
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
|
||||
quantity,
|
||||
subJobs: [], // Initialize empty array for subjob references
|
||||
for (let i = 0; i < newData.quantity; i++) {
|
||||
const newSubJobData = {
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
startedAt: null,
|
||||
printer: newData.printers[printerCount],
|
||||
gcodeFile: req.body.gcodeFile,
|
||||
number: i + 1,
|
||||
job: result._id,
|
||||
state: { type: 'draft' },
|
||||
};
|
||||
const subJobResult = await newObject({
|
||||
model: subJobModel,
|
||||
newData: newSubJobData,
|
||||
user: req.user,
|
||||
});
|
||||
|
||||
// Save the print job first to get its ID
|
||||
const savedJob = await newJob.save();
|
||||
|
||||
// Create subjobs array with sequential numbers based on quantity
|
||||
const subJobs = await Promise.all(
|
||||
Array.from({ length: quantity }, (_, index) => {
|
||||
const subJob = new subJobModel({
|
||||
printer: printerIds[index % printerIds.length], // Distribute across available printers
|
||||
job: savedJob._id,
|
||||
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
|
||||
subJobId: `subjob-${index + 1}`,
|
||||
state: { type: 'draft' },
|
||||
number: index + 1,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
});
|
||||
return subJob.save();
|
||||
})
|
||||
);
|
||||
|
||||
// Update the print job with the subjob references
|
||||
savedJob.subJobs = subJobs.map((subJob) => subJob._id);
|
||||
await savedJob.save();
|
||||
|
||||
logger.trace(`Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`);
|
||||
res.status(201).send({ job: savedJob, subJobs });
|
||||
} catch (error) {
|
||||
logger.error('Error creating print job:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
if (subJobResult.error) {
|
||||
logger.error('No sub job created:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
if (printerCount >= newData.printers.length - 1) {
|
||||
printerCount = 0;
|
||||
} else {
|
||||
printerCount += 1;
|
||||
}
|
||||
}
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const deleteJobRouteHandler = async (req, res) => {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
logger.trace(`Job with ID: ${id}`);
|
||||
|
||||
const result = await deleteObject({
|
||||
model: jobModel,
|
||||
id,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No job deleted:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`Deleted job with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getJobStatsRouteHandler = async (req, res) => {
|
||||
|
||||
@ -1,158 +1,168 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { printerModel } from '../../schemas/production/printer.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import { newAuditLog, getAuditLogs } from '../../utils.js';
|
||||
import {
|
||||
deleteObject,
|
||||
editObject,
|
||||
getObject,
|
||||
listObjects,
|
||||
listObjectsByProperties,
|
||||
newObject,
|
||||
} from '../../database/database.js';
|
||||
import mongoose from 'mongoose';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Printers');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
export const listPrintersRouteHandler = async (req, res, page = 1, limit = 25) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
export const listPrintersRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
const result = await listObjects({
|
||||
model: printerModel,
|
||||
page,
|
||||
limit,
|
||||
property,
|
||||
filter,
|
||||
search,
|
||||
sort,
|
||||
order,
|
||||
});
|
||||
|
||||
// Fetch users with pagination
|
||||
const printers = await printerModel.find().skip(skip).limit(limit);
|
||||
|
||||
logger.trace(`List of printers (Page ${page}, Limit ${limit}):`);
|
||||
res.send(printers);
|
||||
} catch (error) {
|
||||
logger.error('Error listing users:', error);
|
||||
res.status(500).send({ error: error });
|
||||
if (result?.error) {
|
||||
logger.error('Error listing printers.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of printers (Page ${page}, Limit ${limit}). Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const listPrintersByPropertiesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
properties = '',
|
||||
filter = {}
|
||||
) => {
|
||||
const result = await listObjectsByProperties({
|
||||
model: printerModel,
|
||||
properties,
|
||||
filter,
|
||||
populate: 'vendor',
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing printers.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of printers. Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getPrinterRouteHandler = async (req, res) => {
|
||||
const id = req.params.id;
|
||||
|
||||
try {
|
||||
// Fetch the printer with the given remote address
|
||||
const printer = await printerModel
|
||||
.findOne({ _id: id })
|
||||
.populate('subJobs')
|
||||
.populate('currentJob')
|
||||
.populate({
|
||||
path: 'currentJob',
|
||||
populate: {
|
||||
path: 'gcodeFile',
|
||||
},
|
||||
})
|
||||
.populate('currentSubJob')
|
||||
.populate({
|
||||
path: 'subJobs',
|
||||
populate: {
|
||||
path: 'job',
|
||||
},
|
||||
})
|
||||
.populate('vendor')
|
||||
.populate({
|
||||
path: 'currentFilamentStock',
|
||||
populate: {
|
||||
path: 'filament',
|
||||
},
|
||||
});
|
||||
|
||||
if (!printer) {
|
||||
logger.warn(`Printer with id ${id} not found.`);
|
||||
return res.status(404).send({ error: 'Printer not found' });
|
||||
}
|
||||
|
||||
logger.trace(`Printer with id ${id}:`, printer);
|
||||
|
||||
const auditLogs = await getAuditLogs(id);
|
||||
|
||||
res.send({ ...printer._doc, auditLogs: auditLogs });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching printer:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
const result = await getObject({
|
||||
model: printerModel,
|
||||
id,
|
||||
populate: ['vendor', 'host'],
|
||||
});
|
||||
if (result?.error) {
|
||||
logger.warn(`Printer not found with supplied id.`);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
logger.debug(`Retreived printer with ID: ${id}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const editPrinterRouteHandler = async (req, res) => {
|
||||
const id = req.params.id;
|
||||
try {
|
||||
// Fetch the printer first to get the old state
|
||||
const printer = await printerModel.findOne({ _id: id });
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
if (!printer) {
|
||||
logger.warn(`Printer not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Printer not found.' });
|
||||
}
|
||||
logger.trace(`Printer with ID: ${id}`);
|
||||
|
||||
try {
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
moonraker: req.body.moonraker,
|
||||
tags: req.body.tags,
|
||||
name: req.body.name,
|
||||
vendor: req.body.vendor.id,
|
||||
};
|
||||
const updateData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
moonraker: req.body.moonraker,
|
||||
tags: req.body.tags,
|
||||
vendor: req.body.vendor,
|
||||
host: req.body.host,
|
||||
};
|
||||
// Create audit log before updating
|
||||
const result = await editObject({
|
||||
model: printerModel,
|
||||
id,
|
||||
updateData,
|
||||
user: req.user,
|
||||
populate: 'vendor',
|
||||
});
|
||||
|
||||
// Create audit log before updating
|
||||
await newAuditLog(printer.toObject(), updateData, id, 'printer', req.user._id, 'user');
|
||||
|
||||
const result = await printerModel.updateOne({ _id: id }, { $set: updateData });
|
||||
if (result.nModified === 0) {
|
||||
logger.error('No printers updated.');
|
||||
res.status(500).send({ error: 'No printers updated.' });
|
||||
}
|
||||
} catch (updateError) {
|
||||
logger.error('Error updating printer:', updateError);
|
||||
res.status(500).send({ error: updateError.message });
|
||||
}
|
||||
res.send('OK');
|
||||
} catch (fetchError) {
|
||||
logger.error('Error fetching printer:', fetchError);
|
||||
res.status(500).send({ error: fetchError.message });
|
||||
if (result.error) {
|
||||
logger.error('Error editing printer:', result.error);
|
||||
res.status(result).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`Edited printer with ID: ${id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const createPrinterRouteHandler = async (req, res) => {
|
||||
try {
|
||||
const { name, moonraker, tags = [], firmware = 'n/a' } = req.body;
|
||||
|
||||
// Validate required fields
|
||||
if (!name || !moonraker) {
|
||||
logger.warn('Missing required fields in printer creation request');
|
||||
return res.status(400).send({
|
||||
error: 'Missing required fields. name and moonraker configuration are required.',
|
||||
});
|
||||
}
|
||||
|
||||
// Validate moonraker configuration
|
||||
if (!moonraker.host || !moonraker.port || !moonraker.protocol) {
|
||||
logger.warn('Invalid moonraker configuration in printer creation request');
|
||||
return res.status(400).send({
|
||||
error: 'Invalid moonraker configuration. host, port, protocol are required.',
|
||||
});
|
||||
}
|
||||
|
||||
// Create new printer instance
|
||||
const newPrinter = new printerModel({
|
||||
name,
|
||||
moonraker,
|
||||
tags,
|
||||
firmware,
|
||||
online: false,
|
||||
state: {
|
||||
type: 'offline',
|
||||
},
|
||||
});
|
||||
|
||||
// Save the printer
|
||||
const savedPrinter = await newPrinter.save();
|
||||
|
||||
// Create audit log for new printer
|
||||
await newAuditLog({}, newPrinter.toObject(), savedPrinter._id, 'printer', req.user._id, 'user');
|
||||
|
||||
logger.info(`Created new printer: ${name}`);
|
||||
res.status(201).send(savedPrinter);
|
||||
} catch (error) {
|
||||
logger.error('Error creating printer:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
export const newPrinterRouteHandler = async (req, res) => {
|
||||
const newData = {
|
||||
updatedAt: new Date(),
|
||||
name: req.body.name,
|
||||
moonraker: req.body.moonraker,
|
||||
tags: req.body.tags,
|
||||
vendor: req.body.vendor,
|
||||
host: req.body.host,
|
||||
};
|
||||
const result = await newObject({
|
||||
model: printerModel,
|
||||
newData,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No printer created:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`New printer with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const deletePrinterRouteHandler = async (req, res) => {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
|
||||
logger.trace(`Printer with ID: ${id}`);
|
||||
|
||||
const result = await deleteObject({
|
||||
model: printerModel,
|
||||
id,
|
||||
user: req.user,
|
||||
});
|
||||
if (result.error) {
|
||||
logger.error('No printer deleted:', result.error);
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
|
||||
logger.debug(`Deleted printer with ID: ${result._id}`);
|
||||
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getPrinterStatsRouteHandler = async (req, res) => {
|
||||
|
||||
118
src/services/production/subjobs.js
Normal file
118
src/services/production/subjobs.js
Normal file
@ -0,0 +1,118 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { subJobModel } from '../../schemas/production/subjob.schema.js';
|
||||
import log4js from 'log4js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('SubJobs');
|
||||
logger.level = process.env.LOG_LEVEL;
|
||||
|
||||
export const listSubJobsRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
|
||||
let subJobs;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (search) {
|
||||
// Add a text search match stage for name and other searchable fields
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$text: {
|
||||
$search: search,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
// Lookup printer
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'printers', // The name of the Printer collection
|
||||
localField: 'printer',
|
||||
foreignField: '_id',
|
||||
as: 'printer',
|
||||
},
|
||||
});
|
||||
|
||||
// Lookup job
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'jobs', // The name of the Printer collection
|
||||
localField: 'job',
|
||||
foreignField: '_id',
|
||||
as: 'job',
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$unwind: {
|
||||
path: '$printer',
|
||||
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$unwind: {
|
||||
path: '$job',
|
||||
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
|
||||
},
|
||||
});
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
}
|
||||
|
||||
if (property != '') {
|
||||
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
|
||||
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
|
||||
} else {
|
||||
aggregateCommand.push({
|
||||
$project: {
|
||||
state: 1,
|
||||
_id: 1,
|
||||
createdAt: 1,
|
||||
startedAt: 1,
|
||||
'printer._id': 1,
|
||||
'job._id': 1,
|
||||
'printer.name': 1,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Add sorting if sort parameter is provided
|
||||
if (sort) {
|
||||
const sortOrder = order === 'descend' ? -1 : 1;
|
||||
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
|
||||
} else {
|
||||
// Default sorting by createdAt descending
|
||||
aggregateCommand.push({ $sort: { createdAt: -1 } });
|
||||
}
|
||||
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
console.log(aggregateCommand);
|
||||
|
||||
subJobs = await subJobModel.aggregate(aggregateCommand);
|
||||
|
||||
logger.trace(
|
||||
`List of print subJobs (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
|
||||
subJobs
|
||||
);
|
||||
res.send(subJobs);
|
||||
} catch (error) {
|
||||
logger.error('Error listing print subJobs:', error);
|
||||
res.status(500).send({ error: error });
|
||||
}
|
||||
};
|
||||
224
src/utils.js
224
src/utils.js
@ -1,15 +1,20 @@
|
||||
import { ObjectId } from 'mongodb';
|
||||
import { auditLogModel } from './schemas/management/auditlog.schema.js';
|
||||
import { etcdServer } from './database/etcd.js';
|
||||
|
||||
function parseFilter(property, value) {
|
||||
if (typeof value === 'string') {
|
||||
const trimmed = value.trim();
|
||||
var trimmed = value.trim();
|
||||
if (trimmed.charAt(3) == ':') {
|
||||
trimmed = value.split(':')[1];
|
||||
}
|
||||
|
||||
// Handle booleans
|
||||
if (trimmed.toLowerCase() === 'true') return { [property]: true };
|
||||
if (trimmed.toLowerCase() === 'false') return { [property]: false };
|
||||
|
||||
// Handle ObjectId (24-char hex)
|
||||
|
||||
if (/^[a-f\d]{24}$/i.test(trimmed) && trimmed.length >= 24) {
|
||||
return { [property]: new ObjectId(trimmed) };
|
||||
}
|
||||
@ -261,50 +266,227 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
|
||||
return useCamelCase ? convertToCamelCase(configObject) : configObject;
|
||||
}
|
||||
|
||||
function getChangedValues(oldObj, newObj) {
|
||||
function getChangedValues(oldObj, newObj, old = false) {
|
||||
const changes = {};
|
||||
|
||||
// Check all keys in the new object
|
||||
for (const key in newObj) {
|
||||
// Skip if the key is _id or timestamps
|
||||
if (key === '_id' || key === 'createdAt' || key === 'updatedAt') continue;
|
||||
const combinedObj = { ...oldObj, ...newObj };
|
||||
|
||||
// If the old value is different from the new value, include it
|
||||
if (JSON.stringify(oldObj[key]) !== JSON.stringify(newObj[key])) {
|
||||
changes[key] = newObj[key];
|
||||
// Check all keys in the new object
|
||||
for (const key in combinedObj) {
|
||||
// Skip if the key is _id or timestamps
|
||||
if (key === 'createdAt' || key === 'updatedAt' || key === '_id') continue;
|
||||
|
||||
const oldVal = oldObj ? oldObj[key] : undefined;
|
||||
const newVal = newObj ? newObj[key] : undefined;
|
||||
|
||||
// If both values are objects (but not arrays or null), recurse
|
||||
if (
|
||||
oldVal &&
|
||||
newVal &&
|
||||
typeof oldVal === 'object' &&
|
||||
typeof newVal === 'object' &&
|
||||
!Array.isArray(oldVal) &&
|
||||
!Array.isArray(newVal) &&
|
||||
oldVal !== null &&
|
||||
newVal !== null
|
||||
) {
|
||||
if (oldVal?._id || newVal?._id) {
|
||||
if (JSON.stringify(oldVal?._id) !== JSON.stringify(newVal?._id)) {
|
||||
changes[key] = old ? oldVal : newVal;
|
||||
}
|
||||
} else {
|
||||
const nestedChanges = getChangedValues(oldVal, newVal, old);
|
||||
if (Object.keys(nestedChanges).length > 0) {
|
||||
changes[key] = nestedChanges;
|
||||
}
|
||||
}
|
||||
} else if (JSON.stringify(oldVal) !== JSON.stringify(newVal)) {
|
||||
// If the old value is different from the new value, include it
|
||||
changes[key] = old ? oldVal : newVal;
|
||||
}
|
||||
}
|
||||
|
||||
return changes;
|
||||
}
|
||||
|
||||
async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
|
||||
async function newAuditLog(newValue, parentId, parentType, user) {
|
||||
// Filter out createdAt and updatedAt from newValue
|
||||
const filteredNewValue = { ...newValue };
|
||||
delete filteredNewValue.createdAt;
|
||||
delete filteredNewValue.updatedAt;
|
||||
const auditLog = new auditLogModel({
|
||||
changes: {
|
||||
new: filteredNewValue,
|
||||
},
|
||||
parent: parentId,
|
||||
parentType,
|
||||
owner: user._id,
|
||||
ownerType: 'user',
|
||||
operation: 'new',
|
||||
});
|
||||
|
||||
await auditLog.save();
|
||||
|
||||
await distributeNew(auditLog._id, 'auditLog');
|
||||
}
|
||||
|
||||
async function editAuditLog(oldValue, newValue, parentId, parentType, user) {
|
||||
// Get only the changed values
|
||||
const changedValues = getChangedValues(oldValue, newValue);
|
||||
const changedOldValues = getChangedValues(oldValue, newValue, true);
|
||||
const changedNewValues = getChangedValues(oldValue, newValue, false);
|
||||
|
||||
// If no values changed, don't create an audit log
|
||||
if (Object.keys(changedValues).length === 0) {
|
||||
if (Object.keys(changedOldValues).length === 0 || Object.keys(changedNewValues).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const auditLog = new auditLogModel({
|
||||
oldValue,
|
||||
newValue: changedValues,
|
||||
target: targetId,
|
||||
targetModel,
|
||||
owner: ownerId,
|
||||
ownerModel,
|
||||
changes: {
|
||||
old: changedOldValues,
|
||||
new: changedNewValues,
|
||||
},
|
||||
parent: parentId,
|
||||
parentType,
|
||||
owner: user._id,
|
||||
ownerType: 'user',
|
||||
operation: 'edit',
|
||||
});
|
||||
|
||||
await auditLog.save();
|
||||
|
||||
await distributeNew(auditLog._id, 'auditLog');
|
||||
}
|
||||
|
||||
/**
 * Records the deletion of a document as an audit log entry, storing the
 * deleted values, then publishes the entry so other services are notified.
 *
 * @param {Object} deleteValue - Field values of the deleted document.
 * @param {string|Object} parentId - Id of the deleted document.
 * @param {string} parentType - Model name of the deleted document.
 * @param {Object} user - Acting user; only `user._id` is recorded as owner.
 */
async function deleteAuditLog(deleteValue, parentId, parentType, user) {
  const entry = new auditLogModel({
    changes: { old: deleteValue },
    parent: parentId,
    parentType,
    owner: user._id,
    ownerType: 'user',
    operation: 'delete',
  });

  await entry.save();
  await distributeNew(entry._id, 'auditLog');
}
/**
 * Fetches audit log entries for a single parent id or a list of parent ids,
 * with the owning user document populated.
 *
 * @param {string|Object|Array} idOrIds - One parent id, or an array of them.
 * @returns {Promise<Array>} Matching audit log documents.
 */
async function getAuditLogs(idOrIds) {
  // Entries are keyed by `parent` (see newAuditLog/editAuditLog/deleteAuditLog).
  if (Array.isArray(idOrIds)) {
    return auditLogModel.find({ parent: { $in: idOrIds } }).populate('owner');
  }
  return auditLogModel.find({ parent: idOrIds }).populate('owner');
}
/**
 * Publishes the updated object for an existing record to etcd so other
 * services pick up the change.
 *
 * @param {*} value - Serialized object payload to store.
 * @param {string|Object} id - Record id.
 * @param {string} type - Record type; pluralized into the key path.
 */
async function distributeUpdate(value, id, type) {
  const key = `/${type}s/${id}/object`;
  await etcdServer.setKey(key, value);
}
/**
 * Publishes the id of a newly created record to etcd so other services
 * are notified of its existence.
 *
 * @param {string|Object} id - Id of the new record.
 * @param {string} type - Record type; pluralized into the key path.
 */
async function distributeNew(id, type) {
  const key = `/${type}s/new`;
  await etcdServer.setKey(key, id);
}
/**
 * Shallowly replaces populated sub-documents with their bare `_id`.
 * Any property whose value is an object carrying a truthy `_id` becomes
 * just that `_id`; every other property is copied through unchanged.
 * Non-object inputs (and null) are returned as-is.
 *
 * NOTE: exported under this spelling ("flattern"), so the name is kept.
 *
 * @param {*} object - Candidate object to flatten.
 * @returns {*} A new object with populated references collapsed to ids.
 */
function flatternObjectIds(object) {
  if (!object || typeof object !== 'object') {
    return object;
  }

  const result = {};
  for (const [key, value] of Object.entries(object)) {
    const isPopulatedRef = value && typeof value === 'object' && value._id;
    // Collapse populated references; pass everything else through untouched.
    result[key] = isPopulatedRef ? value._id : value;
  }
  return result;
}
/**
 * Recursively rewrites id-like values into `{ _id: value }` wrappers.
 * A value is id-like when it is a mongodb ObjectId instance or exactly a
 * 24-character hex string. Keys already named `_id` are left untouched,
 * and Date values are passed through unchanged at every depth.
 *
 * Fix: Dates were previously preserved only when nested directly inside an
 * object; a Date reached at the top level or inside an array fell into the
 * generic-object branch and was mangled to `{}` by Object.entries.
 *
 * @param {*} input - Value, object, or array to expand.
 * @returns {*} A new structure with id-like values wrapped as `{ _id }`.
 */
function expandObjectIds(input) {
  // True for ObjectId instances and bare 24-hex-char strings.
  function isObjectId(val) {
    if (val instanceof ObjectId) return true;
    return typeof val === 'string' && /^[a-fA-F\d]{24}$/.test(val);
  }

  function expand(value) {
    if (Array.isArray(value)) {
      return value.map(expand);
    }
    // Preserve Dates before the generic-object branch can destructure them.
    if (value instanceof Date) {
      return value;
    }
    if (value && typeof value === 'object' && !(value instanceof ObjectId)) {
      const result = {};
      for (const [key, val] of Object.entries(value)) {
        if (key === '_id') {
          // Do not expand keys that are already named _id.
          result[key] = val;
        } else if (isObjectId(val)) {
          result[key] = { _id: val };
        } else if (Array.isArray(val)) {
          result[key] = val.map(expand);
        } else if (val instanceof Date) {
          result[key] = val;
        } else if (val && typeof val === 'object') {
          result[key] = expand(val);
        } else {
          result[key] = val;
        }
      }
      return result;
    }
    if (isObjectId(value)) {
      return { _id: value };
    }
    return value;
  }

  return expand(input);
}
/**
 * Builds a filter object from a request query, keeping only whitelisted keys.
 *
 * @param {Object} query - Incoming query parameters (e.g. req.query).
 * @param {string[]} allowedFilters - Keys permitted to appear in the filter.
 * @param {boolean} [parse=true] - When true, each pair is run through
 *   parseFilter; when false, the raw key/value pair is used verbatim.
 * @returns {Object} The combined filter object.
 */
function getFilter(query, allowedFilters, parse = true) {
  const filter = {};
  for (const key of Object.keys(query)) {
    if (!allowedFilters.includes(key)) {
      continue;
    }
    const piece = parse ? parseFilter(key, query[key]) : { [key]: query[key] };
    Object.assign(filter, piece);
  }
  return filter;
}
/**
 * Normalizes a properties argument into an array of strings.
 * Strings are split on commas, arrays are returned unchanged, and any
 * other type yields an empty array.
 *
 * @param {string|string[]|*} properties - Raw properties argument.
 * @returns {string[]} The normalized list of property names.
 */
function convertPropertiesString(properties) {
  if (Array.isArray(properties)) {
    return properties;
  }
  return typeof properties === 'string' ? properties.split(',') : [];
}
// Public API of this utility module.
export {
  parseFilter,
  convertToCamelCase,
  extractConfigBlock,
  newAuditLog,
  editAuditLog,
  deleteAuditLog,
  getAuditLogs,
  flatternObjectIds,
  expandObjectIds,
  distributeUpdate,
  distributeNew,
  getFilter,
  convertPropertiesString,
};
Loading…
x
Reference in New Issue
Block a user