Compare commits


9 Commits

SHA1 Message Date
97773c2ab2 Add optional 'message' field to alert schema: Enhanced the alert schema in printer.schema.js to include a non-required 'message' field for improved alert detail. 2025-09-05 23:26:12 +01:00
5cac77a975 Enhance printer filtering: Added 'host._id' to the allowed filters in the printers route to improve data retrieval capabilities. 2025-09-05 23:26:04 +01:00
2fdb77e4bb Update allowed filters in document templates route: Added 'global' and 'objectType' to enhance filtering capabilities for document templates. 2025-09-05 23:25:57 +01:00
0edc085fb7 Enhance filtering for filament stocks: Added 'filament._id' to the allowed filters in the inventory route to improve data retrieval capabilities. 2025-09-05 23:25:42 +01:00
695ff8efc7 Add NATS messaging for updates and deletes: Replaced Etcd operations with NATS publish calls in utils.js for distributing updates, new entries, and deletions. Updated logging in etcd.js to use trace level for improved verbosity during connection and operation handling. 2025-09-05 23:25:34 +01:00
4685cac563 Refactor note and subjob management routes: Enhanced filtering logic and added new route handlers for CRUD operations. Updated services to utilize database functions for improved data handling and error management. Implemented property-based listing for notes and subjobs, ensuring better data retrieval and organization. 2025-09-05 23:25:14 +01:00
12be496f22 Add document jobs management: Introduced new routes, schema, and service functions for managing document jobs. Updated index.js to include documentJobs route. Implemented CRUD operations and filtering capabilities for document jobs in the application. 2025-09-05 23:24:24 +01:00
bec46489d1 Add NATS integration: Implemented NATS server connection and messaging capabilities. Updated package.json and package-lock.json to include NATS dependencies. Enhanced index.js to connect to NATS and added documentJobs route for job management. 2025-09-05 23:23:50 +01:00
751c931e67 Refactor database operations: Enhanced filtering logic to translate keys ending with ._id for Mongoose, added optional distribution for new and delete operations, and implemented a recursive deletion function for child objects. Updated audit logging to handle distributed changes more effectively. 2025-09-05 23:23:26 +01:00
21 changed files with 1080 additions and 491 deletions

package-lock.json (generated; 52 changed lines)

@@ -9,6 +9,7 @@
"version": "1.0.0", "version": "1.0.0",
"license": "ISC", "license": "ISC",
"dependencies": { "dependencies": {
"@nats-io/transport-node": "^3.1.0",
"axios": "^1.11.0", "axios": "^1.11.0",
"bcrypt": "^6.0.0", "bcrypt": "^6.0.0",
"body-parser": "^2.2.0", "body-parser": "^2.2.0",
@@ -2924,6 +2925,51 @@
"sparse-bitfield": "^3.0.3" "sparse-bitfield": "^3.0.3"
} }
}, },
"node_modules/@nats-io/nats-core": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@nats-io/nats-core/-/nats-core-3.1.0.tgz",
"integrity": "sha512-xsSkLEGGcqNF+Ru8dMjPmKtfbBeq/U4meuJJX4Zi+5TBHpjpjNjs4YkCBC/pGYWnEum1/vdNPizjE1RdNHCyBg==",
"license": "Apache-2.0",
"dependencies": {
"@nats-io/nkeys": "2.0.3",
"@nats-io/nuid": "2.0.3"
}
},
"node_modules/@nats-io/nkeys": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/@nats-io/nkeys/-/nkeys-2.0.3.tgz",
"integrity": "sha512-JVt56GuE6Z89KUkI4TXUbSI9fmIfAmk6PMPknijmuL72GcD+UgIomTcRWiNvvJKxA01sBbmIPStqJs5cMRBC3A==",
"license": "Apache-2.0",
"dependencies": {
"tweetnacl": "^1.0.3"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@nats-io/nuid": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/@nats-io/nuid/-/nuid-2.0.3.tgz",
"integrity": "sha512-TpA3HEBna/qMVudy+3HZr5M3mo/L1JPofpVT4t0HkFGkz2Cn9wrlrQC8tvR8Md5Oa9//GtGG26eN0qEWF5Vqew==",
"license": "Apache-2.0",
"engines": {
"node": ">= 18.x"
}
},
"node_modules/@nats-io/transport-node": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@nats-io/transport-node/-/transport-node-3.1.0.tgz",
"integrity": "sha512-k5pH7IOKUetwXOMraVgcB5zG0wibcHOwJJuyuY1/5Q4K0XfBJDnb/IbczP5/JJWwMYfxSL9O+46ojtdBHvHRSw==",
"license": "Apache-2.0",
"dependencies": {
"@nats-io/nats-core": "3.1.0",
"@nats-io/nkeys": "2.0.3",
"@nats-io/nuid": "2.0.3"
},
"engines": {
"node": ">= 18.0.0"
}
},
"node_modules/@nicolo-ribaudo/chokidar-2": { "node_modules/@nicolo-ribaudo/chokidar-2": {
"version": "2.1.8-no-fsevents.3", "version": "2.1.8-no-fsevents.3",
"resolved": "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz", "resolved": "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz",
@@ -11193,6 +11239,12 @@
"license": "0BSD", "license": "0BSD",
"optional": true "optional": true
}, },
"node_modules/tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",
"integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==",
"license": "Unlicense"
},
"node_modules/type-check": { "node_modules/type-check": {
"version": "0.4.0", "version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",


@@ -4,6 +4,7 @@
"description": "", "description": "",
"main": "index.js", "main": "index.js",
"dependencies": { "dependencies": {
"@nats-io/transport-node": "^3.1.0",
"axios": "^1.11.0", "axios": "^1.11.0",
"bcrypt": "^6.0.0", "bcrypt": "^6.0.0",
"body-parser": "^2.2.0", "body-parser": "^2.2.0",


@@ -1,5 +1,5 @@
import dotenv from 'dotenv'; import dotenv from 'dotenv';
import { deleteAuditLog, expandObjectIds } from '../utils.js'; import { deleteAuditLog, distributeDelete, expandObjectIds } from '../utils.js';
import log4js from 'log4js'; import log4js from 'log4js';
import { editAuditLog, distributeUpdate, newAuditLog, distributeNew } from '../utils.js'; import { editAuditLog, distributeUpdate, newAuditLog, distributeNew } from '../utils.js';
@@ -38,17 +38,15 @@ export const listObjects = async ({
if (!sort || sort === '') { if (!sort || sort === '') {
sort = 'createdAt'; sort = 'createdAt';
} }
// Translate parent._id to parent for Mongoose
if (filter['parent._id']) {
filter.parent = filter['parent._id'];
delete filter['parent._id'];
}
// Translate owner._id to owner for Mongoose // Translate any key ending with ._id to remove the ._id suffix for Mongoose
if (filter['owner._id']) { Object.keys(filter).forEach((key) => {
filter.owner = filter['owner._id']; if (key.endsWith('._id')) {
delete filter['owner._id']; const baseKey = key.slice(0, -4); // Remove '._id' suffix
} filter[baseKey] = filter[key];
delete filter[key];
}
});
// Use find with population and filter // Use find with population and filter
let query = model let query = model
@@ -280,7 +278,7 @@ export const getObject = async ({ model, id, populate }) => {
return { error: 'Object not found.', code: 404 }; return { error: 'Object not found.', code: 404 };
} }
return result; return expandObjectIds(result);
} catch (error) { } catch (error) {
return { error: error, code: 500 }; return { error: error, code: 500 };
} }
@@ -329,7 +327,7 @@ export const editObject = async ({ model, id, updateData, user, populate }) => {
}; };
// Reusable function to create a new object // Reusable function to create a new object
export const newObject = async ({ model, newData, user = null }) => { export const newObject = async ({ model, newData, user = null }, distributeChanges = true) => {
try { try {
const parentType = model.modelName ? model.modelName : 'unknown'; const parentType = model.modelName ? model.modelName : 'unknown';
@@ -337,10 +335,12 @@ export const newObject = async ({ model, newData, user = null }) => {
if (!result || result.length === 0) { if (!result || result.length === 0) {
return { error: 'No object created.', code: 500 }; return { error: 'No object created.', code: 500 };
} }
const created = result; const created = expandObjectIds(result.toObject());
await newAuditLog(newData, created._id, parentType, user); await newAuditLog(newData, created._id, parentType, user);
await distributeNew(created._id, parentType); if (distributeChanges == true) {
await distributeNew(created, parentType);
}
return created; return created;
} catch (error) { } catch (error) {
@@ -350,7 +350,7 @@ export const newObject = async ({ model, newData, user = null }) => {
}; };
// Reusable function to delete an object by ID, with audit logging and distribution // Reusable function to delete an object by ID, with audit logging and distribution
export const deleteObject = async ({ model, id, user = null }) => { export const deleteObject = async ({ model, id, user = null }, distributeChanges = true) => {
try { try {
const parentType = model.modelName ? model.modelName : 'unknown'; const parentType = model.modelName ? model.modelName : 'unknown';
// Delete the object // Delete the object
@@ -359,14 +359,46 @@ export const deleteObject = async ({ model, id, user = null }) => {
if (!result) { if (!result) {
return { error: `${parentType} not found.`, code: 404 }; return { error: `${parentType} not found.`, code: 404 };
} }
// Audit log the deletion
await deleteAuditLog(result, id, parentType, user, 'delete');
// Distribute the deletion event
await distributeUpdate({ deleted: true }, id, parentType);
return { deleted: true, id }; const deleted = expandObjectIds(result.toObject());
// Audit log the deletion
await deleteAuditLog(deleted, id, parentType, user, 'delete');
if (distributeChanges == true) {
await distributeDelete(deleted, parentType);
}
return { deleted: true, object: deleted };
} catch (error) { } catch (error) {
logger.error('deleteObject error:', error); logger.error('deleteObject error:', error);
return { error: error.message, code: 500 }; return { error: error.message, code: 500 };
} }
}; };
// Helper function to recursively delete objects and their children
export const recursivelyDeleteChildObjects = async (
{ model, id, user = null },
distributeChanges = true
) => {
const deletedIds = [];
// Find all objects that have this object as their parent
const childObjects = await model.find({ parent: id });
// Recursively delete all children first
for (const childObject of childObjects) {
const childDeletedIds = await recursivelyDeleteChildObjects(
{ model, id: childObject._id, user },
false
);
deletedIds.push(...childDeletedIds);
}
// Delete the current object
await deleteObject({ model, id, user }, distributeChanges);
deletedIds.push(id);
return deletedIds;
};
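The listObjects change above replaces the hard-coded parent._id / owner._id handling with a generic rewrite of any filter key that ends in ._id, which is what lets the routes later in this change set whitelist filters such as filament._id, host._id and printer._id. A minimal sketch of that rewrite in isolation (the filter object is a stand-in for what getFilter() would build from a query string such as GET /filamentstocks?filament._id=<id>&state=open):

// Sketch only: mirrors the generic '._id' translation added to listObjects().
const filter = { 'filament._id': '64f1c2aa9d3e4b0012345678', state: 'open' };

Object.keys(filter).forEach((key) => {
  if (key.endsWith('._id')) {
    const baseKey = key.slice(0, -4); // drop the '._id' suffix
    filter[baseKey] = filter[key]; // Mongoose then matches the ObjectId reference field directly
    delete filter[key];
  }
});

console.log(filter); // { state: 'open', filament: '64f1c2aa9d3e4b0012345678' }

The new recursivelyDeleteChildObjects helper deletes depth-first, passing distributeChanges = false for every child so that only the top-level deletion is published, and returns the flat list of deleted ids.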


@@ -16,13 +16,13 @@ class EtcdServer {
this.client = null; this.client = null;
this.watchers = new Map(); this.watchers = new Map();
this.hosts = [`${ETCD_HOST}:${ETCD_PORT}`]; this.hosts = [`${ETCD_HOST}:${ETCD_PORT}`];
logger.debug(`EtcdServer constructor: hosts set to ${JSON.stringify(this.hosts)}`); logger.trace(`EtcdServer constructor: hosts set to ${JSON.stringify(this.hosts)}`);
} }
async connect() { async connect() {
if (!this.client) { if (!this.client) {
logger.info('Connecting to Etcd...'); logger.info('Connecting to Etcd...');
logger.debug(`Creating Etcd client with hosts ${JSON.stringify(this.hosts)}`); logger.trace(`Creating Etcd client with hosts ${JSON.stringify(this.hosts)}`);
this.client = new Etcd3({ this.client = new Etcd3({
hosts: this.hosts, hosts: this.hosts,
}); });
@@ -30,16 +30,16 @@ class EtcdServer {
// Test connection // Test connection
try { try {
await this.client.get('test-connection').string(); await this.client.get('test-connection').string();
logger.debug('Etcd client connected successfully.'); logger.trace('Etcd client connected successfully.');
} catch (error) { } catch (error) {
if (error.code === 'NOT_FOUND') { if (error.code === 'NOT_FOUND') {
logger.debug('Etcd client connected successfully (test key not found as expected).'); logger.trace('Etcd client connected successfully (test key not found as expected).');
} else { } else {
throw error; throw error;
} }
} }
} else { } else {
logger.debug('Etcd client already exists, skipping connection.'); logger.trace('Etcd client already exists, skipping connection.');
} }
return this.client; return this.client;
} }
@@ -47,7 +47,7 @@
async getClient() { async getClient() {
logger.trace('Checking if Etcd client exists.'); logger.trace('Checking if Etcd client exists.');
if (!this.client) { if (!this.client) {
logger.debug('No client found, calling connect().'); logger.trace('No client found, calling connect().');
await this.connect(); await this.connect();
} }
logger.trace('Returning Etcd client.'); logger.trace('Returning Etcd client.');
@@ -60,7 +60,7 @@
const stringValue = typeof value === 'string' ? value : JSON.stringify(value); const stringValue = typeof value === 'string' ? value : JSON.stringify(value);
await client.put(key).value(stringValue); await client.put(key).value(stringValue);
logger.debug(`Set key: ${key}, value: ${stringValue}`); logger.trace(`Set key: ${key}, value: ${stringValue}`);
return true; return true;
} }
@@ -69,7 +69,7 @@
try { try {
const value = await client.get(key).string(); const value = await client.get(key).string();
logger.debug(`Retrieved key: ${key}, value: ${value}`); logger.trace(`Retrieved key: ${key}, value: ${value}`);
// Try to parse as JSON, fallback to string // Try to parse as JSON, fallback to string
try { try {
@@ -79,7 +79,7 @@
} }
} catch (error) { } catch (error) {
if (error.code === 'NOT_FOUND') { if (error.code === 'NOT_FOUND') {
logger.debug(`Key not found: ${key}`); logger.trace(`Key not found: ${key}`);
return null; return null;
} }
throw error; throw error;
@@ -91,7 +91,7 @@
// Stop all watchers // Stop all watchers
for (const [key, watcher] of this.watchers) { for (const [key, watcher] of this.watchers) {
logger.debug(`Stopping watcher: ${key}`); logger.trace(`Stopping watcher: ${key}`);
watcher.removeAllListeners(); watcher.removeAllListeners();
await watcher.close(); await watcher.close();
} }

src/database/nats.js (new file, 313 lines)

@@ -0,0 +1,313 @@
import { connect } from '@nats-io/transport-node';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
const NATS_HOST = process.env.NATS_HOST || 'localhost';
const NATS_PORT = process.env.NATS_PORT || 4222;
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
const logger = log4js.getLogger('Nats');
logger.level = LOG_LEVEL;
class NatsServer {
constructor() {
this.client = null;
this.subscriptions = new Map(); // subject → { subscription, callbacks }
this.requestHandlers = new Map(); // subject → { handler, callbacks }
this.queuedSubscriptions = new Map(); // subject → { subscription, callbacks, queue }
this.servers = [`nats://${NATS_HOST}:${NATS_PORT}`];
this.textEncoder = new TextEncoder();
this.textDecoder = new TextDecoder();
logger.trace(`NatsServer: servers set to ${JSON.stringify(this.servers)}`);
}
async connect() {
if (!this.client) {
logger.info('Connecting to NATS...');
logger.trace(`Creating NATS client with servers ${JSON.stringify(this.servers)}`);
try {
this.client = await connect({
servers: this.servers,
reconnect: true,
maxReconnectAttempts: -1, // unlimited reconnects
reconnectTimeWait: 1000,
timeout: 20000,
});
// Test connection by checking if client is connected
try {
if (this.client.isClosed()) {
throw new Error('NATS client connection failed');
}
logger.trace('NATS client connected successfully.');
} catch (error) {
throw error;
}
} catch (error) {
logger.error('Failed to connect to NATS:', error);
throw error;
}
} else {
logger.trace('NATS client already exists, skipping connection.');
}
return this.client;
}
async getClient() {
if (!this.client) {
logger.trace('No client found, calling connect().');
await this.connect();
}
return this.client;
}
async publish(subject, data) {
const client = await this.getClient();
const payload = typeof data === 'string' ? data : JSON.stringify(data);
try {
client.publish(subject, this.textEncoder.encode(payload));
logger.trace(`Published to subject: ${subject}, data: ${payload}`);
return { success: true };
} catch (error) {
logger.error(`Failed to publish to subject ${subject}:`, error);
throw error;
}
}
async request(subject, data, timeout = 30000) {
const client = await this.getClient();
const payload = typeof data === 'string' ? data : JSON.stringify(data);
try {
const response = await client.request(subject, this.textEncoder.encode(payload), {
timeout: timeout,
});
const responseData = this.textDecoder.decode(response.data);
logger.trace(`Request to subject: ${subject}, response: ${responseData}`);
// Try to parse as JSON, fallback to string
try {
return JSON.parse(responseData);
} catch {
return responseData;
}
} catch (error) {
if (error.code === 'TIMEOUT') {
logger.trace(`Request timeout for subject: ${subject}`);
return null;
}
throw error;
}
}
async subscribe(subject, owner, callback) {
const client = await this.getClient();
const subscriptionKey = subject;
if (this.subscriptions.has(subscriptionKey)) {
this.subscriptions.get(subscriptionKey).callbacks.set(owner, callback);
logger.trace(`Added subscription callback for owner=${owner} on subject=${subject}`);
return { success: true };
}
logger.trace(`Creating new subscription for subject: ${subject}`);
const subscription = client.subscribe(subject);
const callbacks = new Map();
callbacks.set(owner, callback);
(async () => {
for await (const msg of subscription) {
logger.trace(`Message received on subject: ${subject}`);
const data = this.textDecoder.decode(msg.data);
let parsedData;
try {
parsedData = JSON.parse(data);
} catch {
parsedData = data;
}
for (const [ownerId, cb] of callbacks) {
try {
cb(subject, parsedData, msg);
} catch (err) {
logger.error(
`Error in subscription callback for owner=${ownerId}, subject=${subject}:`,
err
);
}
}
}
})().catch((err) => {
logger.error(`Subscription error for subject ${subject}:`, err);
});
this.subscriptions.set(subscriptionKey, { subscription, callbacks });
return { success: true };
}
async setRequestHandler(subject, owner, handler) {
const client = await this.getClient();
const handlerKey = subject;
if (this.requestHandlers.has(handlerKey)) {
this.requestHandlers.get(handlerKey).callbacks.set(owner, handler);
logger.trace(`Added request handler for owner=${owner} on subject=${subject}`);
return { success: true };
}
logger.trace(`Creating new request handler for subject: ${subject}`);
const subscription = client.subscribe(subject);
const callbacks = new Map();
callbacks.set(owner, handler);
(async () => {
for await (const msg of subscription) {
logger.trace(`Request received on subject: ${subject}`);
const data = this.textDecoder.decode(msg.data);
let parsedData;
try {
parsedData = JSON.parse(data);
} catch {
parsedData = data;
}
for (const [ownerId, cb] of callbacks) {
try {
const response = await cb(subject, parsedData, msg);
const responsePayload =
typeof response === 'string' ? response : JSON.stringify(response);
msg.respond(this.textEncoder.encode(responsePayload));
} catch (err) {
logger.error(`Error in request handler for owner=${ownerId}, subject=${subject}:`, err);
// Send error response
msg.respond(this.textEncoder.encode(JSON.stringify({ error: err.message })));
}
}
}
})().catch((err) => {
logger.error(`Request handler error for subject ${subject}:`, err);
});
this.requestHandlers.set(handlerKey, { subscription, callbacks });
return { success: true };
}
async removeSubscription(subject, owner) {
const entry = this.subscriptions.get(subject);
if (!entry) {
logger.trace(`Subscription not found for subject: ${subject}`);
return false;
}
if (entry.callbacks.delete(owner)) {
logger.trace(`Removed subscription callback for owner: ${owner} on subject: ${subject}`);
} else {
logger.trace(`No subscription callback found for owner: ${owner} on subject: ${subject}`);
}
if (entry.callbacks.size === 0) {
logger.trace(`No callbacks left, stopping subscription for ${subject}`);
entry.subscription.unsubscribe();
this.subscriptions.delete(subject);
}
return true;
}
async removeQueuedSubscription(subject, queue, owner) {
const subscriptionKey = `${subject}:${queue}`;
const entry = this.queuedSubscriptions.get(subscriptionKey);
if (!entry) {
logger.trace(`Queued subscription not found for subject: ${subject}, queue: ${queue}`);
return false;
}
if (entry.callbacks.delete(owner)) {
logger.trace(
`Removed queued subscription callback for owner: ${owner} on subject: ${subject}, queue: ${queue}`
);
} else {
logger.trace(
`No queued subscription callback found for owner: ${owner} on subject: ${subject}, queue: ${queue}`
);
}
if (entry.callbacks.size === 0) {
logger.trace(
`No callbacks left, stopping queued subscription for ${subject}, queue: ${queue}`
);
entry.subscription.unsubscribe();
this.queuedSubscriptions.delete(subscriptionKey);
}
return true;
}
async removeRequestHandler(subject, owner) {
const entry = this.requestHandlers.get(subject);
if (!entry) {
logger.trace(`Request handler not found for subject: ${subject}`);
return false;
}
if (entry.callbacks.delete(owner)) {
logger.trace(`Removed request handler for owner: ${owner} on subject: ${subject}`);
} else {
logger.trace(`No request handler found for owner: ${owner} on subject: ${subject}`);
}
if (entry.callbacks.size === 0) {
logger.trace(`No handlers left, stopping request handler for ${subject}`);
entry.subscription.unsubscribe();
this.requestHandlers.delete(subject);
}
return true;
}
async disconnect() {
logger.info('Disconnecting from NATS...');
// Stop all subscriptions
for (const [subject, entry] of this.subscriptions) {
logger.trace(`Stopping subscription: ${subject}`);
entry.subscription.unsubscribe();
}
this.subscriptions.clear();
// Stop all queued subscriptions
for (const [key, entry] of this.queuedSubscriptions) {
logger.trace(`Stopping queued subscription: ${key}`);
entry.subscription.unsubscribe();
}
this.queuedSubscriptions.clear();
// Stop all request handlers
for (const [subject, entry] of this.requestHandlers) {
logger.trace(`Stopping request handler: ${subject}`);
entry.subscription.unsubscribe();
}
this.requestHandlers.clear();
if (this.client) {
await this.client.close();
this.client = null;
logger.info('Disconnected from NATS');
}
}
}
const natsServer = new NatsServer();
export { NatsServer, natsServer };
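A short usage sketch for the new wrapper, assuming a reachable NATS server at NATS_HOST:NATS_PORT (localhost:4222 by default); the subject names and the import path are illustrative, not part of the diff:

import { natsServer } from './src/database/nats.js'; // path is an assumption

// Publish: plain objects are JSON-encoded before sending.
await natsServer.publish('printers.new', { name: 'example-printer' });

// Subscribe with an owner key; several owners can share one underlying
// subscription and be detached independently later.
await natsServer.subscribe('printers.new', 'dashboard', (subject, data) => {
  console.log(`received on ${subject}`, data);
});

// Request/reply: the handler's return value is sent back as JSON, and
// request() resolves to the parsed reply, or null on timeout.
await natsServer.setRequestHandler('printers.ping', 'health-check', async () => ({ ok: true }));
const reply = await natsServer.request('printers.ping', {}, 5000);
console.log(reply); // { ok: true }

// Detach this owner; the underlying subscription is closed once no owners remain.
await natsServer.removeSubscription('printers.new', 'dashboard');
await natsServer.disconnect();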


@@ -28,6 +28,7 @@ import {
documentSizesRoutes, documentSizesRoutes,
documentTemplatesRoutes, documentTemplatesRoutes,
documentPrintersRoutes, documentPrintersRoutes,
documentJobsRoutes,
} from './routes/index.js'; } from './routes/index.js';
import path from 'path'; import path from 'path';
import * as fs from 'fs'; import * as fs from 'fs';
@@ -36,6 +37,7 @@ import ReseedAction from './database/ReseedAction.js';
import log4js from 'log4js'; import log4js from 'log4js';
import { etcdServer } from './database/etcd.js'; import { etcdServer } from './database/etcd.js';
import { populateUserMiddleware } from './services/misc/auth.js'; import { populateUserMiddleware } from './services/misc/auth.js';
import { natsServer } from './database/nats.js';
dotenv.config(); dotenv.config();
@@ -70,6 +72,15 @@ try {
throw err; throw err;
} }
// Connect to NATS (await)
try {
natsServer.connect();
logger.info('Connected to NATS');
} catch (err) {
logger.error('Failed to connect to NATS:', err);
throw err;
}
app.use(cors(corsOptions)); app.use(cors(corsOptions));
app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' })); app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' }));
app.use(express.json()); app.use(express.json());
@@ -104,6 +115,7 @@ app.use('/notetypes', noteTypeRoutes);
app.use('/documentsizes', documentSizesRoutes); app.use('/documentsizes', documentSizesRoutes);
app.use('/documenttemplates', documentTemplatesRoutes); app.use('/documenttemplates', documentTemplatesRoutes);
app.use('/documentprinters', documentPrintersRoutes); app.use('/documentprinters', documentPrintersRoutes);
app.use('/documentjobs', documentJobsRoutes);
app.use('/notes', noteRoutes); app.use('/notes', noteRoutes);
if (process.env.SCHEDULE_HOUR) { if (process.env.SCHEDULE_HOUR) {
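One detail worth noting in the startup block above: natsServer.connect() returns a promise but is not awaited, so the surrounding try/catch cannot observe a failed connection even though the comment reads '// Connect to NATS (await)'. A sketch of the awaited form the comment appears to intend (assuming top-level await is acceptable in index.js):

try {
  await natsServer.connect();
  logger.info('Connected to NATS');
} catch (err) {
  logger.error('Failed to connect to NATS:', err);
  throw err;
}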


@@ -20,6 +20,7 @@ import noteTypeRoutes from './management/notetypes.js';
import documentSizesRoutes from './management/documentsizes.js'; import documentSizesRoutes from './management/documentsizes.js';
import documentTemplatesRoutes from './management/documenttemplates.js'; import documentTemplatesRoutes from './management/documenttemplates.js';
import documentPrintersRoutes from './management/documentprinters.js'; import documentPrintersRoutes from './management/documentprinters.js';
import documentJobsRoutes from './management/documentjobs.js';
import noteRoutes from './misc/notes.js'; import noteRoutes from './misc/notes.js';
export { export {
@@ -46,4 +47,5 @@
documentSizesRoutes, documentSizesRoutes,
documentTemplatesRoutes, documentTemplatesRoutes,
documentPrintersRoutes, documentPrintersRoutes,
documentJobsRoutes,
}; };


@@ -15,7 +15,7 @@ import {
// list of filament stocks // list of filament stocks
router.get('/', isAuthenticated, (req, res) => { router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query; const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['filament', 'state', 'startingWeight', 'currentWeight']; const allowedFilters = ['filament', 'state', 'startingWeight', 'currentWeight', 'filament._id'];
const filter = getFilter(req.query, allowedFilters); const filter = getFilter(req.query, allowedFilters);
listFilamentStocksRouteHandler(req, res, page, limit, property, filter, search, sort, order); listFilamentStocksRouteHandler(req, res, page, limit, property, filter, search, sort, order);
}); });


@@ -0,0 +1,46 @@
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { getFilter, convertPropertiesString } from '../../utils.js';
const router = express.Router();
import {
listDocumentJobsRouteHandler,
getDocumentJobRouteHandler,
editDocumentJobRouteHandler,
newDocumentJobRouteHandler,
deleteDocumentJobRouteHandler,
listDocumentJobsByPropertiesRouteHandler,
} from '../../services/management/documentjobs.js';
// list of document jobs
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['name', 'width', 'height'];
const filter = getFilter(req.query, allowedFilters);
listDocumentJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});
router.get('/properties', isAuthenticated, (req, res) => {
let properties = convertPropertiesString(req.query.properties);
const allowedFilters = [];
const filter = getFilter(req.query, allowedFilters, false);
listDocumentJobsByPropertiesRouteHandler(req, res, properties, filter);
});
router.post('/', isAuthenticated, (req, res) => {
newDocumentJobRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getDocumentJobRouteHandler(req, res);
});
router.put('/:id', isAuthenticated, async (req, res) => {
editDocumentJobRouteHandler(req, res);
});
router.delete('/:id', isAuthenticated, async (req, res) => {
deleteDocumentJobRouteHandler(req, res);
});
export default router;
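The new router is mounted at /documentjobs in index.js and every route sits behind isAuthenticated, so requests need a valid bearer token. An illustrative client-side sketch (base URL, token and ids are placeholders):

const base = 'http://localhost:3000'; // wherever the API listens
const headers = { Authorization: 'Bearer <token>' }; // Keycloak access token

// List document jobs, filtering on one of the whitelisted fields.
const jobs = await fetch(`${base}/documentjobs?page=1&limit=25&name=labels`, { headers })
  .then((r) => r.json());
console.log(`matching jobs: ${jobs.length}`);

// Fetch a single job by id.
const job = await fetch(`${base}/documentjobs/64f1c2aa9d3e4b0012345678`, { headers })
  .then((r) => r.json());
console.log(job);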


@@ -15,7 +15,7 @@ import {
// list of document templates // list of document templates
router.get('/', isAuthenticated, (req, res) => { router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query; const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['name', 'tags', 'active', 'isGlobal']; const allowedFilters = ['name', 'tags', 'active', 'global', 'objectType'];
const filter = getFilter(req.query, allowedFilters); const filter = getFilter(req.query, allowedFilters);
listDocumentTemplatesRouteHandler(req, res, page, limit, property, filter, search, sort, order); listDocumentTemplatesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
}); });


@@ -1,42 +1,50 @@
import express from 'express'; import express from 'express';
import { isAuthenticated } from '../../keycloak.js'; import { isAuthenticated } from '../../keycloak.js';
import { getFilter, convertPropertiesString } from '../../utils.js';
const router = express.Router();
import { import {
listNoteTypesRouteHandler, listNoteTypesRouteHandler,
getNoteTypeRouteHandler, getNoteTypeRouteHandler,
editNoteTypeRouteHandler, editNoteTypeRouteHandler,
newNoteTypeRouteHandler, newNoteTypeRouteHandler,
deleteNoteTypeRouteHandler,
listNoteTypesByPropertiesRouteHandler,
} from '../../services/management/notetypes.js'; } from '../../services/management/notetypes.js';
import { parseFilter } from '../../utils.js';
const router = express.Router(); // list of note types
router.get('/', isAuthenticated, (req, res) => {
// List note types
router.get('/', isAuthenticated, async (req, res) => {
const { page, limit, property, search, sort, order } = req.query; const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['_id', 'name', 'active', 'color'];
const allowedFilters = ['name', 'active', 'color', '_id']; const filter = getFilter(req.query, allowedFilters);
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listNoteTypesRouteHandler(req, res, page, limit, property, filter, search, sort, order); listNoteTypesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
}); });
// Get single note type router.get('/properties', isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, getNoteTypeRouteHandler); let properties = convertPropertiesString(req.query.properties);
const allowedFilters = ['active', 'color'];
const filter = getFilter(req.query, allowedFilters, false);
var masterFilter = {};
if (req.query.masterFilter) {
masterFilter = JSON.parse(req.query.masterFilter);
}
listNoteTypesByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
});
// Edit note type router.post('/', isAuthenticated, (req, res) => {
router.put('/:id', isAuthenticated, editNoteTypeRouteHandler); newNoteTypeRouteHandler(req, res);
});
// Create new note type router.get('/:id', isAuthenticated, (req, res) => {
router.post('/', isAuthenticated, newNoteTypeRouteHandler); getNoteTypeRouteHandler(req, res);
});
router.put('/:id', isAuthenticated, async (req, res) => {
editNoteTypeRouteHandler(req, res);
});
router.delete('/:id', isAuthenticated, async (req, res) => {
deleteNoteTypeRouteHandler(req, res);
});
export default router; export default router;


@@ -7,40 +7,39 @@ import {
newNoteRouteHandler, newNoteRouteHandler,
deleteNoteRouteHandler, deleteNoteRouteHandler,
} from '../../services/misc/notes.js'; } from '../../services/misc/notes.js';
import { parseFilter } from '../../utils.js'; import { getFilter } from '../../utils.js';
const router = express.Router(); const router = express.Router();
// List notes // list of notes
router.get('/', isAuthenticated, async (req, res) => { router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, sort, order } = req.query; const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['parent._id'];
const allowedFilters = ['parent', 'user._id']; const filter = getFilter(req.query, allowedFilters);
listNotesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const filterObject = parseFilter(key, value);
filter = { ...filter, ...filterObject };
}
}
}
listNotesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
}); });
// Get single note router.get('/properties', isAuthenticated, (req, res) => {
router.get('/:id', isAuthenticated, getNoteRouteHandler); let properties = convertPropertiesString(req.query.properties);
const allowedFilters = ['parent'];
const filter = getFilter(req.query, allowedFilters, false);
listNotesByPropertiesRouteHandler(req, res, properties, filter);
});
// Edit note // create new note
router.put('/:id', isAuthenticated, editNoteRouteHandler); router.post('/', isAuthenticated, (req, res) => {
newNoteRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getNoteRouteHandler(req, res);
});
// update note info
router.put('/:id', isAuthenticated, async (req, res) => {
editNoteRouteHandler(req, res);
});
// Delete note // Delete note
router.delete('/:id', isAuthenticated, deleteNoteRouteHandler); router.delete('/:id', isAuthenticated, deleteNoteRouteHandler);
// Create new note
router.post('/', isAuthenticated, newNoteRouteHandler);
export default router; export default router;


@@ -15,7 +15,7 @@ import { convertPropertiesString, getFilter } from '../../utils.js';
// list of printers // list of printers
router.get('/', isAuthenticated, (req, res) => { router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query; const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['tags']; const allowedFilters = ['tags', 'host._id'];
const filter = getFilter(req.query, allowedFilters); const filter = getFilter(req.query, allowedFilters);
listPrintersRouteHandler(req, res, page, limit, property, filter, search, sort, order); listPrintersRouteHandler(req, res, page, limit, property, filter, search, sort, order);
}); });


@@ -2,26 +2,30 @@ import express from 'express';
import { isAuthenticated } from '../../keycloak.js'; import { isAuthenticated } from '../../keycloak.js';
const router = express.Router(); const router = express.Router();
import { listSubJobsRouteHandler } from '../../services/production/subjobs.js'; import {
import { parseFilter } from '../../utils.js'; listSubJobsRouteHandler,
listSubJobsByPropertiesRouteHandler,
getSubJobRouteHandler,
} from '../../services/production/subjobs.js';
import { getFilter } from '../../utils.js';
// list of print subjobs // list of sub jobs
router.get('/', isAuthenticated, (req, res) => { router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query; const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['_id', 'job._id']; const allowedFilters = ['_id', 'job._id', 'printer._id'];
const filter = getFilter(req.query, allowedFilters);
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listSubJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order); listSubJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
}); });
router.get('/properties', isAuthenticated, (req, res) => {
let properties = convertPropertiesString(req.query.properties);
const allowedFilters = ['job'];
const filter = getFilter(req.query, allowedFilters, false);
listSubJobsByPropertiesRouteHandler(req, res, properties, filter);
});
router.get('/:id', isAuthenticated, (req, res) => {
getSubJobRouteHandler(req, res);
});
export default router; export default router;


@@ -0,0 +1,42 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const documentJobSchema = new Schema(
{
name: {
type: String,
required: true,
unique: true,
},
objectType: { type: String, required: false },
state: {
type: { type: String, required: true, default: 'queued' },
percent: { type: Number, required: false },
},
documentTemplate: {
type: Schema.Types.ObjectId,
ref: 'documentTemplate',
required: true,
},
documentPrinter: {
type: Schema.Types.ObjectId,
ref: 'documentPrinter',
required: true,
},
content: {
type: String,
required: false,
},
},
{ timestamps: true }
);
// Add virtual id getter
documentJobSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
documentJobSchema.set('toJSON', { virtuals: true });
export const documentJobModel = mongoose.model('documentJob', documentJobSchema);
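A creation sketch against this schema (ObjectIds and the import path are placeholders): name must be unique, both document references are required, and state.type falls back to its 'queued' default when no state is supplied. Note that the document-job service handlers further down only copy name, width and height from the request body, which does not line up with these required references.

import mongoose from 'mongoose';
import { documentJobModel } from './src/schemas/management/documentjob.schema.js'; // path assumed

const job = await documentJobModel.create({
  name: 'delivery-note-000123',
  objectType: 'job',
  documentTemplate: new mongoose.Types.ObjectId('64f1c2aa9d3e4b0012345678'),
  documentPrinter: new mongoose.Types.ObjectId('64f1c2aa9d3e4b0087654321'),
  content: '<p>rendered document body</p>',
});

console.log(job.id, job.state.type); // virtual hex-string id, 'queued'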


@@ -17,6 +17,7 @@ const alertSchema = new Schema(
{ {
priority: { type: String, required: true }, // order to show priority: { type: String, required: true }, // order to show
type: { type: String, required: true }, // selectFilament, error, info, message, type: { type: String, required: true }, // selectFilament, error, info, message,
message: { type: String, required: false },
}, },
{ timestamps: true, _id: false } { timestamps: true, _id: false }
); );
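With the new optional field an alert can carry human-readable detail alongside its type. An illustrative value (how the printer document embeds its alerts is outside this hunk, so the surrounding structure is assumed):

const alert = {
  priority: '1', // order to show
  type: 'error', // selectFilament, error, info, message, ...
  message: 'Filament runout detected on extruder 0', // new optional free-text detail
};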


@@ -0,0 +1,158 @@
import dotenv from 'dotenv';
import { documentJobModel } from '../../schemas/management/documentjob.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
deleteObject,
listObjects,
getObject,
editObject,
newObject,
listObjectsByProperties,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Jobs');
logger.level = process.env.LOG_LEVEL;
export const listDocumentJobsRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = '',
filter = {},
search = '',
sort = '',
order = 'ascend'
) => {
const result = await listObjects({
model: documentJobModel,
page,
limit,
property,
filter,
search,
sort,
order,
});
if (result?.error) {
logger.error('Error listing document jobs.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of document jobs (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listDocumentJobsByPropertiesRouteHandler = async (
req,
res,
properties = '',
filter = {}
) => {
const result = await listObjectsByProperties({
model: documentJobModel,
properties,
filter,
});
if (result?.error) {
logger.error('Error listing document jobs.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of document jobs. Count: ${result.length}`);
res.send(result);
};
export const getDocumentJobRouteHandler = async (req, res) => {
const id = req.params.id;
const result = await getObject({
model: documentJobModel,
id,
});
if (result?.error) {
logger.warn(`Document Job not found with supplied id.`);
return res.status(result.code).send(result);
}
logger.debug(`Retreived document job with ID: ${id}`);
res.send(result);
};
export const editDocumentJobRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
logger.trace(`Document Job with ID: ${id}`);
const updateData = {
updatedAt: new Date(),
name: req.body.name,
width: req.body.width,
height: req.body.height,
};
// Create audit log before updating
const result = await editObject({
model: documentJobModel,
id,
updateData,
user: req.user,
});
if (result.error) {
logger.error('Error editing document job:', result.error);
res.status(result).send(result);
return;
}
logger.debug(`Edited document job with ID: ${id}`);
res.send(result);
};
export const newDocumentJobRouteHandler = async (req, res) => {
const newData = {
updatedAt: new Date(),
name: req.body.name,
width: req.body.width,
height: req.body.height,
};
const result = await newObject({
model: documentJobModel,
newData,
user: req.user,
});
if (result.error) {
logger.error('No document job created:', result.error);
return res.status(result.code).send(result);
}
logger.debug(`New document job with ID: ${result._id}`);
res.send(result);
};
export const deleteDocumentJobRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
logger.trace(`Document Job with ID: ${id}`);
const result = await deleteObject({
model: documentJobModel,
id,
user: req.user,
});
if (result.error) {
logger.error('No document job deleted:', result.error);
return res.status(result.code).send(result);
}
logger.debug(`Deleted document job with ID: ${result._id}`);
res.send(result);
};


@@ -2,11 +2,17 @@ import dotenv from 'dotenv';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js'; import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import log4js from 'log4js'; import log4js from 'log4js';
import mongoose from 'mongoose'; import mongoose from 'mongoose';
import { distributeUpdate, newAuditLog, editAuditLog, distributeNew } from '../../utils.js'; import {
deleteObject,
listObjects,
getObject,
editObject,
newObject,
listObjectsByProperties,
} from '../../database/database.js';
dotenv.config(); dotenv.config();
const logger = log4js.getLogger('NoteTypes'); const logger = log4js.getLogger('Note Types');
logger.level = process.env.LOG_LEVEL; logger.level = process.env.LOG_LEVEL;
export const listNoteTypesRouteHandler = async ( export const listNoteTypesRouteHandler = async (
@@ -20,135 +26,135 @@ export const listNoteTypesRouteHandler = async (
sort = '', sort = '',
order = 'ascend' order = 'ascend'
) => { ) => {
try { const result = await listObjects({
const skip = (page - 1) * limit; model: noteTypeModel,
let noteTypes; page,
let aggregateCommand = []; limit,
property,
filter,
search,
sort,
order,
});
if (search) { if (result?.error) {
// Add a text search match stage for name and brand fields logger.error('Error listing note types.');
aggregateCommand.push({ res.status(result.code).send(result);
$match: { return;
$text: {
$search: search,
},
},
});
}
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
console.log(aggregateCommand);
noteTypes = await noteTypeModel.aggregate(aggregateCommand);
logger.trace(
`List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
noteTypes
);
res.send(noteTypes);
} catch (error) {
logger.error('Error listing note types:', error);
res.status(500).send({ error: error });
} }
logger.debug(`List of note types (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listNoteTypesByPropertiesRouteHandler = async (
req,
res,
properties = '',
filter = {},
masterFilter = {}
) => {
const result = await listObjectsByProperties({
model: noteTypeModel,
properties,
filter,
masterFilter,
});
if (result?.error) {
logger.error('Error listing note types.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of note types. Count: ${result.length}`);
res.send(result);
}; };
export const getNoteTypeRouteHandler = async (req, res) => { export const getNoteTypeRouteHandler = async (req, res) => {
try { const id = req.params.id;
const id = new mongoose.Types.ObjectId(req.params.id); const result = await getObject({
const noteType = await noteTypeModel.findOne({ model: noteTypeModel,
_id: id, id,
}); });
if (result?.error) {
if (!noteType) { logger.warn(`Note Type not found with supplied id.`);
logger.warn(`Note type not found with supplied id.`); return res.status(result.code).send(result);
return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType);
res.send({ ...noteType._doc });
} catch (error) {
logger.error('Error fetching note type:', error);
res.status(500).send({ error: error.message });
} }
logger.debug(`Retreived note type with ID: ${id}`);
res.send(result);
}; };
export const editNoteTypeRouteHandler = async (req, res) => { export const editNoteTypeRouteHandler = async (req, res) => {
try { // Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id); const id = new mongoose.Types.ObjectId(req.params.id);
const noteType = await noteTypeModel.findOne({ _id: id });
if (!noteType) { logger.trace(`Note Type with ID: ${id}`);
logger.warn(`Note type not found with supplied id.`);
return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType); const updateData = {
updatedAt: new Date(),
name: req.body.name,
color: req.body.color,
active: req.body.active,
};
// Create audit log before updating
const result = await editObject({
model: noteTypeModel,
id,
updateData,
user: req.user,
});
try { if (result.error) {
const updateData = { logger.error('Error editing note type:', result.error);
updatedAt: new Date(), res.status(result).send(result);
name: req.body.name, return;
color: req.body.color,
active: req.body.active,
};
// Create audit log before updating
await editAuditLog(noteType.toObject(), updateData, id, 'noteType', req.user._id, 'user');
await distributeUpdate(updateData, id, 'noteType');
const result = await noteTypeModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error('No note type updated.');
res.status(500).send({ error: 'No note types updated.' });
}
} catch (updateError) {
logger.error('Error updating note type:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send('OK');
} catch (fetchError) {
logger.error('Error fetching note type:', fetchError);
res.status(500).send({ error: fetchError.message });
} }
logger.debug(`Edited note type with ID: ${id}`);
res.send(result);
}; };
export const newNoteTypeRouteHandler = async (req, res) => { export const newNoteTypeRouteHandler = async (req, res) => {
try { const newData = {
let { ...newNoteType } = req.body; updatedAt: new Date(),
newNoteType = { ...newNoteType, createdAt: new Date(), updatedAt: new Date() }; name: req.body.name,
color: req.body.color,
const result = await noteTypeModel.create(newNoteType); active: req.body.active,
if (result.nCreated === 0) { };
logger.error('No note type created.'); const result = await newObject({
res.status(500).send({ error: 'No note type created.' }); model: noteTypeModel,
} newData,
user: req.user,
// Create audit log for new note type });
await newAuditLog(newNoteType, result._id, 'noteType', req.user); if (result.error) {
await distributeNew(result._id, 'filament'); logger.error('No note type created:', result.error);
return res.status(result.code).send(result);
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error('Error creating note type:', updateError);
res.status(500).send({ error: updateError.message });
} }
logger.debug(`New note type with ID: ${result._id}`);
res.send(result);
};
export const deleteNoteTypeRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
logger.trace(`Note Type with ID: ${id}`);
const result = await deleteObject({
model: noteTypeModel,
id,
user: req.user,
});
if (result.error) {
logger.error('No note type deleted:', result.error);
return res.status(result.code).send(result);
}
logger.debug(`Deleted note type with ID: ${result._id}`);
res.send(result);
}; };


@@ -1,14 +1,16 @@
import dotenv from 'dotenv'; import dotenv from 'dotenv';
import { noteModel } from '../../schemas/misc/note.schema.js'; import { noteModel } from '../../schemas/misc/note.schema.js';
import log4js from 'log4js'; import log4js from 'log4js';
import mongoose from 'mongoose';
import { import {
deleteAuditLog, deleteObject,
editAuditLog, editObject,
expandObjectIds, getObject,
flatternObjectIds, listObjects,
newAuditLog, listObjectsByProperties,
} from '../../utils.js'; newObject,
recursivelyDeleteChildObjects,
} from '../../database/database.js';
import mongoose from 'mongoose';
dotenv.config(); dotenv.config();
@@ -21,197 +23,141 @@ export const listNotesRouteHandler = async (
page = 1, page = 1,
limit = 25, limit = 25,
property = '', property = '',
filter = {} filter = {},
search = '',
sort = '',
order = 'ascend'
) => { ) => {
try { const result = await listObjects({
const skip = (page - 1) * limit; model: noteModel,
let notes; page,
let aggregateCommand = []; limit,
property,
filter,
search,
sort,
order,
populate: ['noteType', 'user'],
});
if (Object.keys(filter).length > 0) { if (result?.error) {
aggregateCommand.push({ $match: filter }); logger.error('Error listing notes.');
} res.status(result.code).send(result);
return;
aggregateCommand.push({
$lookup: {
from: 'users', // The collection name (usually lowercase plural)
localField: 'user', // The field in your current model
foreignField: '_id', // The field in the users collection
as: 'user', // The output field name
},
});
aggregateCommand.push({ $unwind: '$user' });
aggregateCommand.push({
$lookup: {
from: 'notetypes', // The collection name (usually lowercase plural)
localField: 'noteType', // The field in your current model
foreignField: '_id', // The field in the users collection
as: 'noteType', // The output field name
},
});
aggregateCommand.push({ $unwind: '$noteType' });
aggregateCommand.push({
$project: {
name: 1,
_id: 1,
createdAt: 1,
updatedAt: 1,
'noteType._id': 1,
'noteType.name': 1,
'noteType.color': 1,
'user._id': 1,
'user.name': 1,
content: 1,
parent: 1,
},
});
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
notes = await noteModel.aggregate(aggregateCommand);
logger.trace(`List of notes (Page ${page}, Limit ${limit}, Property ${property}):`, notes);
res.send(notes);
} catch (error) {
logger.error('Error listing notes:', error);
res.status(500).send({ error: error });
} }
logger.debug(`List of notes (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listNotesByPropertiesRouteHandler = async (req, res, properties = '', filter = {}) => {
const result = await listObjectsByProperties({
model: noteModel,
properties,
filter,
populate: ['noteType', 'user'],
});
if (result?.error) {
logger.error('Error listing notes.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of notes. Count: ${result.length}`);
res.send(result);
}; };
export const getNoteRouteHandler = async (req, res) => { export const getNoteRouteHandler = async (req, res) => {
try { const id = req.params.id;
const id = new mongoose.Types.ObjectId(req.params.id); const result = await getObject({
const note = await noteModel.findOne({ model: noteModel,
_id: id, id,
}); populate: ['noteType', 'user'],
});
if (!note) { if (result?.error) {
logger.warn(`Note not found with supplied id.`); logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: 'Note not found.' }); return res.status(result.code).send(result);
}
logger.trace(`Note with ID: ${id}:`, note);
res.send({ ...note._doc });
} catch (error) {
logger.error('Error fetching note:', error);
res.status(500).send({ error: error.message });
} }
logger.debug(`Retreived note with ID: ${id}`);
res.send(result);
}; };
export const editNoteRouteHandler = async (req, res) => { export const editNoteRouteHandler = async (req, res) => {
try { // Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id); const id = new mongoose.Types.ObjectId(req.params.id);
const note = await noteModel.findOne({ _id: id });
if (!note) { logger.trace(`Note with ID: ${id}`);
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: 'Note not found.' });
}
logger.trace(`Note with ID: ${id}:`, note); const updateData = {
updatedAt: new Date(),
content: req.body.content,
noteType: req.body.noteType,
};
// Create audit log before updating
const result = await editObject({
model: noteModel,
id,
updateData,
user: req.user,
});
try { if (result.error) {
const updateData = { logger.error('Error editing note:', result.error);
updatedAt: new Date(), res.status(result).send(result);
name: req.body.name, return;
color: req.body.color,
isActive: req.body.isActive,
};
const result = await noteModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error('No note updated.');
res.status(500).send({ error: 'No notes updated.' });
}
await editAuditLog(note.toObject(), updateData, id, 'note', req.user);
} catch (updateError) {
logger.error('Error updating note:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send('OK');
} catch (fetchError) {
logger.error('Error fetching note:', fetchError);
res.status(500).send({ error: fetchError.message });
} }
logger.debug(`Edited note with ID: ${id}`);
res.send(result);
}; };
export const newNoteRouteHandler = async (req, res) => { export const newNoteRouteHandler = async (req, res) => {
try { const newData = {
let { ...newNote } = req.body; updatedAt: new Date(),
newNote = { ...newNote, createdAt: new Date(), updatedAt: new Date(), user: req.user }; content: req.body.content,
noteType: req.body.noteType,
const result = await noteModel.create(flatternObjectIds(newNote)); parent: req.body.parent,
if (result.nCreated === 0) { parentType: req.body.parentType,
logger.error('No note created.'); user: req.user,
res.status(500).send({ error: 'No note created.' }); };
} const result = await newObject({
model: noteModel,
await newAuditLog(expandObjectIds(newNote), result._id, 'note', req.user); newData,
user: req.user,
res.status(200).send({ status: 'ok' }); });
} catch (updateError) { if (result.error) {
logger.error('Error creating note:', updateError); logger.error('No note created:', result.error);
res.status(500).send({ error: updateError.message }); return res.status(result.code).send(result);
} }
logger.debug(`New note with ID: ${result._id}`);
res.send(result);
}; };
export const deleteNoteRouteHandler = async (req, res) => { export const deleteNoteRouteHandler = async (req, res) => {
try { // Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id); const id = req.params.id;
const note = await noteModel.findOne({ _id: id });
if (!note) { logger.trace(`Delete note with ID: ${id}`);
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: 'Note not found.' });
}
// Check if the current user owns this note // Recursively find and delete all child notes
if (note.user.toString() !== req.user._id.toString()) { const result = await recursivelyDeleteChildObjects({ model: noteModel, id, user: req.user });
logger.warn(`User ${req.user._id} attempted to delete note ${id} owned by user ${note.user}`);
return res.status(403).send({ error: 'You can only delete your own notes.' });
}
logger.trace(`Deleting note with ID: ${id} and all its children`); if (result?.length <= 0) {
logger.error('No notes deleted');
// Recursively find and delete all child notes return res.status(404).send(result);
const deletedNoteIds = await recursivelyDeleteNotes(id, req.user);
logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
res.send({
status: 'ok',
deletedNoteIds: deletedNoteIds,
message: `Deleted ${deletedNoteIds.length} notes`,
});
} catch (error) {
logger.error('Error deleting note:', error);
res.status(500).send({ error: error.message });
}
};
// Helper function to recursively delete notes and their children
const recursivelyDeleteNotes = async (noteId, user) => {
const deletedIds = [];
// Find all notes that have this note as their parent
const childNotes = await noteModel.find({ parent: noteId });
// Recursively delete all children first
for (const childNote of childNotes) {
const childDeletedIds = await recursivelyDeleteNotes(childNote._id, user);
deletedIds.push(...childDeletedIds);
} }
// Delete the current note if (result?.error) {
logger.error('No note deleted:', result.error);
return res.status(result.code).send(result);
}
const note = await noteModel.findOne({ _id: noteId }).populate('user').populate('parent'); logger.info(`Successfully deleted note ${id} and ${result.length - 1} child notes`);
res.send({
await deleteAuditLog(expandObjectIds(note.toObject()), noteId, 'note', user); status: 'ok',
});
await noteModel.deleteOne({ _id: noteId });
deletedIds.push(noteId);
return deletedIds;
}; };
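deleteNoteRouteHandler now delegates the cascade to the shared recursivelyDeleteChildObjects helper, so a note and its reply chain are removed in one call and only the top-level deletion is distributed. A sketch of calling it directly (import paths and the id are assumptions):

import { noteModel } from './src/schemas/misc/note.schema.js';
import { recursivelyDeleteChildObjects } from './src/database/database.js';

// Deletes every note whose parent chain leads back to this id, child-first,
// then the note itself; only this top-level deletion is published over NATS.
const deletedIds = await recursivelyDeleteChildObjects(
  { model: noteModel, id: '64f1c2aa9d3e4b0012345678', user: null },
  true
);
console.log(`removed ${deletedIds.length} notes`);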


@@ -1,9 +1,10 @@
import dotenv from 'dotenv'; import dotenv from 'dotenv';
import { subJobModel } from '../../schemas/production/subjob.schema.js'; import { subJobModel } from '../../schemas/production/subjob.schema.js';
import log4js from 'log4js'; import log4js from 'log4js';
import { getObject, listObjects, listObjectsByProperties } from '../../database/database.js';
dotenv.config(); dotenv.config();
const logger = log4js.getLogger('SubJobs'); const logger = log4js.getLogger('Sub Jobs');
logger.level = process.env.LOG_LEVEL; logger.level = process.env.LOG_LEVEL;
export const listSubJobsRouteHandler = async ( export const listSubJobsRouteHandler = async (
@@ -17,102 +18,62 @@ export const listSubJobsRouteHandler = async (
sort = '', sort = '',
order = 'ascend' order = 'ascend'
) => { ) => {
try { const result = await listObjects({
// Calculate the skip value based on the page number and limit model: subJobModel,
const skip = (page - 1) * limit; page,
limit,
property,
filter,
search,
sort,
order,
populate: ['printer'],
});
let subJobs; if (result?.error) {
let aggregateCommand = []; logger.error('Error listing sub jobs.');
res.status(result.code).send(result);
if (search) { return;
// Add a text search match stage for name and other searchable fields
aggregateCommand.push({
$match: {
$text: {
$search: search,
},
},
});
}
// Lookup printer
aggregateCommand.push({
$lookup: {
from: 'printers', // The name of the Printer collection
localField: 'printer',
foreignField: '_id',
as: 'printer',
},
});
// Lookup job
aggregateCommand.push({
$lookup: {
from: 'jobs', // The name of the Printer collection
localField: 'job',
foreignField: '_id',
as: 'job',
},
});
aggregateCommand.push({
$unwind: {
path: '$printer',
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
},
});
aggregateCommand.push({
$unwind: {
path: '$job',
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
},
});
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({
$project: {
state: 1,
_id: 1,
createdAt: 1,
startedAt: 1,
'printer._id': 1,
'job._id': 1,
'printer.name': 1,
},
});
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
} else {
// Default sorting by createdAt descending
aggregateCommand.push({ $sort: { createdAt: -1 } });
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
console.log(aggregateCommand);
subJobs = await subJobModel.aggregate(aggregateCommand);
logger.trace(
`List of print subJobs (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
subJobs
);
res.send(subJobs);
} catch (error) {
logger.error('Error listing print subJobs:', error);
res.status(500).send({ error: error });
} }
logger.debug(`List of sub jobs (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listSubJobsByPropertiesRouteHandler = async (
req,
res,
properties = '',
filter = {},
masterFilter = {}
) => {
const result = await listObjectsByProperties({
model: subJobModel,
properties,
filter,
masterFilter,
});
if (result?.error) {
logger.error('Error listing sub jobs.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of sub jobs. Count: ${result.length}`);
res.send(result);
};
export const getSubJobRouteHandler = async (req, res) => {
const id = req.params.id;
const result = await getObject({
model: subJobModel,
id,
});
if (result?.error) {
logger.warn(`Sub job not found with supplied id.`);
return res.status(result.code).send(result);
}
logger.debug(`Retreived sub job with ID: ${id}`);
res.send(result);
}; };


@@ -1,6 +1,7 @@
import { ObjectId } from 'mongodb'; import { ObjectId } from 'mongodb';
import { auditLogModel } from './schemas/management/auditlog.schema.js'; import { auditLogModel } from './schemas/management/auditlog.schema.js';
import { etcdServer } from './database/etcd.js'; import { etcdServer } from './database/etcd.js';
import { natsServer } from './database/nats.js';
function parseFilter(property, value) { function parseFilter(property, value) {
if (typeof value === 'string') { if (typeof value === 'string') {
@@ -383,11 +384,15 @@ async function getAuditLogs(idOrIds) {
} }
async function distributeUpdate(value, id, type) { async function distributeUpdate(value, id, type) {
await etcdServer.setKey(`/${type}s/${id}/object`, value); await natsServer.publish(`${type}s.${id}.object`, value);
} }
async function distributeNew(id, type) { async function distributeNew(value, type) {
await etcdServer.setKey(`/${type}s/new`, id); await natsServer.publish(`${type}s.new`, value);
}
async function distributeDelete(value, type) {
await natsServer.publish(`${type}s.delete`, value);
} }
function flatternObjectIds(object) { function flatternObjectIds(object) {
@@ -487,6 +492,7 @@
expandObjectIds, expandObjectIds,
distributeUpdate, distributeUpdate,
distributeNew, distributeNew,
distributeDelete,
getFilter, // <-- add here getFilter, // <-- add here
convertPropertiesString, convertPropertiesString,
}; };
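distributeUpdate, distributeNew and distributeDelete now publish to NATS subjects instead of writing Etcd keys: <type>s.<id>.object for updates, <type>s.new for creations and <type>s.delete for deletions. A consumer sketch using the same wrapper, with 'printer' as the example type and the import path assumed:

import { natsServer } from './src/database/nats.js';

// New objects: distributeNew publishes the full created document.
await natsServer.subscribe('printers.new', 'cache', (subject, created) => {
  console.log('created', created);
});

// Per-object updates: '*' matches exactly one subject token, so the id can be
// recovered from the subject itself.
await natsServer.subscribe('printers.*.object', 'cache', (subject, update) => {
  const [, id] = subject.split('.');
  console.log('updated', id, update);
});

// Deletions: distributeDelete publishes the deleted document.
await natsServer.subscribe('printers.delete', 'cache', (subject, deleted) => {
  console.log('deleted', deleted);
});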