Compare commits


No commits in common. "97773c2ab283bbd325da12092f19e3d135cc7ea4" and "8d497de203d504aae94f372675b5649ce481231c" have entirely different histories.

21 changed files with 492 additions and 1081 deletions

package-lock.json (generated)
View File

@@ -9,7 +9,6 @@
"version": "1.0.0",
"license": "ISC",
"dependencies": {
"@nats-io/transport-node": "^3.1.0",
"axios": "^1.11.0",
"bcrypt": "^6.0.0",
"body-parser": "^2.2.0",
@@ -2925,51 +2924,6 @@
"sparse-bitfield": "^3.0.3"
}
},
"node_modules/@nats-io/nats-core": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@nats-io/nats-core/-/nats-core-3.1.0.tgz",
"integrity": "sha512-xsSkLEGGcqNF+Ru8dMjPmKtfbBeq/U4meuJJX4Zi+5TBHpjpjNjs4YkCBC/pGYWnEum1/vdNPizjE1RdNHCyBg==",
"license": "Apache-2.0",
"dependencies": {
"@nats-io/nkeys": "2.0.3",
"@nats-io/nuid": "2.0.3"
}
},
"node_modules/@nats-io/nkeys": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/@nats-io/nkeys/-/nkeys-2.0.3.tgz",
"integrity": "sha512-JVt56GuE6Z89KUkI4TXUbSI9fmIfAmk6PMPknijmuL72GcD+UgIomTcRWiNvvJKxA01sBbmIPStqJs5cMRBC3A==",
"license": "Apache-2.0",
"dependencies": {
"tweetnacl": "^1.0.3"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@nats-io/nuid": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/@nats-io/nuid/-/nuid-2.0.3.tgz",
"integrity": "sha512-TpA3HEBna/qMVudy+3HZr5M3mo/L1JPofpVT4t0HkFGkz2Cn9wrlrQC8tvR8Md5Oa9//GtGG26eN0qEWF5Vqew==",
"license": "Apache-2.0",
"engines": {
"node": ">= 18.x"
}
},
"node_modules/@nats-io/transport-node": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@nats-io/transport-node/-/transport-node-3.1.0.tgz",
"integrity": "sha512-k5pH7IOKUetwXOMraVgcB5zG0wibcHOwJJuyuY1/5Q4K0XfBJDnb/IbczP5/JJWwMYfxSL9O+46ojtdBHvHRSw==",
"license": "Apache-2.0",
"dependencies": {
"@nats-io/nats-core": "3.1.0",
"@nats-io/nkeys": "2.0.3",
"@nats-io/nuid": "2.0.3"
},
"engines": {
"node": ">= 18.0.0"
}
},
"node_modules/@nicolo-ribaudo/chokidar-2": {
"version": "2.1.8-no-fsevents.3",
"resolved": "https://registry.npmjs.org/@nicolo-ribaudo/chokidar-2/-/chokidar-2-2.1.8-no-fsevents.3.tgz",
@@ -11239,12 +11193,6 @@
"license": "0BSD",
"optional": true
},
"node_modules/tweetnacl": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz",
"integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==",
"license": "Unlicense"
},
"node_modules/type-check": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",

View File

@@ -4,7 +4,6 @@
"description": "",
"main": "index.js",
"dependencies": {
"@nats-io/transport-node": "^3.1.0",
"axios": "^1.11.0",
"bcrypt": "^6.0.0",
"body-parser": "^2.2.0",

View File

@@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { deleteAuditLog, distributeDelete, expandObjectIds } from '../utils.js';
import { deleteAuditLog, expandObjectIds } from '../utils.js';
import log4js from 'log4js';
import { editAuditLog, distributeUpdate, newAuditLog, distributeNew } from '../utils.js';
@@ -38,15 +38,17 @@ export const listObjects = async ({
if (!sort || sort === '') {
sort = 'createdAt';
}
// Translate any key ending with ._id to remove the ._id suffix for Mongoose
Object.keys(filter).forEach((key) => {
if (key.endsWith('._id')) {
const baseKey = key.slice(0, -4); // Remove '._id' suffix
filter[baseKey] = filter[key];
delete filter[key];
// Translate parent._id to parent for Mongoose
if (filter['parent._id']) {
filter.parent = filter['parent._id'];
delete filter['parent._id'];
}
// Translate owner._id to owner for Mongoose
if (filter['owner._id']) {
filter.owner = filter['owner._id'];
delete filter['owner._id'];
}
});
// Use find with population and filter
let query = model
@@ -278,7 +280,7 @@ export const getObject = async ({ model, id, populate }) => {
return { error: 'Object not found.', code: 404 };
}
return expandObjectIds(result);
return result;
} catch (error) {
return { error: error, code: 500 };
}
@@ -327,7 +329,7 @@ export const editObject = async ({ model, id, updateData, user, populate }) => {
};
// Reusable function to create a new object
export const newObject = async ({ model, newData, user = null }, distributeChanges = true) => {
export const newObject = async ({ model, newData, user = null }) => {
try {
const parentType = model.modelName ? model.modelName : 'unknown';
@@ -335,12 +337,10 @@ export const newObject = async ({ model, newData, user = null }, distributeChang
if (!result || result.length === 0) {
return { error: 'No object created.', code: 500 };
}
const created = expandObjectIds(result.toObject());
const created = result;
await newAuditLog(newData, created._id, parentType, user);
if (distributeChanges == true) {
await distributeNew(created, parentType);
}
await distributeNew(created._id, parentType);
return created;
} catch (error) {
@@ -350,7 +350,7 @@ export const newObject = async ({ model, newData, user = null }, distributeChang
};
// Reusable function to delete an object by ID, with audit logging and distribution
export const deleteObject = async ({ model, id, user = null }, distributeChanges = true) => {
export const deleteObject = async ({ model, id, user = null }) => {
try {
const parentType = model.modelName ? model.modelName : 'unknown';
// Delete the object
@@ -359,46 +359,14 @@ export const deleteObject = async ({ model, id, user = null }, distributeChanges
if (!result) {
return { error: `${parentType} not found.`, code: 404 };
}
const deleted = expandObjectIds(result.toObject());
// Audit log the deletion
await deleteAuditLog(deleted, id, parentType, user, 'delete');
await deleteAuditLog(result, id, parentType, user, 'delete');
// Distribute the deletion event
await distributeUpdate({ deleted: true }, id, parentType);
if (distributeChanges == true) {
await distributeDelete(deleted, parentType);
}
return { deleted: true, object: deleted };
return { deleted: true, id };
} catch (error) {
logger.error('deleteObject error:', error);
return { error: error.message, code: 500 };
}
};
// Helper function to recursively delete objects and their children
export const recursivelyDeleteChildObjects = async (
{ model, id, user = null },
distributeChanges = true
) => {
const deletedIds = [];
// Find all objects that have this object as their parent
const childObjects = await model.find({ parent: id });
// Recursively delete all children first
for (const childObject of childObjects) {
const childDeletedIds = await recursivelyDeleteChildObjects(
{ model, id: childObject._id, user },
false
);
deletedIds.push(...childDeletedIds);
}
// Delete the current object
await deleteObject({ model, id, user }, distributeChanges);
deletedIds.push(id);
return deletedIds;
};
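A note on the listObjects change above: the generic loop that stripped a '._id' suffix from any filter key is replaced by explicit handling of 'parent._id' and 'owner._id' only. A minimal sketch of the new behaviour (hypothetical helper, not part of this diff):

// Hypothetical illustration of the narrowed key translation in listObjects.
const translateIdFilters = (filter) => {
  const out = { ...filter };
  for (const key of ['parent._id', 'owner._id']) {
    if (out[key]) {
      out[key.slice(0, -4)] = out[key]; // 'parent._id' -> 'parent'
      delete out[key];
    }
  }
  return out;
};

// translateIdFilters({ 'parent._id': someId, state: 'active' })
//   -> { parent: someId, state: 'active' }
// Any other '*._id' key (e.g. 'filament._id') now passes through untouched,
// which is why several routes below drop such keys from their allowedFilters.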

View File

@@ -16,13 +16,13 @@ class EtcdServer {
this.client = null;
this.watchers = new Map();
this.hosts = [`${ETCD_HOST}:${ETCD_PORT}`];
logger.trace(`EtcdServer constructor: hosts set to ${JSON.stringify(this.hosts)}`);
logger.debug(`EtcdServer constructor: hosts set to ${JSON.stringify(this.hosts)}`);
}
async connect() {
if (!this.client) {
logger.info('Connecting to Etcd...');
logger.trace(`Creating Etcd client with hosts ${JSON.stringify(this.hosts)}`);
logger.debug(`Creating Etcd client with hosts ${JSON.stringify(this.hosts)}`);
this.client = new Etcd3({
hosts: this.hosts,
});
@@ -30,16 +30,16 @@
// Test connection
try {
await this.client.get('test-connection').string();
logger.trace('Etcd client connected successfully.');
logger.debug('Etcd client connected successfully.');
} catch (error) {
if (error.code === 'NOT_FOUND') {
logger.trace('Etcd client connected successfully (test key not found as expected).');
logger.debug('Etcd client connected successfully (test key not found as expected).');
} else {
throw error;
}
}
} else {
logger.trace('Etcd client already exists, skipping connection.');
logger.debug('Etcd client already exists, skipping connection.');
}
return this.client;
}
@@ -47,7 +47,7 @@
async getClient() {
logger.trace('Checking if Etcd client exists.');
if (!this.client) {
logger.trace('No client found, calling connect().');
logger.debug('No client found, calling connect().');
await this.connect();
}
logger.trace('Returning Etcd client.');
@@ -60,7 +60,7 @@
const stringValue = typeof value === 'string' ? value : JSON.stringify(value);
await client.put(key).value(stringValue);
logger.trace(`Set key: ${key}, value: ${stringValue}`);
logger.debug(`Set key: ${key}, value: ${stringValue}`);
return true;
}
@@ -69,7 +69,7 @@
try {
const value = await client.get(key).string();
logger.trace(`Retrieved key: ${key}, value: ${value}`);
logger.debug(`Retrieved key: ${key}, value: ${value}`);
// Try to parse as JSON, fallback to string
try {
@@ -79,7 +79,7 @@
}
} catch (error) {
if (error.code === 'NOT_FOUND') {
logger.trace(`Key not found: ${key}`);
logger.debug(`Key not found: ${key}`);
return null;
}
throw error;
@@ -91,7 +91,7 @@
// Stop all watchers
for (const [key, watcher] of this.watchers) {
logger.trace(`Stopping watcher: ${key}`);
logger.debug(`Stopping watcher: ${key}`);
watcher.removeAllListeners();
await watcher.close();
}
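These hunks only raise a handful of trace-level messages to debug. With log4js level filtering, that means the messages now appear at LOG_LEVEL=debug (previously they required trace) and stay hidden at the default info level. A small sketch, assuming a log4js setup like the one in this file (the 'Etcd' category name is illustrative):

import log4js from 'log4js';

const logger = log4js.getLogger('Etcd');
logger.level = process.env.LOG_LEVEL || 'info';

logger.debug('emitted when LOG_LEVEL is debug or trace');
logger.trace('emitted only when LOG_LEVEL is trace');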

View File

@@ -1,313 +0,0 @@
import { connect } from '@nats-io/transport-node';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
const NATS_HOST = process.env.NATS_HOST || 'localhost';
const NATS_PORT = process.env.NATS_PORT || 4222;
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
const logger = log4js.getLogger('Nats');
logger.level = LOG_LEVEL;
class NatsServer {
constructor() {
this.client = null;
this.subscriptions = new Map(); // subject → { subscription, callbacks }
this.requestHandlers = new Map(); // subject → { handler, callbacks }
this.queuedSubscriptions = new Map(); // subject → { subscription, callbacks, queue }
this.servers = [`nats://${NATS_HOST}:${NATS_PORT}`];
this.textEncoder = new TextEncoder();
this.textDecoder = new TextDecoder();
logger.trace(`NatsServer: servers set to ${JSON.stringify(this.servers)}`);
}
async connect() {
if (!this.client) {
logger.info('Connecting to NATS...');
logger.trace(`Creating NATS client with servers ${JSON.stringify(this.servers)}`);
try {
this.client = await connect({
servers: this.servers,
reconnect: true,
maxReconnectAttempts: -1, // unlimited reconnects
reconnectTimeWait: 1000,
timeout: 20000,
});
// Test connection by checking if client is connected
try {
if (this.client.isClosed()) {
throw new Error('NATS client connection failed');
}
logger.trace('NATS client connected successfully.');
} catch (error) {
throw error;
}
} catch (error) {
logger.error('Failed to connect to NATS:', error);
throw error;
}
} else {
logger.trace('NATS client already exists, skipping connection.');
}
return this.client;
}
async getClient() {
if (!this.client) {
logger.trace('No client found, calling connect().');
await this.connect();
}
return this.client;
}
async publish(subject, data) {
const client = await this.getClient();
const payload = typeof data === 'string' ? data : JSON.stringify(data);
try {
client.publish(subject, this.textEncoder.encode(payload));
logger.trace(`Published to subject: ${subject}, data: ${payload}`);
return { success: true };
} catch (error) {
logger.error(`Failed to publish to subject ${subject}:`, error);
throw error;
}
}
async request(subject, data, timeout = 30000) {
const client = await this.getClient();
const payload = typeof data === 'string' ? data : JSON.stringify(data);
try {
const response = await client.request(subject, this.textEncoder.encode(payload), {
timeout: timeout,
});
const responseData = this.textDecoder.decode(response.data);
logger.trace(`Request to subject: ${subject}, response: ${responseData}`);
// Try to parse as JSON, fallback to string
try {
return JSON.parse(responseData);
} catch {
return responseData;
}
} catch (error) {
if (error.code === 'TIMEOUT') {
logger.trace(`Request timeout for subject: ${subject}`);
return null;
}
throw error;
}
}
async subscribe(subject, owner, callback) {
const client = await this.getClient();
const subscriptionKey = subject;
if (this.subscriptions.has(subscriptionKey)) {
this.subscriptions.get(subscriptionKey).callbacks.set(owner, callback);
logger.trace(`Added subscription callback for owner=${owner} on subject=${subject}`);
return { success: true };
}
logger.trace(`Creating new subscription for subject: ${subject}`);
const subscription = client.subscribe(subject);
const callbacks = new Map();
callbacks.set(owner, callback);
(async () => {
for await (const msg of subscription) {
logger.trace(`Message received on subject: ${subject}`);
const data = this.textDecoder.decode(msg.data);
let parsedData;
try {
parsedData = JSON.parse(data);
} catch {
parsedData = data;
}
for (const [ownerId, cb] of callbacks) {
try {
cb(subject, parsedData, msg);
} catch (err) {
logger.error(
`Error in subscription callback for owner=${ownerId}, subject=${subject}:`,
err
);
}
}
}
})().catch((err) => {
logger.error(`Subscription error for subject ${subject}:`, err);
});
this.subscriptions.set(subscriptionKey, { subscription, callbacks });
return { success: true };
}
async setRequestHandler(subject, owner, handler) {
const client = await this.getClient();
const handlerKey = subject;
if (this.requestHandlers.has(handlerKey)) {
this.requestHandlers.get(handlerKey).callbacks.set(owner, handler);
logger.trace(`Added request handler for owner=${owner} on subject=${subject}`);
return { success: true };
}
logger.trace(`Creating new request handler for subject: ${subject}`);
const subscription = client.subscribe(subject);
const callbacks = new Map();
callbacks.set(owner, handler);
(async () => {
for await (const msg of subscription) {
logger.trace(`Request received on subject: ${subject}`);
const data = this.textDecoder.decode(msg.data);
let parsedData;
try {
parsedData = JSON.parse(data);
} catch {
parsedData = data;
}
for (const [ownerId, cb] of callbacks) {
try {
const response = await cb(subject, parsedData, msg);
const responsePayload =
typeof response === 'string' ? response : JSON.stringify(response);
msg.respond(this.textEncoder.encode(responsePayload));
} catch (err) {
logger.error(`Error in request handler for owner=${ownerId}, subject=${subject}:`, err);
// Send error response
msg.respond(this.textEncoder.encode(JSON.stringify({ error: err.message })));
}
}
}
})().catch((err) => {
logger.error(`Request handler error for subject ${subject}:`, err);
});
this.requestHandlers.set(handlerKey, { subscription, callbacks });
return { success: true };
}
async removeSubscription(subject, owner) {
const entry = this.subscriptions.get(subject);
if (!entry) {
logger.trace(`Subscription not found for subject: ${subject}`);
return false;
}
if (entry.callbacks.delete(owner)) {
logger.trace(`Removed subscription callback for owner: ${owner} on subject: ${subject}`);
} else {
logger.trace(`No subscription callback found for owner: ${owner} on subject: ${subject}`);
}
if (entry.callbacks.size === 0) {
logger.trace(`No callbacks left, stopping subscription for ${subject}`);
entry.subscription.unsubscribe();
this.subscriptions.delete(subject);
}
return true;
}
async removeQueuedSubscription(subject, queue, owner) {
const subscriptionKey = `${subject}:${queue}`;
const entry = this.queuedSubscriptions.get(subscriptionKey);
if (!entry) {
logger.trace(`Queued subscription not found for subject: ${subject}, queue: ${queue}`);
return false;
}
if (entry.callbacks.delete(owner)) {
logger.trace(
`Removed queued subscription callback for owner: ${owner} on subject: ${subject}, queue: ${queue}`
);
} else {
logger.trace(
`No queued subscription callback found for owner: ${owner} on subject: ${subject}, queue: ${queue}`
);
}
if (entry.callbacks.size === 0) {
logger.trace(
`No callbacks left, stopping queued subscription for ${subject}, queue: ${queue}`
);
entry.subscription.unsubscribe();
this.queuedSubscriptions.delete(subscriptionKey);
}
return true;
}
async removeRequestHandler(subject, owner) {
const entry = this.requestHandlers.get(subject);
if (!entry) {
logger.trace(`Request handler not found for subject: ${subject}`);
return false;
}
if (entry.callbacks.delete(owner)) {
logger.trace(`Removed request handler for owner: ${owner} on subject: ${subject}`);
} else {
logger.trace(`No request handler found for owner: ${owner} on subject: ${subject}`);
}
if (entry.callbacks.size === 0) {
logger.trace(`No handlers left, stopping request handler for ${subject}`);
entry.subscription.unsubscribe();
this.requestHandlers.delete(subject);
}
return true;
}
async disconnect() {
logger.info('Disconnecting from NATS...');
// Stop all subscriptions
for (const [subject, entry] of this.subscriptions) {
logger.trace(`Stopping subscription: ${subject}`);
entry.subscription.unsubscribe();
}
this.subscriptions.clear();
// Stop all queued subscriptions
for (const [key, entry] of this.queuedSubscriptions) {
logger.trace(`Stopping queued subscription: ${key}`);
entry.subscription.unsubscribe();
}
this.queuedSubscriptions.clear();
// Stop all request handlers
for (const [subject, entry] of this.requestHandlers) {
logger.trace(`Stopping request handler: ${subject}`);
entry.subscription.unsubscribe();
}
this.requestHandlers.clear();
if (this.client) {
await this.client.close();
this.client = null;
logger.info('Disconnected from NATS');
}
}
}
const natsServer = new NatsServer();
export { NatsServer, natsServer };
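With the NATS transport deleted, change notifications are written to etcd keys instead (see the utils.js hunk at the end of this diff). A minimal consumer sketch, assuming the etcd3 watch API that EtcdServer already wraps; the import path and key name are illustrative and mirror what distributeNew writes:

import { etcdServer } from './database/etcd.js'; // path is illustrative

// Hypothetical consumer: react whenever a new note id is written to /notes/new.
const watchNewNotes = async () => {
  const client = await etcdServer.getClient();
  const watcher = await client.watch().key('/notes/new').create();
  watcher.on('put', (kv) => {
    console.log(`New note created: ${kv.value.toString()}`);
  });
  return watcher;
};

watchNewNotes().catch((err) => console.error('watch failed:', err));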

View File

@@ -28,7 +28,6 @@ import {
documentSizesRoutes,
documentTemplatesRoutes,
documentPrintersRoutes,
documentJobsRoutes,
} from './routes/index.js';
import path from 'path';
import * as fs from 'fs';
@@ -37,7 +36,6 @@ import ReseedAction from './database/ReseedAction.js';
import log4js from 'log4js';
import { etcdServer } from './database/etcd.js';
import { populateUserMiddleware } from './services/misc/auth.js';
import { natsServer } from './database/nats.js';
dotenv.config();
@@ -72,15 +70,6 @@ try {
throw err;
}
// Connect to NATS (await)
try {
natsServer.connect();
logger.info('Connected to NATS');
} catch (err) {
logger.error('Failed to connect to NATS:', err);
throw err;
}
app.use(cors(corsOptions));
app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' }));
app.use(express.json());
@@ -115,7 +104,6 @@ app.use('/notetypes', noteTypeRoutes);
app.use('/documentsizes', documentSizesRoutes);
app.use('/documenttemplates', documentTemplatesRoutes);
app.use('/documentprinters', documentPrintersRoutes);
app.use('/documentjobs', documentJobsRoutes);
app.use('/notes', noteRoutes);
if (process.env.SCHEDULE_HOUR) {

View File

@@ -20,7 +20,6 @@ import noteTypeRoutes from './management/notetypes.js';
import documentSizesRoutes from './management/documentsizes.js';
import documentTemplatesRoutes from './management/documenttemplates.js';
import documentPrintersRoutes from './management/documentprinters.js';
import documentJobsRoutes from './management/documentjobs.js';
import noteRoutes from './misc/notes.js';
export {
@@ -47,5 +46,4 @@
documentSizesRoutes,
documentTemplatesRoutes,
documentPrintersRoutes,
documentJobsRoutes,
};

View File

@@ -15,7 +15,7 @@ import {
// list of filament stocks
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['filament', 'state', 'startingWeight', 'currentWeight', 'filament._id'];
const allowedFilters = ['filament', 'state', 'startingWeight', 'currentWeight'];
const filter = getFilter(req.query, allowedFilters);
listFilamentStocksRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

View File

@@ -1,46 +0,0 @@
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { getFilter, convertPropertiesString } from '../../utils.js';
const router = express.Router();
import {
listDocumentJobsRouteHandler,
getDocumentJobRouteHandler,
editDocumentJobRouteHandler,
newDocumentJobRouteHandler,
deleteDocumentJobRouteHandler,
listDocumentJobsByPropertiesRouteHandler,
} from '../../services/management/documentjobs.js';
// list of document jobs
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['name', 'width', 'height'];
const filter = getFilter(req.query, allowedFilters);
listDocumentJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});
router.get('/properties', isAuthenticated, (req, res) => {
let properties = convertPropertiesString(req.query.properties);
const allowedFilters = [];
const filter = getFilter(req.query, allowedFilters, false);
listDocumentJobsByPropertiesRouteHandler(req, res, properties, filter);
});
router.post('/', isAuthenticated, (req, res) => {
newDocumentJobRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getDocumentJobRouteHandler(req, res);
});
router.put('/:id', isAuthenticated, async (req, res) => {
editDocumentJobRouteHandler(req, res);
});
router.delete('/:id', isAuthenticated, async (req, res) => {
deleteDocumentJobRouteHandler(req, res);
});
export default router;

View File

@@ -15,7 +15,7 @@ import {
// list of document templates
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['name', 'tags', 'active', 'global', 'objectType'];
const allowedFilters = ['name', 'tags', 'active', 'isGlobal'];
const filter = getFilter(req.query, allowedFilters);
listDocumentTemplatesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

View File

@@ -1,50 +1,42 @@
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { getFilter, convertPropertiesString } from '../../utils.js';
const router = express.Router();
import {
listNoteTypesRouteHandler,
getNoteTypeRouteHandler,
editNoteTypeRouteHandler,
newNoteTypeRouteHandler,
deleteNoteTypeRouteHandler,
listNoteTypesByPropertiesRouteHandler,
} from '../../services/management/notetypes.js';
import { parseFilter } from '../../utils.js';
// list of note types
router.get('/', isAuthenticated, (req, res) => {
const router = express.Router();
// List note types
router.get('/', isAuthenticated, async (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['_id', 'name', 'active', 'color'];
const filter = getFilter(req.query, allowedFilters);
const allowedFilters = ['name', 'active', 'color', '_id'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listNoteTypesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});
router.get('/properties', isAuthenticated, (req, res) => {
let properties = convertPropertiesString(req.query.properties);
const allowedFilters = ['active', 'color'];
const filter = getFilter(req.query, allowedFilters, false);
var masterFilter = {};
if (req.query.masterFilter) {
masterFilter = JSON.parse(req.query.masterFilter);
}
listNoteTypesByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
});
// Get single note type
router.get('/:id', isAuthenticated, getNoteTypeRouteHandler);
router.post('/', isAuthenticated, (req, res) => {
newNoteTypeRouteHandler(req, res);
});
// Edit note type
router.put('/:id', isAuthenticated, editNoteTypeRouteHandler);
router.get('/:id', isAuthenticated, (req, res) => {
getNoteTypeRouteHandler(req, res);
});
router.put('/:id', isAuthenticated, async (req, res) => {
editNoteTypeRouteHandler(req, res);
});
router.delete('/:id', isAuthenticated, async (req, res) => {
deleteNoteTypeRouteHandler(req, res);
});
// Create new note type
router.post('/', isAuthenticated, newNoteTypeRouteHandler);
export default router;
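The same allowed-filter loop now appears inline here and again in the notes and subjobs routes below. If it keeps spreading it could be folded into a small helper; a sketch under that assumption (hypothetical buildFilter, relying on the parseFilter already imported above):

// Hypothetical helper, not part of this diff.
const buildFilter = (query, allowedFilters) => {
  let filter = {};
  for (const [key, value] of Object.entries(query)) {
    if (allowedFilters.includes(key)) {
      filter = { ...filter, ...parseFilter(key, value) };
    }
  }
  return filter;
};

// Usage in the list route above:
// const filter = buildFilter(req.query, ['name', 'active', 'color', '_id']);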

View File

@@ -7,39 +7,40 @@ import {
newNoteRouteHandler,
deleteNoteRouteHandler,
} from '../../services/misc/notes.js';
import { getFilter } from '../../utils.js';
import { parseFilter } from '../../utils.js';
const router = express.Router();
// list of notes
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['parent._id'];
const filter = getFilter(req.query, allowedFilters);
listNotesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
// List notes
router.get('/', isAuthenticated, async (req, res) => {
const { page, limit, property, sort, order } = req.query;
const allowedFilters = ['parent', 'user._id'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const filterObject = parseFilter(key, value);
filter = { ...filter, ...filterObject };
}
}
}
listNotesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
});
router.get('/properties', isAuthenticated, (req, res) => {
let properties = convertPropertiesString(req.query.properties);
const allowedFilters = ['parent'];
const filter = getFilter(req.query, allowedFilters, false);
listNotesByPropertiesRouteHandler(req, res, properties, filter);
});
// Get single note
router.get('/:id', isAuthenticated, getNoteRouteHandler);
// create new note
router.post('/', isAuthenticated, (req, res) => {
newNoteRouteHandler(req, res);
});
// Edit note
router.put('/:id', isAuthenticated, editNoteRouteHandler);
router.get('/:id', isAuthenticated, (req, res) => {
getNoteRouteHandler(req, res);
});
// update note info
router.put('/:id', isAuthenticated, async (req, res) => {
editNoteRouteHandler(req, res);
});
// Delete note
router.delete('/:id', isAuthenticated, deleteNoteRouteHandler);
// Create new note
router.post('/', isAuthenticated, newNoteRouteHandler);
export default router;

View File

@@ -15,7 +15,7 @@ import { convertPropertiesString, getFilter } from '../../utils.js';
// list of printers
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['tags', 'host._id'];
const allowedFilters = ['tags'];
const filter = getFilter(req.query, allowedFilters);
listPrintersRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

View File

@@ -2,30 +2,26 @@ import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
const router = express.Router();
import {
listSubJobsRouteHandler,
listSubJobsByPropertiesRouteHandler,
getSubJobRouteHandler,
} from '../../services/production/subjobs.js';
import { getFilter } from '../../utils.js';
import { listSubJobsRouteHandler } from '../../services/production/subjobs.js';
import { parseFilter } from '../../utils.js';
// list of sub jobs
// list of print subjobs
router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = ['_id', 'job._id', 'printer._id'];
const filter = getFilter(req.query, allowedFilters);
const allowedFilters = ['_id', 'job._id'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const parsedFilter = parseFilter(key, value);
filter = { ...filter, ...parsedFilter };
}
}
}
listSubJobsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});
router.get('/properties', isAuthenticated, (req, res) => {
let properties = convertPropertiesString(req.query.properties);
const allowedFilters = ['job'];
const filter = getFilter(req.query, allowedFilters, false);
listSubJobsByPropertiesRouteHandler(req, res, properties, filter);
});
router.get('/:id', isAuthenticated, (req, res) => {
getSubJobRouteHandler(req, res);
});
export default router;

View File

@@ -1,42 +0,0 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const documentJobSchema = new Schema(
{
name: {
type: String,
required: true,
unique: true,
},
objectType: { type: String, required: false },
state: {
type: { type: String, required: true, default: 'queued' },
percent: { type: Number, required: false },
},
documentTemplate: {
type: Schema.Types.ObjectId,
ref: 'documentTemplate',
required: true,
},
documentPrinter: {
type: Schema.Types.ObjectId,
ref: 'documentPrinter',
required: true,
},
content: {
type: String,
required: false,
},
},
{ timestamps: true }
);
// Add virtual id getter
documentJobSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
documentJobSchema.set('toJSON', { virtuals: true });
export const documentJobModel = mongoose.model('documentJob', documentJobSchema);

View File

@@ -17,7 +17,6 @@ const alertSchema = new Schema(
{
priority: { type: String, required: true }, // order to show
type: { type: String, required: true }, // selectFilament, error, info, message,
message: { type: String, required: false },
},
{ timestamps: true, _id: false }
);

View File

@@ -1,158 +0,0 @@
import dotenv from 'dotenv';
import { documentJobModel } from '../../schemas/management/documentjob.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
deleteObject,
listObjects,
getObject,
editObject,
newObject,
listObjectsByProperties,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Jobs');
logger.level = process.env.LOG_LEVEL;
export const listDocumentJobsRouteHandler = async (
req,
res,
page = 1,
limit = 25,
property = '',
filter = {},
search = '',
sort = '',
order = 'ascend'
) => {
const result = await listObjects({
model: documentJobModel,
page,
limit,
property,
filter,
search,
sort,
order,
});
if (result?.error) {
logger.error('Error listing document jobs.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of document jobs (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listDocumentJobsByPropertiesRouteHandler = async (
req,
res,
properties = '',
filter = {}
) => {
const result = await listObjectsByProperties({
model: documentJobModel,
properties,
filter,
});
if (result?.error) {
logger.error('Error listing document jobs.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of document jobs. Count: ${result.length}`);
res.send(result);
};
export const getDocumentJobRouteHandler = async (req, res) => {
const id = req.params.id;
const result = await getObject({
model: documentJobModel,
id,
});
if (result?.error) {
logger.warn(`Document Job not found with supplied id.`);
return res.status(result.code).send(result);
}
logger.debug(`Retreived document job with ID: ${id}`);
res.send(result);
};
export const editDocumentJobRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
logger.trace(`Document Job with ID: ${id}`);
const updateData = {
updatedAt: new Date(),
name: req.body.name,
width: req.body.width,
height: req.body.height,
};
// Create audit log before updating
const result = await editObject({
model: documentJobModel,
id,
updateData,
user: req.user,
});
if (result.error) {
logger.error('Error editing document job:', result.error);
res.status(result).send(result);
return;
}
logger.debug(`Edited document job with ID: ${id}`);
res.send(result);
};
export const newDocumentJobRouteHandler = async (req, res) => {
const newData = {
updatedAt: new Date(),
name: req.body.name,
width: req.body.width,
height: req.body.height,
};
const result = await newObject({
model: documentJobModel,
newData,
user: req.user,
});
if (result.error) {
logger.error('No document job created:', result.error);
return res.status(result.code).send(result);
}
logger.debug(`New document job with ID: ${result._id}`);
res.send(result);
};
export const deleteDocumentJobRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
logger.trace(`Document Job with ID: ${id}`);
const result = await deleteObject({
model: documentJobModel,
id,
user: req.user,
});
if (result.error) {
logger.error('No document job deleted:', result.error);
return res.status(result.code).send(result);
}
logger.debug(`Deleted document job with ID: ${result._id}`);
res.send(result);
};

View File

@@ -2,14 +2,8 @@ import dotenv from 'dotenv';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
deleteObject,
listObjects,
getObject,
editObject,
newObject,
listObjectsByProperties,
} from '../../database/database.js';
import { distributeUpdate, newAuditLog, editAuditLog, distributeNew } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('NoteTypes');
@@ -26,135 +20,135 @@ export const listNoteTypesRouteHandler = async (
sort = '',
order = 'ascend'
) => {
const result = await listObjects({
model: noteTypeModel,
page,
limit,
property,
filter,
search,
sort,
order,
});
try {
const skip = (page - 1) * limit;
let noteTypes;
let aggregateCommand = [];
if (result?.error) {
logger.error('Error listing note types.');
res.status(result.code).send(result);
return;
if (search) {
// Add a text search match stage when a search term is supplied
aggregateCommand.push({
$match: {
$text: {
$search: search,
},
},
});
}
logger.debug(`List of note types (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listNoteTypesByPropertiesRouteHandler = async (
req,
res,
properties = '',
filter = {},
masterFilter = {}
) => {
const result = await listObjectsByProperties({
model: noteTypeModel,
properties,
filter,
masterFilter,
});
if (result?.error) {
logger.error('Error listing note types.');
res.status(result.code).send(result);
return;
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
logger.debug(`List of note types. Count: ${result.length}`);
res.send(result);
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
console.log(aggregateCommand);
noteTypes = await noteTypeModel.aggregate(aggregateCommand);
logger.trace(
`List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
noteTypes
);
res.send(noteTypes);
} catch (error) {
logger.error('Error listing note types:', error);
res.status(500).send({ error: error });
}
};
export const getNoteTypeRouteHandler = async (req, res) => {
const id = req.params.id;
const result = await getObject({
model: noteTypeModel,
id,
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const noteType = await noteTypeModel.findOne({
_id: id,
});
if (result?.error) {
logger.warn(`Note Type not found with supplied id.`);
return res.status(result.code).send(result);
if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType);
res.send({ ...noteType._doc });
} catch (error) {
logger.error('Error fetching note type:', error);
res.status(500).send({ error: error.message });
}
logger.debug(`Retreived note type with ID: ${id}`);
res.send(result);
};
export const editNoteTypeRouteHandler = async (req, res) => {
// Get ID from params
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const noteType = await noteTypeModel.findOne({ _id: id });
logger.trace(`Note Type with ID: ${id}`);
if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType);
try {
const updateData = {
updatedAt: new Date(),
name: req.body.name,
color: req.body.color,
active: req.body.active,
};
// Create audit log before updating
const result = await editObject({
model: noteTypeModel,
id,
updateData,
user: req.user,
});
await editAuditLog(noteType.toObject(), updateData, id, 'noteType', req.user._id, 'user');
await distributeUpdate(updateData, id, 'noteType');
if (result.error) {
logger.error('Error editing note type:', result.error);
res.status(result).send(result);
return;
const result = await noteTypeModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error('No note type updated.');
res.status(500).send({ error: 'No note types updated.' });
}
} catch (updateError) {
logger.error('Error updating note type:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send('OK');
} catch (fetchError) {
logger.error('Error fetching note type:', fetchError);
res.status(500).send({ error: fetchError.message });
}
logger.debug(`Edited note type with ID: ${id}`);
res.send(result);
};
export const newNoteTypeRouteHandler = async (req, res) => {
const newData = {
updatedAt: new Date(),
name: req.body.name,
color: req.body.color,
active: req.body.active,
};
const result = await newObject({
model: noteTypeModel,
newData,
user: req.user,
});
if (result.error) {
logger.error('No note type created:', result.error);
return res.status(result.code).send(result);
try {
let { ...newNoteType } = req.body;
newNoteType = { ...newNoteType, createdAt: new Date(), updatedAt: new Date() };
const result = await noteTypeModel.create(newNoteType);
if (result.nCreated === 0) {
logger.error('No note type created.');
res.status(500).send({ error: 'No note type created.' });
}
logger.debug(`New note type with ID: ${result._id}`);
// Create audit log for new note type
await newAuditLog(newNoteType, result._id, 'noteType', req.user);
await distributeNew(result._id, 'filament');
res.send(result);
};
export const deleteNoteTypeRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
logger.trace(`Note Type with ID: ${id}`);
const result = await deleteObject({
model: noteTypeModel,
id,
user: req.user,
});
if (result.error) {
logger.error('No note type deleted:', result.error);
return res.status(result.code).send(result);
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error('Error creating note type:', updateError);
res.status(500).send({ error: updateError.message });
}
logger.debug(`Deleted note type with ID: ${result._id}`);
res.send(result);
};
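As a worked example of the new aggregate-based listing (hypothetical request; assumes parseFilter passes the name value through as a plain string), GET /notetypes?name=Info&sort=name&order=ascend&page=2&limit=25 would build roughly this pipeline:

// Approximate pipeline produced by listNoteTypesRouteHandler for that request.
const aggregateCommand = [
  { $match: { name: 'Info' } }, // from the allowed-filter loop in the route
  { $sort: { name: 1 } },       // 'ascend' -> 1, 'descend' -> -1
  { $skip: 25 },                // (page - 1) * limit
  { $limit: 25 },
];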

View File

@@ -1,16 +1,14 @@
import dotenv from 'dotenv';
import { noteModel } from '../../schemas/misc/note.schema.js';
import log4js from 'log4js';
import {
deleteObject,
editObject,
getObject,
listObjects,
listObjectsByProperties,
newObject,
recursivelyDeleteChildObjects,
} from '../../database/database.js';
import mongoose from 'mongoose';
import {
deleteAuditLog,
editAuditLog,
expandObjectIds,
flatternObjectIds,
newAuditLog,
} from '../../utils.js';
dotenv.config();
@@ -23,141 +21,197 @@ export const listNotesRouteHandler = async (
page = 1,
limit = 25,
property = '',
filter = {},
search = '',
sort = '',
order = 'ascend'
filter = {}
) => {
const result = await listObjects({
model: noteModel,
page,
limit,
property,
filter,
search,
sort,
order,
populate: ['noteType', 'user'],
});
try {
const skip = (page - 1) * limit;
let notes;
let aggregateCommand = [];
if (result?.error) {
logger.error('Error listing notes.');
res.status(result.code).send(result);
return;
if (Object.keys(filter).length > 0) {
aggregateCommand.push({ $match: filter });
}
logger.debug(`List of notes (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listNotesByPropertiesRouteHandler = async (req, res, properties = '', filter = {}) => {
const result = await listObjectsByProperties({
model: noteModel,
properties,
filter,
populate: ['noteType', 'user'],
aggregateCommand.push({
$lookup: {
from: 'users', // The collection name (usually lowercase plural)
localField: 'user', // The field in your current model
foreignField: '_id', // The field in the users collection
as: 'user', // The output field name
},
});
aggregateCommand.push({ $unwind: '$user' });
aggregateCommand.push({
$lookup: {
from: 'notetypes', // The collection name (usually lowercase plural)
localField: 'noteType', // The field in your current model
foreignField: '_id', // The field in the notetypes collection
as: 'noteType', // The output field name
},
});
aggregateCommand.push({ $unwind: '$noteType' });
aggregateCommand.push({
$project: {
name: 1,
_id: 1,
createdAt: 1,
updatedAt: 1,
'noteType._id': 1,
'noteType.name': 1,
'noteType.color': 1,
'user._id': 1,
'user.name': 1,
content: 1,
parent: 1,
},
});
if (result?.error) {
logger.error('Error listing notes.');
res.status(result.code).send(result);
return;
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
logger.debug(`List of notes. Count: ${result.length}`);
res.send(result);
notes = await noteModel.aggregate(aggregateCommand);
logger.trace(`List of notes (Page ${page}, Limit ${limit}, Property ${property}):`, notes);
res.send(notes);
} catch (error) {
logger.error('Error listing notes:', error);
res.status(500).send({ error: error });
}
};
export const getNoteRouteHandler = async (req, res) => {
const id = req.params.id;
const result = await getObject({
model: noteModel,
id,
populate: ['noteType', 'user'],
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const note = await noteModel.findOne({
_id: id,
});
if (result?.error) {
if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(result.code).send(result);
return res.status(404).send({ error: 'Note not found.' });
}
logger.trace(`Note with ID: ${id}:`, note);
res.send({ ...note._doc });
} catch (error) {
logger.error('Error fetching note:', error);
res.status(500).send({ error: error.message });
}
logger.debug(`Retreived note with ID: ${id}`);
res.send(result);
};
export const editNoteRouteHandler = async (req, res) => {
// Get ID from params
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const note = await noteModel.findOne({ _id: id });
logger.trace(`Note with ID: ${id}`);
const updateData = {
updatedAt: new Date(),
content: req.body.content,
noteType: req.body.noteType,
};
// Create audit log before updating
const result = await editObject({
model: noteModel,
id,
updateData,
user: req.user,
});
if (result.error) {
logger.error('Error editing note:', result.error);
res.status(result).send(result);
return;
if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: 'Note not found.' });
}
logger.debug(`Edited note with ID: ${id}`);
logger.trace(`Note with ID: ${id}:`, note);
res.send(result);
try {
const updateData = {
updatedAt: new Date(),
name: req.body.name,
color: req.body.color,
isActive: req.body.isActive,
};
const result = await noteModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
logger.error('No note updated.');
res.status(500).send({ error: 'No notes updated.' });
}
await editAuditLog(note.toObject(), updateData, id, 'note', req.user);
} catch (updateError) {
logger.error('Error updating note:', updateError);
res.status(500).send({ error: updateError.message });
}
res.send('OK');
} catch (fetchError) {
logger.error('Error fetching note:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
export const newNoteRouteHandler = async (req, res) => {
const newData = {
updatedAt: new Date(),
content: req.body.content,
noteType: req.body.noteType,
parent: req.body.parent,
parentType: req.body.parentType,
user: req.user,
};
const result = await newObject({
model: noteModel,
newData,
user: req.user,
});
if (result.error) {
logger.error('No note created:', result.error);
return res.status(result.code).send(result);
try {
let { ...newNote } = req.body;
newNote = { ...newNote, createdAt: new Date(), updatedAt: new Date(), user: req.user };
const result = await noteModel.create(flatternObjectIds(newNote));
if (result.nCreated === 0) {
logger.error('No note created.');
res.status(500).send({ error: 'No note created.' });
}
logger.debug(`New note with ID: ${result._id}`);
await newAuditLog(expandObjectIds(newNote), result._id, 'note', req.user);
res.send(result);
res.status(200).send({ status: 'ok' });
} catch (updateError) {
logger.error('Error creating note:', updateError);
res.status(500).send({ error: updateError.message });
}
};
export const deleteNoteRouteHandler = async (req, res) => {
// Get ID from params
const id = req.params.id;
try {
const id = new mongoose.Types.ObjectId(req.params.id);
const note = await noteModel.findOne({ _id: id });
logger.trace(`Delete note with ID: ${id}`);
if (!note) {
logger.warn(`Note not found with supplied id.`);
return res.status(404).send({ error: 'Note not found.' });
}
// Check if the current user owns this note
if (note.user.toString() !== req.user._id.toString()) {
logger.warn(`User ${req.user._id} attempted to delete note ${id} owned by user ${note.user}`);
return res.status(403).send({ error: 'You can only delete your own notes.' });
}
logger.trace(`Deleting note with ID: ${id} and all its children`);
// Recursively find and delete all child notes
const result = await recursivelyDeleteChildObjects({ model: noteModel, id, user: req.user });
const deletedNoteIds = await recursivelyDeleteNotes(id, req.user);
if (result?.length <= 0) {
logger.error('No notes deleted');
return res.status(404).send(result);
}
if (result?.error) {
logger.error('No note deleted:', result.error);
return res.status(result.code).send(result);
}
logger.info(`Successfully deleted note ${id} and ${result.length - 1} child notes`);
logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
res.send({
status: 'ok',
deletedNoteIds: deletedNoteIds,
message: `Deleted ${deletedNoteIds.length} notes`,
});
} catch (error) {
logger.error('Error deleting note:', error);
res.status(500).send({ error: error.message });
}
};
// Helper function to recursively delete notes and their children
const recursivelyDeleteNotes = async (noteId, user) => {
const deletedIds = [];
// Find all notes that have this note as their parent
const childNotes = await noteModel.find({ parent: noteId });
// Recursively delete all children first
for (const childNote of childNotes) {
const childDeletedIds = await recursivelyDeleteNotes(childNote._id, user);
deletedIds.push(...childDeletedIds);
}
// Delete the current note
const note = await noteModel.findOne({ _id: noteId }).populate('user').populate('parent');
await deleteAuditLog(expandObjectIds(note.toObject()), noteId, 'note', user);
await noteModel.deleteOne({ _id: noteId });
deletedIds.push(noteId);
return deletedIds;
};

View File

@@ -1,7 +1,6 @@
import dotenv from 'dotenv';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import log4js from 'log4js';
import { getObject, listObjects, listObjectsByProperties } from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('SubJobs');
@@ -18,62 +17,102 @@ export const listSubJobsRouteHandler = async (
sort = '',
order = 'ascend'
) => {
const result = await listObjects({
model: subJobModel,
page,
limit,
property,
filter,
search,
sort,
order,
populate: ['printer'],
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
let subJobs;
let aggregateCommand = [];
if (search) {
// Add a text search match stage for name and other searchable fields
aggregateCommand.push({
$match: {
$text: {
$search: search,
},
},
});
}
// Lookup printer
aggregateCommand.push({
$lookup: {
from: 'printers', // The name of the Printer collection
localField: 'printer',
foreignField: '_id',
as: 'printer',
},
});
if (result?.error) {
logger.error('Error listing sub jobs.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of sub jobs (Page ${page}, Limit ${limit}). Count: ${result.length}`);
res.send(result);
};
export const listSubJobsByPropertiesRouteHandler = async (
req,
res,
properties = '',
filter = {},
masterFilter = {}
) => {
const result = await listObjectsByProperties({
model: subJobModel,
properties,
filter,
masterFilter,
// Lookup job
aggregateCommand.push({
$lookup: {
from: 'jobs', // The name of the Job collection
localField: 'job',
foreignField: '_id',
as: 'job',
},
});
if (result?.error) {
logger.error('Error listing sub jobs.');
res.status(result.code).send(result);
return;
}
logger.debug(`List of sub jobs. Count: ${result.length}`);
res.send(result);
};
export const getSubJobRouteHandler = async (req, res) => {
const id = req.params.id;
const result = await getObject({
model: subJobModel,
id,
aggregateCommand.push({
$unwind: {
path: '$printer',
preserveNullAndEmptyArrays: true, // Keep documents without a matching printer
},
});
if (result?.error) {
logger.warn(`Sub job not found with supplied id.`);
return res.status(result.code).send(result);
aggregateCommand.push({
$unwind: {
path: '$job',
preserveNullAndEmptyArrays: true, // Keep documents without a matching job
},
});
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({
$project: {
state: 1,
_id: 1,
createdAt: 1,
startedAt: 1,
'printer._id': 1,
'job._id': 1,
'printer.name': 1,
},
});
}
// Add sorting if sort parameter is provided
if (sort) {
const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
} else {
// Default sorting by createdAt descending
aggregateCommand.push({ $sort: { createdAt: -1 } });
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
console.log(aggregateCommand);
subJobs = await subJobModel.aggregate(aggregateCommand);
logger.trace(
`List of print subJobs (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
subJobs
);
res.send(subJobs);
} catch (error) {
logger.error('Error listing print subJobs:', error);
res.status(500).send({ error: error });
}
logger.debug(`Retreived sub job with ID: ${id}`);
res.send(result);
};

View File

@@ -1,7 +1,6 @@
import { ObjectId } from 'mongodb';
import { auditLogModel } from './schemas/management/auditlog.schema.js';
import { etcdServer } from './database/etcd.js';
import { natsServer } from './database/nats.js';
function parseFilter(property, value) {
if (typeof value === 'string') {
@@ -384,15 +383,11 @@ async function getAuditLogs(idOrIds) {
}
async function distributeUpdate(value, id, type) {
await natsServer.publish(`${type}s.${id}.object`, value);
await etcdServer.setKey(`/${type}s/${id}/object`, value);
}
async function distributeNew(value, type) {
await natsServer.publish(`${type}s.new`, value);
}
async function distributeDelete(value, type) {
await natsServer.publish(`${type}s.delete`, value);
async function distributeNew(id, type) {
await etcdServer.setKey(`/${type}s/new`, id);
}
function flatternObjectIds(object) {
@@ -492,7 +487,6 @@
expandObjectIds,
distributeUpdate,
distributeNew,
distributeDelete,
getFilter, // <-- add here
convertPropertiesString,
};
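Net effect of the utils.js change: creations and updates are now signalled purely through etcd keys rather than NATS subjects, and distributeDelete is gone entirely. A short usage sketch (hypothetical id and payload; key names follow the functions above):

import { distributeUpdate, distributeNew } from './utils.js'; // path is illustrative

const noteId = '64b7f0c2a1b2c3d4e5f60718'; // hypothetical id

// Writes the changed fields to etcd key /notes/<id>/object.
await distributeUpdate({ archived: true }, noteId, 'note');

// Records the newly created id at etcd key /notes/new.
await distributeNew(noteId, 'note');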