Compare commits

...

5 Commits

Author SHA1 Message Date
7f17f5f205 Rewrote subjob handling so it works correctly and provides better progress updates. 2025-12-02 18:29:59 +00:00
79ed7691f9 Added document printer support. 2025-11-29 01:28:20 +00:00
509b7de11c Refactoring 2025-11-29 01:27:19 +00:00
f3a1115a09 Enhance ActionManager with subscription management and listener removal functionality
- Introduced a Set to manage subscriptions for object actions, allowing for better tracking and cleanup of active listeners.
- Added a method to remove all action listeners, improving resource management and preventing memory leaks.
- Enhanced existing subscription and removal methods to ensure proper handling of callbacks and subscriptions.
- Improved logging for subscription actions to aid in debugging and traceability.
2025-11-29 01:26:55 +00:00
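As a rough illustration of the subscription-tracking pattern this commit describes (a minimal sketch; natsServer and the "subject:socketId" key format come from the ActionManager diff further down, the class name and everything else is illustrative):

// Each active NATS subscription is recorded as "<subject>:<socketId>" in a Set,
// so removeAllListeners() can tear everything down in one pass on disconnect.
class SubscriptionTracker {
  constructor(natsServer) {
    this.natsServer = natsServer;
    this.subscriptions = new Set();
  }

  async subscribe(subject, socketId, handler) {
    await this.natsServer.subscribe(subject, socketId, handler);
    this.subscriptions.add(`${subject}:${socketId}`);
  }

  async removeAllListeners() {
    const removals = Array.from(this.subscriptions).map(key => {
      const [subject, socketId] = key.split(':');
      return this.natsServer.removeSubscription(subject, socketId);
    });
    await Promise.all(removals);
    this.subscriptions.clear();
  }
}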
362265da72 Refactor caching mechanism to utilize Redis for improved performance and reliability
- Replaced in-memory caching with Redis for object and list caching in database operations.
- Introduced a new RedisServer class for managing Redis connections and operations.
- Updated cache retrieval and update functions to handle asynchronous operations with Redis.
- Enhanced logging for cache operations to improve traceability of cache hits, misses, and errors.
- Adjusted configuration to include Redis settings in config.json.
2025-11-24 03:35:26 +00:00
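The cache-aside read this commit introduces looks roughly like the sketch below (a simplified outline of retrieveObjectCache from the diff; readThroughCache is a hypothetical name, and the import paths are shown as they appear in the changed files):

import { redisServer } from './redis.js';
import { jsonToCacheKey } from '../utils.js';

// Try Redis first; on a miss or any Redis error return undefined so the
// caller falls back to querying MongoDB and repopulating the cache.
export const readThroughCache = async (model, id) => {
  const cacheKey = jsonToCacheKey({ model: model.modelName, id: id?.toString() });
  try {
    const cached = await redisServer.getKey(cacheKey);
    return cached == null ? undefined : cached;
  } catch {
    // Redis being unavailable should degrade to a cache miss, not an error.
    return undefined;
  }
};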
44 changed files with 7374 additions and 1381 deletions

View File

@ -1,53 +1,54 @@
{
"development": {
"server": {
"port": 9090,
"logLevel": "trace"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": []
},
"database": {
"etcd": {
"host": "localhost",
"port": 2379
},
"mongo": {
"url": "mongodb://192.168.68.53:27017/farmcontrol"
}
},
"otpExpiryMins": 0.5
"development": {
"server": {
"port": 9090,
"logLevel": "debug"
},
"production": {
"server": {
"port": 8081,
"logLevel": "info"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": []
},
"database": {
"etcd": {
"host": "localhost",
"port": 2379
},
"mongo": {
"url": "mongodb://farmcontrol.tombutcher.local:27017/farmcontrol"
}
}
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": []
},
"database": {
"etcd": {
"host": "localhost",
"port": 2379
},
"mongo": {
"url": "mongodb://127.0.0.1:27017/farmcontrol"
},
"redis": { "host": "localhost", "port": 6379, "password": "" }
},
"otpExpiryMins": 0.5
},
"production": {
"server": {
"port": 8081,
"logLevel": "info"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": []
},
"database": {
"etcd": {
"host": "localhost",
"port": 2379
},
"mongo": {
"url": "mongodb://farmcontrol.tombutcher.local:27017/farmcontrol"
}
}
}
}

package-lock.json generated

File diff suppressed because it is too large.

View File

@ -17,13 +17,13 @@
"author": "Tom Butcher",
"license": "ISC",
"dependencies": {
"@nats-io/nats-core": "^3.1.0",
"@nats-io/transport-node": "^3.1.0",
"axios": "^1.11.0",
"@nats-io/nats-core": "^3.2.0",
"@nats-io/transport-node": "^3.2.0",
"axios": "^1.13.2",
"canonical-json": "^0.2.0",
"date-fns": "^4.1.0",
"dayjs": "^1.11.13",
"dotenv": "^17.2.1",
"dayjs": "^1.11.19",
"dotenv": "^17.2.3",
"ejs": "^3.1.10",
"etcd3": "^1.1.2",
"express": "^5.1.0",
@ -31,20 +31,23 @@
"jsonwebtoken": "^9.0.2",
"lodash": "^4.17.21",
"log4js": "^6.9.1",
"mongodb": "^6.18.0",
"mongoose": "^8.17.1",
"nanoid": "^5.1.5",
"mongodb": "6",
"mongoose": "^8.19.4",
"nanoid": "^5.1.6",
"node-cache": "^5.1.2",
"object-hash": "^3.0.0",
"posthtml": "^0.16.6",
"posthtml": "^0.16.7",
"puppeteer": "^24.31.0",
"redis": "^4.6.14",
"socket.io": "^4.8.1",
"socket.io-adapter-mongo": "^2.0.5",
"socketio-jwt": "^4.6.2"
},
"devDependencies": {
"eslint": "^9.33.0",
"eslint": "^9.39.1",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-prettier": "^5.5.4",
"nodemon": "^3.1.11",
"prettier": "^3.6.2",
"standard": "^17.1.2"
}

View File

@ -16,11 +16,13 @@ export class ActionManager {
constructor(socketClient) {
this.socketClient = socketClient;
this.callbacks = new Map();
this.subscriptions = new Set();
}
async subscribeToObjectActions(id, objectType) {
logger.debug('Subscribing to object actions...', id, objectType);
const subject = `${objectType}s.${id}.actions`;
const subscriptionKey = `${subject}:${this.socketClient.id}`;
await natsServer.subscribe(
subject,
@ -47,12 +49,18 @@ export class ActionManager {
}
}
);
this.subscriptions.add(subscriptionKey);
return { success: true };
}
async removeObjectActionsListener(id, objectType) {
const subject = `${objectType}s.${id}.actions`;
const subscriptionKey = `${subject}:${this.socketClient.id}`;
await natsServer.removeSubscription(subject, this.socketClient.id);
this.subscriptions.delete(subscriptionKey);
return { success: true };
}
@ -68,22 +76,28 @@ export class ActionManager {
);
// Subscribe to the response subject
const responseSubscriptionKey = `${subject}:${this.socketClient.socketId}`;
await natsServer.subscribe(
subject,
this.socketClient.socketId,
async (subject, value) => {
if (value.result) {
logger.trace('Calling result callback...');
const storedCallback = this.callbacks.get(actionId);
const storedCallback = this.callbacks.get(actionId) || undefined;
await natsServer.removeSubscription(
subject,
this.socketClient.socketId
);
storedCallback(value.result);
this.subscriptions.delete(responseSubscriptionKey);
if (storedCallback) {
storedCallback(value.result);
}
}
}
);
this.subscriptions.add(responseSubscriptionKey);
// Publish the action
await natsServer.publish(`${objectType}s.${id}.actions`, {
...action,
@ -99,4 +113,20 @@ export class ActionManager {
return false;
}
}
async removeAllListeners() {
logger.debug('Removing all action listeners...');
const removePromises = Array.from(this.subscriptions).map(
subscriptionKey => {
const [subject, socketId] = subscriptionKey.split(':');
return natsServer.removeSubscription(subject, socketId);
}
);
await Promise.all(removePromises);
this.subscriptions.clear();
this.callbacks.clear();
logger.debug(`Removed ${removePromises.length} action listener(s)`);
return { success: true };
}
}

View File

@ -1,5 +1,4 @@
import _ from 'lodash';
import NodeCache from 'node-cache';
import {
deleteAuditLog,
expandObjectIds,
@ -10,8 +9,8 @@ import {
} from './utils.js';
import log4js from 'log4js';
import { loadConfig } from '../config.js';
import { userModel } from './schemas/management/user.schema.js';
import { jsonToCacheKey } from '../utils.js';
import { redisServer } from './redis.js';
const config = loadConfig();
@ -20,43 +19,40 @@ const cacheLogger = log4js.getLogger('Local Cache');
logger.level = config.server.logLevel;
cacheLogger.level = config.server.logLevel;
const objectCache = new NodeCache({
stdTTL: 30, // 30 sec expiration
checkperiod: 600, // 30 sec periodic cleanup
useClones: false // Don't clone objects for better performance
});
const listCache = new NodeCache({
stdTTL: 30, // 30 sec expiration
checkperiod: 600, // 30 sec periodic cleanup
useClones: false // Don't clone objects for better performance
});
// Default cache TTL in seconds (similar to previous in-memory cache)
const CACHE_TTL_SECONDS = config.database?.redis?.ttlSeconds || 5;
export const retrieveObjectCache = ({ model, id, populate = [] }) => {
export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
const cacheKeyObject = {
model: model.modelName,
id,
populate
id: id?.toString()
};
const cacheKey = jsonToCacheKey(cacheKeyObject);
cacheLogger.trace('Retrieving:');
const cachedObject = objectCache.get(cacheKey);
cacheLogger.trace('Retrieving:', cacheKeyObject);
if (cachedObject == undefined) {
cacheLogger.trace('Miss:', cacheKeyObject);
try {
const cachedObject = await redisServer.getKey(cacheKey);
if (cachedObject == null) {
cacheLogger.trace('Miss:', cacheKeyObject);
return undefined;
}
cacheLogger.trace('Hit:', {
model: model.modelName,
id: cacheKeyObject.id
});
return cachedObject;
} catch (err) {
cacheLogger.error('Error retrieving object from Redis cache:', err);
return undefined;
}
cacheLogger.trace('Hit:', {
model: model.modelName,
id
});
return cachedObject;
};
export const retrieveListCache = ({
export const retrieveListCache = async ({
model,
populate = [],
filter = {},
@ -66,7 +62,6 @@ export const retrieveListCache = ({
}) => {
const cacheKeyObject = {
model: model.modelName,
id,
populate,
filter,
sort,
@ -74,61 +69,77 @@ export const retrieveListCache = ({
order
};
cacheLogger.trace('Retrieving:', cacheKeyObject);
const cacheKey = jsonToCacheKey(cacheKeyObject);
const cachedList = listCache.get(cacheKey);
cacheLogger.trace('Retrieving:', cacheKeyObject);
if (cachedList != undefined) {
cacheLogger.trace('Hit:', {
...cacheKeyObject,
length: cachedList.length
try {
const cachedList = await redisServer.getKey(cacheKey);
if (cachedList != null) {
cacheLogger.trace('Hit:', {
...cacheKeyObject,
length: cachedList.length
});
return cachedList;
}
cacheLogger.trace('Miss:', {
model: model.modelName
});
return cachedList;
return undefined;
} catch (err) {
cacheLogger.error('Error retrieving list from Redis cache:', err);
return undefined;
}
cacheLogger.trace('Miss:', {
model: model.modelName
});
return undefined;
};
export const updateObjectCache = ({ model, id, object, populate = [] }) => {
export const updateObjectCache = async ({ model, id, object }) => {
const cacheKeyObject = {
model: model.modelName,
id,
populate
id: id?.toString()
};
const cacheKey = jsonToCacheKey(cacheKeyObject);
cacheLogger.trace('Updating:', cacheKeyObject);
const cachedObject = objectCache.get(cacheKey) || {};
const mergedObject = _.merge(cachedObject, object);
try {
const cachedObject = (await redisServer.getKey(cacheKey)) || {};
const mergedObject = _.merge(cachedObject, object);
objectCache.set(cacheKey, mergedObject);
await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);
cacheLogger.trace('Updated:', { ...cacheKeyObject });
cacheLogger.trace('Updated:', { ...cacheKeyObject });
return mergedObject;
return mergedObject;
} catch (err) {
cacheLogger.error('Error updating object in Redis cache:', err);
// Fallback to returning the provided object if cache fails
return object;
}
};
export const deleteObjectCache = ({ model, id }) => {
export const deleteObjectCache = async ({ model, id }) => {
const cacheKeyObject = {
model: model.modelName,
id: id?.toString()
};
cacheLogger.trace('Deleting:', {
model: model.modelName,
id
...cacheKeyObject
});
modelCache.del(id);
try {
// Note: we currently delete the non-populated key; populated variants will expire via TTL.
const cacheKey = jsonToCacheKey({ ...cacheKeyObject, populate: [] });
await redisServer.deleteKey(cacheKey);
cacheLogger.trace('Deleted:', {
model: model.modelName,
id
});
return mergedObject;
cacheLogger.trace('Deleted:', {
...cacheKeyObject
});
} catch (err) {
cacheLogger.error('Error deleting object from Redis cache:', err);
}
};
export const updateListCache = ({
@ -156,14 +167,20 @@ export const updateListCache = ({
const cacheKey = jsonToCacheKey(cacheKeyObject);
listCache.set(cacheKey, objects);
return (async () => {
try {
await redisServer.setKey(cacheKey, objects, CACHE_TTL_SECONDS);
cacheLogger.trace('Updated:', {
...cacheKeyObject,
length: objects.length
});
cacheLogger.trace('Updated:', {
...cacheKeyObject,
length: objects.length
});
} catch (err) {
cacheLogger.error('Error updating list in Redis cache:', err);
}
return objects;
return objects;
})();
};
// Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
@ -188,7 +205,7 @@ export const listObjects = async ({
});
if (cached == true) {
const objectsCache = retrieveObjectsCache({
const objectsCache = await retrieveListCache({
model,
populate,
filter,
@ -234,7 +251,7 @@ export const listObjects = async ({
}
// Handle select (projection)
if (project != {}) {
if (project && Object.keys(project).length > 0) {
query = query.select(project);
}
@ -286,7 +303,7 @@ export const getObject = async ({
});
if (cached == true) {
const cachedObject = retrieveObjectCache({ model, id, populate });
const cachedObject = await retrieveObjectCache({ model, id, populate });
if (cachedObject != undefined) {
return cachedObject;
}
@ -343,37 +360,41 @@ export const editObject = async ({
updateData,
owner = undefined,
ownerType = undefined,
populate = []
populate = [],
auditLog = true
}) => {
try {
// Determine parentType from model name
const parentType = model.modelName ? model.modelName : 'unknown';
// Fetch and update the object
var query = model.findByIdAndUpdate(id, updateData).lean();
var newQuery = model.findById(id).lean();
if (populate) {
if (Array.isArray(populate)) {
for (const pop of populate) {
query = query.populate(pop);
newQuery = newQuery.populate(pop);
}
} else if (typeof populate === 'string' || typeof populate === 'object') {
query = query.populate(populate);
newQuery = newQuery.populate(populate);
}
}
const previousObject = await query;
if (!previousObject) {
const newObject = await newQuery;
if (!previousObject || !newObject) {
return { error: `${parentType} not found.`, code: 404 };
}
const previousExpandedObject = expandObjectIds(previousObject);
const newExpandedObject = expandObjectIds(newObject);
if (owner != undefined && ownerType != undefined) {
if (auditLog == true && owner != undefined && ownerType != undefined) {
// Audit log before update
await editAuditLog(
previousExpandedObject,
{ ...previousExpandedObject, ...updateData },
newExpandedObject,
id,
parentType,
owner,
@ -387,7 +408,8 @@ export const editObject = async ({
updateObjectCache({
model: model,
id: id.toString(),
object: { ...previousExpandedObject, ...updateData }
object: { ...previousExpandedObject, ...updateData },
populate
});
return { ...previousExpandedObject, ...updateData };

src/database/redis.js Normal file
View File

@ -0,0 +1,65 @@
import { createClient } from 'redis';
import log4js from 'log4js';
import { loadConfig } from '../config.js';
const config = loadConfig();
const logger = log4js.getLogger('Redis');
logger.level = config.server.logLevel;
class RedisServer {
constructor() {
const redisConfig = config.database?.redis || {};
const host = redisConfig.host || '127.0.0.1';
const port = redisConfig.port || 6379;
const password = redisConfig.password || undefined;
const url = redisConfig.url || `redis://${host}:${port}`;
this.client = createClient({
url,
password
});
this.client.on('error', err => {
logger.error('Redis Client Error', err);
});
this.connected = false;
}
async connect() {
if (this.connected) return;
await this.client.connect();
this.connected = true;
logger.info('Connected to Redis');
}
async setKey(key, value, ttlSeconds) {
await this.connect();
const payload = typeof value === 'string' ? value : JSON.stringify(value);
if (ttlSeconds) {
await this.client.set(key, payload, { EX: ttlSeconds });
} else {
await this.client.set(key, payload);
}
}
async getKey(key) {
await this.connect();
const value = await this.client.get(key);
if (value == null) return null;
try {
return JSON.parse(value);
} catch {
return value;
}
}
async deleteKey(key) {
await this.connect();
await this.client.del(key);
}
}
const redisServer = new RedisServer();
export { RedisServer, redisServer };
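A brief usage sketch of the singleton exported above (key, value, and import path are illustrative; setKey serializes non-string values to JSON and getKey parses them back):

import { redisServer } from './database/redis.js';

// Store an object for 5 seconds, read it back, then delete it.
await redisServer.setKey('printers:abc123', { name: 'Printer 1', online: true }, 5);
const printer = await redisServer.getKey('printers:abc123'); // parsed object, or null after expiry
await redisServer.deleteKey('printers:abc123');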

View File

@ -1,12 +1,14 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
// Define the main filamentStock schema
const filamentStockSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
state: {
type: { type: String, required: true },
percent: { type: String, required: true },
progress: { type: Number, required: false },
},
startingWeight: {
net: { type: Number, required: true },
@ -16,14 +18,14 @@ const filamentStockSchema = new Schema(
net: { type: Number, required: true },
gross: { type: Number, required: true },
},
filament: { type: mongoose.Schema.Types.ObjectId, ref: 'filament' },
filament: { type: mongoose.Schema.Types.ObjectId, ref: 'filament', required: true },
},
{ timestamps: true }
);
// Add virtual id getter
filamentStockSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,12 +1,16 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
// Define the main partStock schema
const partStockSchema = new Schema(
{
name: { type: String, required: true },
fileName: { type: String, required: false },
part: { type: mongoose.Schema.Types.ObjectId, ref: 'part' },
_reference: { type: String, default: () => generateId()() },
state: {
type: { type: String, required: true },
progress: { type: Number, required: false },
},
part: { type: mongoose.Schema.Types.ObjectId, ref: 'part', required: true },
startingQuantity: { type: Number, required: true },
currentQuantity: { type: Number, required: true },
},
@ -15,7 +19,7 @@ const partStockSchema = new Schema(
// Add virtual id getter
partStockSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,4 +1,5 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const stockAuditItemSchema = new Schema({
@ -11,6 +12,7 @@ const stockAuditItemSchema = new Schema({
const stockAuditSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
type: { type: String, required: true },
status: {
type: String,
@ -28,7 +30,7 @@ const stockAuditSchema = new Schema(
// Add virtual id getter
stockAuditSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,10 +1,11 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const stockEventSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
value: { type: Number, required: true },
current: { type: Number, required: true },
unit: { type: String, required: true },
parent: {
type: Schema.Types.ObjectId,
@ -33,7 +34,7 @@ const stockEventSchema = new Schema(
// Add virtual id getter
stockEventSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,60 +1,44 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const auditLogSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
changes: {
old: { type: Object, required: true },
new: { type: Object, required: true }
},
parent: {
type: Schema.Types.ObjectId,
refPath: 'parentType',
required: true
},
parentType: {
type: String,
required: true,
enum: [
'printer',
'job',
'subJob',
'filamentStock',
'stockEvent',
'vendor',
'part',
'product',
'material',
'filament',
'gcodeFile',
'noteType',
'note',
'user',
'host'
] // Add other models as needed
},
owner: {
type: Schema.Types.ObjectId,
refPath: 'ownerType',
required: true
},
ownerType: {
type: String,
required: true,
enum: ['user', 'printer', 'host']
old: { type: Object, required: false },
new: { type: Object, required: false },
},
operation: {
type: String,
required: true,
enum: ['edit', 'new', 'delete']
}
},
parent: {
type: Schema.Types.ObjectId,
refPath: 'parentType',
required: true,
},
parentType: {
type: String,
required: true,
},
owner: {
type: Schema.Types.ObjectId,
refPath: 'ownerType',
required: true,
},
ownerType: {
type: String,
required: true,
enum: ['user', 'printer', 'host'],
},
},
{ timestamps: true }
);
// Add virtual id getter
auditLogSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -0,0 +1,49 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const documentJobSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: {
type: String,
required: true,
unique: true,
},
objectType: { type: String, required: false },
object: {
type: Schema.Types.ObjectId,
refPath: 'objectType',
required: true,
},
state: {
type: { type: String, required: true, default: 'queued' },
percent: { type: Number, required: false },
},
documentTemplate: {
type: Schema.Types.ObjectId,
ref: 'documentTemplate',
required: true,
},
documentPrinter: {
type: Schema.Types.ObjectId,
ref: 'documentPrinter',
required: true,
},
content: {
type: String,
required: false,
},
},
{ timestamps: true }
);
// Add virtual id getter
documentJobSchema.virtual('id').get(function () {
return this._id;
});
// Configure JSON serialization to include virtuals
documentJobSchema.set('toJSON', { virtuals: true });
export const documentJobModel = mongoose.model('documentJob', documentJobSchema);

View File

@ -0,0 +1,48 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const connectionSchema = new Schema(
{
interface: { type: String, required: true },
protocol: { type: String, required: true },
host: { type: String, required: true },
port: { type: Number, required: false },
},
{ _id: false }
);
const documentPrinterSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: {
type: String,
required: true,
unique: true,
},
connection: { type: connectionSchema, required: true },
currentDocumentSize: { type: Schema.Types.ObjectId, ref: 'documentSize', required: false },
tags: [{ type: String }],
online: { type: Boolean, required: true, default: false },
active: { type: Boolean, required: true, default: true },
state: {
type: { type: String, required: true, default: 'offline' },
message: { type: String, required: false },
progress: { type: Number, required: false },
},
connectedAt: { type: Date, default: null },
host: { type: Schema.Types.ObjectId, ref: 'host', required: true },
queue: [{ type: Schema.Types.ObjectId, ref: 'documentJob', required: false }],
},
{ timestamps: true }
);
// Add virtual id getter
documentPrinterSchema.virtual('id').get(function () {
return this._id;
});
// Configure JSON serialization to include virtuals
documentPrinterSchema.set('toJSON', { virtuals: true });
export const documentPrinterModel = mongoose.model('documentPrinter', documentPrinterSchema);

View File

@ -1,8 +1,10 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const documentSizeSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: {
type: String,
required: true,
@ -18,13 +20,18 @@ const documentSizeSchema = new Schema(
required: true,
default: 0,
},
infiniteHeight: {
type: Boolean,
required: true,
default: false,
},
},
{ timestamps: true }
);
// Add virtual id getter
documentSizeSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,8 +1,10 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const documentTemplateSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: {
type: String,
required: true,
@ -52,7 +54,7 @@ const documentTemplateSchema = new Schema(
// Add virtual id getter
documentTemplateSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,7 +1,9 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const filamentSchema = new mongoose.Schema({
_reference: { type: String, default: () => generateId()() },
name: { required: true, type: String },
barcode: { required: false, type: String },
url: { required: false, type: String },
@ -18,7 +20,7 @@ const filamentSchema = new mongoose.Schema({
});
filamentSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
filamentSchema.set('toJSON', { virtuals: true });

View File

@ -0,0 +1,22 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const fileSchema = new mongoose.Schema(
{
_reference: { type: String, default: () => generateId()() },
name: { required: true, type: String },
type: { required: true, type: String },
extension: { required: true, type: String },
size: { required: false, type: Number },
metaData: { required: false, type: Object },
},
{ timestamps: true }
);
fileSchema.virtual('id').get(function () {
return this._id;
});
fileSchema.set('toJSON', { virtuals: true });
export const fileModel = mongoose.model('file', fileSchema);

View File

@ -1,4 +1,5 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
// Define the device schema
const deviceInfoSchema = new mongoose.Schema(
@ -9,56 +10,56 @@ const deviceInfoSchema = new mongoose.Schema(
release: { type: String },
arch: { type: String },
hostname: { type: String },
uptime: { type: Number }
uptime: { type: Number },
},
cpu: {
cores: { type: Number },
model: { type: String },
speedMHz: { type: Number }
speedMHz: { type: Number },
},
memory: {
totalGB: { type: String }, // stored as string from .toFixed(2), could also use Number
freeGB: { type: String }
freeGB: { type: String },
},
network: {
type: mongoose.Schema.Types.Mixed // since it's an object with dynamic interface names
type: mongoose.Schema.Types.Mixed, // since it's an object with dynamic interface names
},
user: {
uid: { type: Number },
gid: { type: Number },
username: { type: String },
homedir: { type: String },
shell: { type: String }
shell: { type: String },
},
process: {
nodeVersion: { type: String },
pid: { type: Number },
cwd: { type: String },
execPath: { type: String }
}
execPath: { type: String },
},
},
{ _id: false }
);
const hostSchema = new mongoose.Schema({
_reference: { type: String, default: () => generateId()() },
name: { required: true, type: String },
tags: [{ required: false, type: String }],
online: { required: true, type: Boolean, default: false },
state: {
type: { type: String, required: true, default: 'offline' },
message: { type: String, required: false },
percent: { type: Number, required: false }
percent: { type: Number, required: false },
},
active: { required: true, type: Boolean, default: true },
connectedAt: { required: false, type: Date },
authCode: { required: false, type: String },
otp: { required: false, type: String },
otpExpiresAt: { required: false, type: Date },
deviceInfo: deviceInfoSchema
authCode: { required: false, type: String },
deviceInfo: deviceInfoSchema,
files: [{ type: mongoose.Schema.Types.ObjectId, ref: 'file' }],
});
hostSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
hostSchema.set('toJSON', { virtuals: true });

View File

@ -1,6 +1,8 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const materialSchema = new mongoose.Schema({
_reference: { type: String, default: () => generateId()() },
name: { required: true, type: String },
url: { required: false, type: String },
image: { required: false, type: Buffer },
@ -8,7 +10,7 @@ const materialSchema = new mongoose.Schema({
});
materialSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
materialSchema.set('toJSON', { virtuals: true });

View File

@ -1,8 +1,10 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const noteTypeSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: {
type: String,
required: true,
@ -23,7 +25,7 @@ const noteTypeSchema = new Schema(
// Add virtual id getter
noteTypeSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,23 +1,26 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
// Define the main part schema
const partSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: { type: String, required: true },
fileName: { type: String, required: false },
product: { type: mongoose.Schema.Types.ObjectId, ref: 'product' },
globalPricing: { type: Boolean, default: true },
priceMode: { type: String, default: 'margin' },
amount: { type: Number, required: false },
margin: { type: Number, required: false },
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
file: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
},
{ timestamps: true }
);
// Add virtual id getter
partSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,9 +1,16 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const partSchema = new Schema({
part: { type: Schema.Types.ObjectId, ref: 'part', required: true },
quantity: { type: Number, required: true },
});
// Define the main product schema
const productSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: { type: String, required: true },
tags: [{ type: String }],
version: { type: String },
@ -11,12 +18,13 @@ const productSchema = new Schema(
margin: { type: Number, required: false },
amount: { type: Number, required: false },
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
parts: [partSchema],
},
{ timestamps: true }
);
// Add virtual id getter
productSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,18 +1,21 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const userSchema = new mongoose.Schema(
{
_reference: { type: String, default: () => generateId()() },
username: { required: true, type: String },
name: { required: true, type: String },
firstName: { required: false, type: String },
lastName: { required: false, type: String },
email: { required: true, type: String },
profileImage: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
},
{ timestamps: true }
);
userSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
userSchema.set('toJSON', { virtuals: true });

View File

@ -1,7 +1,9 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const vendorSchema = new mongoose.Schema(
{
_reference: { type: String, default: () => generateId()() },
name: { required: true, type: String },
website: { required: false, type: String },
email: { required: false, type: String },
@ -13,7 +15,7 @@ const vendorSchema = new mongoose.Schema(
);
vendorSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
vendorSchema.set('toJSON', { virtuals: true });

View File

@ -1,9 +1,16 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const noteSchema = new mongoose.Schema({
_reference: { type: String, default: () => generateId()() },
parent: {
type: Schema.Types.ObjectId,
refPath: 'parentType',
required: true,
},
parentType: {
type: String,
required: true,
},
content: {
@ -33,7 +40,7 @@ const noteSchema = new mongoose.Schema({
});
noteSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
noteSchema.set('toJSON', { virtuals: true });

View File

@ -1,22 +1,30 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const gcodeFileSchema = new mongoose.Schema({
name: { required: true, type: String },
gcodeFileName: { required: false, type: String },
gcodeFileInfo: { required: true, type: Object },
size: { type: Number, required: false },
filament: { type: Schema.Types.ObjectId, ref: 'filament', required: true },
parts: [{ type: Schema.Types.ObjectId, ref: 'part', required: true }],
cost: { type: Number, required: false },
createdAt: { type: Date },
updatedAt: { type: Date },
const partSchema = new mongoose.Schema({
part: { type: Schema.Types.ObjectId, ref: 'part', required: true },
quantity: { type: Number, required: true },
});
const gcodeFileSchema = new mongoose.Schema(
{
_reference: { type: String, default: () => generateId()() },
name: { required: true, type: String },
gcodeFileName: { required: false, type: String },
size: { type: Number, required: false },
filament: { type: Schema.Types.ObjectId, ref: 'filament', required: true },
parts: [partSchema],
file: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
cost: { type: Number, required: false },
},
{ timestamps: true }
);
gcodeFileSchema.index({ name: 'text', brand: 'text' });
gcodeFileSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
gcodeFileSchema.set('toJSON', { virtuals: true });

View File

@ -1,32 +1,38 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const jobSchema = new mongoose.Schema({
state: {
type: { required: true, type: String },
const jobSchema = new mongoose.Schema(
{
_reference: { type: String, default: () => generateId()() },
state: {
type: { required: true, type: String },
progress: { type: Number, required: false },
},
printers: [{ type: Schema.Types.ObjectId, ref: 'printer', required: false }],
createdAt: { required: true, type: Date },
updatedAt: { required: true, type: Date },
startedAt: { required: false, type: Date },
finishedAt: { required: false, type: Date },
gcodeFile: {
type: Schema.Types.ObjectId,
ref: 'gcodeFile',
required: false,
},
quantity: {
type: Number,
required: true,
default: 1,
min: 1,
},
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob', required: false }],
notes: [{ type: Schema.Types.ObjectId, ref: 'note', required: false }],
},
printers: [{ type: Schema.Types.ObjectId, ref: 'printer', required: false }],
createdAt: { required: true, type: Date },
updatedAt: { required: true, type: Date },
startedAt: { required: false, type: Date },
finishedAt: { required: false, type: Date },
gcodeFile: {
type: Schema.Types.ObjectId,
ref: 'gcodeFile',
required: false,
},
quantity: {
type: Number,
required: true,
default: 1,
min: 1,
},
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob', required: false }],
notes: [{ type: Schema.Types.ObjectId, ref: 'note', required: false }],
});
{ timestamps: true }
);
jobSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
jobSchema.set('toJSON', { virtuals: true });

View File

@ -1,4 +1,5 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
// Define the moonraker connection schema
@ -7,35 +8,39 @@ const moonrakerSchema = new Schema(
host: { type: String, required: true },
port: { type: Number, required: true },
protocol: { type: String, required: true },
apiKey: { type: String, default: null, required: false }
apiKey: { type: String, default: null, required: false },
},
{ _id: false }
);
// Define the alert schema
const alertSchema = new Schema(
{
priority: { type: String, required: true }, // order to show
type: { type: String, required: true }, // selectFilament, error, info, message
message: { type: String, required: false }
type: { type: String, required: true }, // error, info, message
message: { type: String, required: false },
actions: [{ type: String, required: false, default: [] }],
_id: { type: String, required: true },
canDismiss: { type: Boolean, required: true, default: true },
},
{ timestamps: true, _id: false }
);
// Define the main FDM printer schema
// Define the main printer schema
const printerSchema = new Schema(
{
_reference: { type: String, default: () => generateId()() },
name: { type: String, required: true },
online: { type: Boolean, required: true, default: false },
active: { type: Boolean, required: true, default: true },
state: {
type: { type: String, required: true, default: 'offline' },
progress: { type: Number, required: false, default: 0 }
message: { type: String, required: false },
progress: { type: Number, required: false },
},
connectedAt: { type: Date, default: null },
loadedFilament: {
type: Schema.Types.ObjectId,
ref: 'filament',
default: null
default: null,
},
moonraker: { type: moonrakerSchema, required: true },
tags: [{ type: String }],
@ -43,27 +48,17 @@ const printerSchema = new Schema(
currentJob: { type: Schema.Types.ObjectId, ref: 'job' },
currentSubJob: { type: Schema.Types.ObjectId, ref: 'subJob' },
currentFilamentStock: { type: Schema.Types.ObjectId, ref: 'filamentStock' },
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob' }],
vendor: {
type: Schema.Types.ObjectId,
ref: 'vendor',
default: null,
required: true
},
host: {
type: Schema.Types.ObjectId,
ref: 'host',
default: null,
required: true
},
alerts: [alertSchema]
queue: [{ type: Schema.Types.ObjectId, ref: 'subJob' }],
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', default: null },
host: { type: Schema.Types.ObjectId, ref: 'host', default: null },
alerts: [alertSchema],
},
{ timestamps: true }
);
// Add virtual id getter
printerSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
// Configure JSON serialization to include virtuals

View File

@ -1,7 +1,9 @@
import mongoose from 'mongoose';
import { generateId } from '../../utils.js';
const { Schema } = mongoose;
const subJobSchema = new mongoose.Schema({
_reference: { type: String, default: () => generateId()() },
printer: {
type: Schema.Types.ObjectId,
ref: 'printer',
@ -12,9 +14,9 @@ const subJobSchema = new mongoose.Schema({
ref: 'job',
required: true,
},
subJobId: {
moonrakerJobId: {
type: String,
required: true,
required: false,
},
gcodeFile: {
type: Schema.Types.ObjectId,
@ -23,7 +25,7 @@ const subJobSchema = new mongoose.Schema({
},
state: {
type: { required: true, type: String },
percent: { required: false, type: Number },
progress: { required: false, type: Number },
},
number: {
type: Number,
@ -42,7 +44,7 @@ const subJobSchema = new mongoose.Schema({
});
subJobSchema.virtual('id').get(function () {
return this._id.toHexString();
return this._id;
});
subJobSchema.set('toJSON', { virtuals: true });

View File

@ -2,6 +2,14 @@ import { ObjectId } from 'mongodb';
import { auditLogModel } from './schemas/management/auditlog.schema.js';
import { natsServer } from './nats.js';
import { customAlphabet } from 'nanoid';
const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
export const generateId = () => {
// 12 characters
return customAlphabet(ALPHABET, 12);
};
function parseFilter(property, value) {
if (typeof value === 'string') {
var trimmed = value.trim();
@ -309,7 +317,18 @@ function getChangedValues(oldObj, newObj, old = false) {
} else {
const nestedChanges = getChangedValues(oldVal, newVal, old);
if (Object.keys(nestedChanges).length > 0) {
changes[key] = nestedChanges;
// Exclude progress and currentWeight from nested changes
const excludeKeys = ['progress', 'currentWeight', 'net', 'gross'];
const filteredChanges = Object.keys(nestedChanges)
.filter(nestedKey => !excludeKeys.includes(nestedKey))
.reduce((acc, nestedKey) => {
acc[nestedKey] = nestedChanges[nestedKey];
return acc;
}, {});
if (Object.keys(filteredChanges).length > 0) {
changes[key] = filteredChanges;
}
}
}
} else if (JSON.stringify(oldVal) !== JSON.stringify(newVal)) {
@ -350,6 +369,9 @@ async function editAuditLog(
owner,
ownerType
) {
if (parentType === 'stockEvent') {
return;
}
// Get only the changed values
const changedOldValues = getChangedValues(oldValue, newValue, true);
const changedNewValues = getChangedValues(oldValue, newValue, false);

View File

@ -14,12 +14,16 @@ logger.level = config.server.logLevel;
export class EventManager {
constructor(socketClient) {
this.socketClient = socketClient;
this.subscriptions = new Set();
}
async subscribeToObjectEvent(id, objectType, eventType) {
logger.debug('Subscribing to object event:', eventType, id, objectType);
const subject = `${objectType}s.${id}.events.${eventType}`;
const subscriptionKey = `${subject}:${this.socketClient.socketId}`;
await natsServer.subscribe(
`${objectType}s.${id}.events.${eventType}`,
subject,
this.socketClient.socketId,
(key, value) => {
if (!value?.result) {
@ -32,15 +36,19 @@ export class EventManager {
}
}
);
this.subscriptions.add(subscriptionKey);
return { success: true };
}
async removeObjectEventsListener(id, objectType, eventType) {
// Remove specific event subscription for this object
await natsServer.removeSubscription(
`${objectType}s.${id}.events.${eventType}`,
this.socketClient.socketId
);
const subject = `${objectType}s.${id}.events.${eventType}`;
const subscriptionKey = `${subject}:${this.socketClient.socketId}`;
await natsServer.removeSubscription(subject, this.socketClient.socketId);
this.subscriptions.delete(subscriptionKey);
return { success: true };
}
@ -68,4 +76,19 @@ export class EventManager {
};
}
}
async removeAllListeners() {
logger.debug('Removing all event listeners...');
const removePromises = Array.from(this.subscriptions).map(
subscriptionKey => {
const [subject, socketId] = subscriptionKey.split(':');
return natsServer.removeSubscription(subject, socketId);
}
);
await Promise.all(removePromises);
this.subscriptions.clear();
logger.debug(`Removed ${removePromises.length} event listener(s)`);
return { success: true };
}
}

View File

@ -23,8 +23,8 @@ import { mongoServer } from './database/mongo.js';
// Connect to Etcd (await)
try {
await etcdServer.connect();
logger.info('Connected to Etcd');
//await etcdServer.connect();
// logger.info('Connected to Etcd');
} catch (err) {
logger.error('Failed to connect to Etcd:', err);
throw err;

View File

@ -1,4 +1,5 @@
import { etcdServer } from '../database/etcd.js';
import { natsServer } from '../database/nats.js';
import { redisServer } from '../database/redis.js';
import log4js from 'log4js';
import { loadConfig } from '../config.js';
const config = loadConfig();
@ -17,33 +18,43 @@ export class LockManager {
}
async lockObject(object) {
// Add a 'lock' event to the 'locks' stream
// Persist lock in Redis and publish via NATS
logger.debug('Locking object:', object._id);
try {
await etcdServer.setKey(`/locks/${object.type}s/${object._id}`, {
const redisKey = `locks:${object.type}s:${object._id}`;
const lockPayload = {
...object,
locked: true
});
logger.info(`Lock event to id: ${object._id}`);
};
await redisServer.setKey(redisKey, lockPayload);
const subject = `locks.${object.type}s.${object._id}`;
await natsServer.publish(subject, lockPayload);
logger.info(`Lock event published for id: ${object._id}`);
return true;
} catch (err) {
logger.error(`Error adding lock event to: ${object._id}:`, err);
logger.error(`Error locking object ${object._id}:`, err);
throw err;
}
}
async unlockObject(object) {
// Add an 'unlock' event to the 'locks' stream
const key = `/locks/${object.type}s/${object._id}`;
// Remove lock from Redis (if owned by user) and publish via NATS
const redisKey = `locks:${object.type}s:${object._id}`;
try {
logger.debug('Checking user can unlock:', object._id);
const lockEvent = await etcdServer.getKey(key);
const lockEvent = await redisServer.getKey(redisKey);
if (lockEvent?.user === object.user) {
logger.debug('Unlocking object:', object._id);
await etcdServer.deleteKey(key);
logger.info(`Unlocked object: ${object._id}`);
await redisServer.deleteKey(redisKey);
const subject = `locks.${object.type}s.${object._id}`;
await natsServer.publish(subject, {
_id: object._id,
type: object.type,
locked: false
});
logger.info(`Unlocked object and published event: ${object._id}`);
return true;
}
} catch (err) {
@ -53,11 +64,11 @@ export class LockManager {
}
async getObjectLock(object) {
// Get the current lock status of an object and broadcast it
// Get the current lock status of an object
logger.info('Getting lock status for object:', object._id);
try {
const lockKey = `/locks/${object.type}s/${object._id}`;
const lockValue = await etcdServer.getKey(lockKey);
const lockKey = `locks:${object.type}s:${object._id}`;
const lockValue = await redisServer.getKey(lockKey);
if (lockValue) {
// Object is locked
@ -81,26 +92,26 @@ export class LockManager {
}
setupLocksListeners() {
etcdServer.onPrefixPutEvent(
'/locks',
this.socketClient.id,
(key, value) => {
const id = key.split('/').pop();
logger.debug('Lock object event:', id);
this.socketClient.socket.emit('lockUpdate', {
...value,
locked: true
});
}
);
etcdServer.onPrefixDeleteEvent('/locks', this.socketClient.id, key => {
const id = key.split('/').pop();
logger.debug('Unlock object event:', id);
this.socketClient.socket.emit('lockUpdate', {
_id: id,
locked: false
// Subscribe to NATS subject for lock changes and emit via socket
const subject = 'locks.>';
natsServer
.subscribe(subject, this.socketClient.id, (_subject, value) => {
// Expected subjects: locks.{type}s.{id}
const parts = _subject.split('.');
const last = parts[parts.length - 1];
const id = last;
const payload =
typeof value === 'object'
? value
: { _id: id, locked: !!value?.locked };
logger.debug('Lock event received:', _subject);
this.socketClient.socket.emit('lockUpdate', payload);
})
.then(() => {
logger.info('Subscribed to NATS for lock changes.');
})
.catch(err => {
logger.error('Failed to subscribe to NATS lock changes:', err);
});
});
logger.info('Subscribed to Etcd stream for lock changes.');
}
}

View File

@ -2,12 +2,18 @@ import log4js from 'log4js';
// Load configuration
import { loadConfig } from '../config.js';
import { CodeAuth, createAuthMiddleware } from '../auth/auth.js';
import { editObject, getObject, listObjects } from '../database/database.js';
import {
newObject,
editObject,
getObject,
listObjects
} from '../database/database.js';
import { hostModel } from '../database/schemas/management/host.schema.js';
import { UpdateManager } from '../updates/updatemanager.js';
import { ActionManager } from '../actions/actionmanager.js';
import { getModelByName } from '../utils.js';
import { EventManager } from '../events/eventmanager.js';
import { TemplateManager } from '../templates/templatemanager.js';
const config = loadConfig();
@ -25,6 +31,7 @@ export class SocketHost {
this.updateManager = new UpdateManager(this);
this.actionManager = new ActionManager(this);
this.eventManager = new EventManager(this);
this.templateManager = new TemplateManager(this);
this.codeAuth = new CodeAuth();
this.setupSocketEventHandlers();
}
@ -34,12 +41,33 @@ export class SocketHost {
this.socket.on('authenticate', this.handleAuthenticate.bind(this));
this.socket.on('updateHost', this.handleUpdateHost.bind(this));
this.socket.on('getObject', this.handleGetObject.bind(this));
this.socket.on('newObject', this.handleNewObject.bind(this));
this.socket.on('editObject', this.handleEditObject.bind(this));
this.socket.on('listObjects', this.handleListObjects.bind(this));
this.socket.on(
'subscribeToObjectUpdates',
this.handleSubscribeToObjectUpdatesEvent.bind(this)
);
this.socket.on(
'unsubscribeToObjectUpdates',
this.handleUnsubscribeToObjectUpdatesEvent.bind(this)
);
this.socket.on(
'subscribeToObjectActions',
this.handleSubscribeToObjectActions.bind(this)
);
this.socket.on(
'subscribeToObjectEvent',
this.handleSubscribeToObjectEventEvent.bind(this)
);
this.socket.on(
'unsubscribeObjectEvent',
this.handleUnsubscribeObjectEventEvent.bind(this)
);
this.socket.on(
'renderTemplatePDF',
this.handleRenderTemplatePDFEvent.bind(this)
);
this.socket.on('objectEvent', this.handleObjectEventEvent.bind(this));
this.socket.on('disconnect', this.handleDisconnect.bind(this));
}
@ -107,6 +135,16 @@ export class SocketHost {
});
}
async handleNewObject(data, callback) {
const object = await newObject({
model: getModelByName(data.objectType),
newData: data.newData,
owner: this.host,
ownerType: 'host'
});
callback(object);
}
async handleEditObject(data, callback) {
const object = await editObject({
model: getModelByName(data.objectType),
@ -114,7 +152,8 @@ export class SocketHost {
updateData: data.updateData,
populate: data.populate,
owner: this.host,
ownerType: 'host'
ownerType: 'host',
auditLog: data?.auditLog
});
callback(object);
}
@ -151,6 +190,13 @@ export class SocketHost {
);
}
async handleSubscribeToObjectUpdatesEvent(data) {
const result = await this.updateManager.subscribeToObjectUpdate(
data._id,
data.objectType
);
}
async handleSubscribeToObjectActions(data) {
await this.actionManager.subscribeToObjectActions(
data._id,
@ -158,11 +204,83 @@ export class SocketHost {
);
}
async handleSubscribeToObjectEventEvent(data) {
await this.eventManager.subscribeToObjectEvent(
data._id,
data.objectType,
data.eventType
);
}
async handleUnsubscribeObjectEventEvent(data) {
await this.eventManager.removeObjectEventsListener(
data._id,
data.objectType,
data.eventType
);
}
async handleUnsubscribeToObjectUpdatesEvent(data) {
await this.updateManager.unsubscribeToObjectUpdate(
data._id,
data.objectType
);
}
async handleRenderTemplatePDFEvent(data, callback) {
const result = await this.templateManager.renderPDF(
data._id,
data.content,
data.object,
1
);
callback(result);
}
async setDevicesState(state, online, connectedAt) {
logger.info('Setting devices state to', state, 'and online to', online);
const documentPrinters = await listObjects({
model: getModelByName('documentPrinter'),
filter: { host: this.host._id }
});
const printers = await listObjects({
model: getModelByName('printer'),
filter: { host: this.host._id }
});
logger.debug(
'Retrieved',
documentPrinters.length,
'document printers and',
printers.length,
'printers'
);
for (const documentPrinter of documentPrinters) {
await editObject({
model: getModelByName('documentPrinter'),
id: documentPrinter._id,
updateData: { state: state, online: online, connectedAt: connectedAt },
owner: this.host,
ownerType: 'host'
});
}
for (const printer of printers) {
await editObject({
model: getModelByName('printer'),
id: printer._id,
updateData: { state: state, online: online, connectedAt: connectedAt },
owner: this.host,
ownerType: 'host'
});
}
logger.info('Devices state set to', state, 'and online to', online);
}
async handleDisconnect() {
if (this.authenticated) {
await editObject({
model: hostModel,
id: this.host._id,
id: this.id,
updateData: {
online: false,
state: { type: 'offline' },
@ -173,6 +291,13 @@ export class SocketHost {
});
this.authenticated = false;
}
await this.actionManager.removeAllListeners();
await this.eventManager.removeAllListeners();
await this.setDevicesState(
{ type: 'offline', message: 'Host disconnected.' },
false,
null
);
logger.info('External host disconnected. Socket ID:', this.id);
}
}

View File

@ -64,11 +64,16 @@ export class SocketUser {
'previewTemplate',
this.handlePreviewTemplateEvent.bind(this)
);
this.socket.on(
'renderTemplatePDF',
this.handleRenderTemplatePDFEvent.bind(this)
);
this.socket.on(
'generateHostOtp',
this.handleGenerateHostOtpEvent.bind(this)
);
this.socket.on('objectAction', this.handleObjectActionEvent.bind(this));
this.socket.on('disconnect', this.handleDisconnect.bind(this));
}
async handleAuthenticateEvent(data, callback) {
@ -196,6 +201,15 @@ export class SocketUser {
callback(result);
}
async handleRenderTemplatePDFEvent(data, callback) {
const result = await this.templateManager.renderPDF(
data._id,
data.content,
data.object,
1
);
callback(result);
}
async handleGenerateHostOtpEvent(data, callback) {
const result = await generateHostOTP(data._id);
callback(result);
@ -210,7 +224,9 @@ export class SocketUser {
);
}
handleDisconnect() {
async handleDisconnect() {
await this.actionManager.removeAllListeners();
await this.eventManager.removeAllListeners();
logger.info('External user disconnected:', this.socket.user?.username);
}
}

View File

@ -15,26 +15,21 @@
</style>
<style>
body {
min-width: calc((<%= width || '50mm' %> * <%= scale || '1' %>) + 100px);
min-height: calc(
(<%= height || '50mm' %> * <%= scale || '1' %>) + 100px
);
}
.previewContainer {
transform: scale(<%= scale || '1' %>);
min-width: calc((<%= width || '50mm' %> + 100px) * <%= scale || '1' %>);
min-height: calc(
(<%= height || '50mm' %> + 100px) * <%= scale || '1' %>
);
.previewWrapper {
width: <%= (width * scale) + 'mm' || '50mm' %>;
height: <%= (height * scale) + 'mm' || '50mm' %>;
}
.previewDocument {
width: <%= width || '50mm' %>;
height: <%= height || '50mm' %>;
width: <%= (width) + 'mm' || '50mm' %>;
height: <%= (height) + 'mm' || '50mm' %>;
transform: scale(<%= scale || '1' %>);
transform-origin: top left;
}
.renderDocument {
width: <%= width || '50mm' %>;
height: <%= height || '50mm' %>;
width: <%= (width * scale) + 'mm' || '50mm' %>;
height: <%= (height * scale) + 'mm' || '50mm' %>;
transform: scale(<%= scale || '1' %>);
}
</style>
@ -45,5 +40,10 @@
<script>
JsBarcode('.documentBarcode').init();
</script>
<% if (typeof previewPaginationScript !== 'undefined' && previewPaginationScript) { %>
<script>
<%- previewPaginationScript %>
</script>
<% } %>
</body>
</html>

View File

@ -1,3 +1,5 @@
<div class="previewContainer">
<div class="previewDocument"><%- content %></div>
<div class="previewWrapper">
<div class="previewDocument" id="content"><%- content %></div>
</div>
</div>

View File

@ -11,15 +11,26 @@ body {
display: flex;
justify-content: center; /* Horizontal center */
align-items: center; /* Vertical center */
transform-origin: center center;
padding: 60px;
box-sizing: border-box;
width: fit-content;
}
.previewWrapper {
position: relative;
}
.previewDocument {
background: #ffffff;
border: 1px solid #000;
box-shadow: 0 0 5px rgba(0, 0, 0, 0.2);
transform-origin: top left;
position: relative;
overflow: hidden;
}
.documentText {
margin: 0;
font-size: 12px;
line-height: 1;
}
.documentTitle {
margin: 0;
@ -27,7 +38,7 @@ body {
h1.documentTitle {
font-weight: 800;
font-size: 38px;
font-size: 34px;
}
h2.documentTitle {
@ -74,6 +85,47 @@ h4.documentTitle {
}
.documentBarcode {
width: 100% !important;
width: auto !important;
height: 100% !important;
}
.documentProgressBar {
height: 8px;
width: 260px;
border-radius: 8px;
border: 1px solid #000000;
overflow: hidden;
}
.documentProgressBarInner {
height: 100%;
background: #000;
}
.documentTable {
width: 100%;
border-collapse: collapse;
border: 1px solid #000000;
}
.documentTableRow {
border: 1px solid #000000;
}
.documentTableRow td,
.documentTableRowHeader th,
.documentTableRowFooter td {
padding: 4px;
border: 1px solid #000000;
}
.documentTableRowHeader {
background: #0000002e;
text-align: left;
border: 1px solid #000000;
}
.documentTableRowFooter {
background: #0000002e;
border: 1px solid #000000;
}

View File

@ -0,0 +1,55 @@
import log4js from 'log4js';
import { loadConfig } from '../config.js';
const config = loadConfig();
const logger = log4js.getLogger('PDF Factory');
logger.level = config.server.logLevel;
/**
* Generates a PDF from HTML content using Puppeteer
* @param {string} html - The HTML content to convert to PDF
* @param {Object} options - PDF generation options
* @param {number} options.width - Document width in mm
* @param {number} options.height - Document height in mm
* @returns {Promise<Buffer>} The PDF buffer
*/
export async function generatePDF(html, options = {}) {
try {
// Dynamically import puppeteer to handle cases where it might not be installed
const puppeteer = await import('puppeteer');
const browser = await puppeteer.default.launch({
headless: true,
args: ['--no-sandbox', '--disable-setuid-sandbox']
});
const page = await browser.newPage();
// Set content with HTML
await page.setContent(html, {
waitUntil: 'networkidle0'
});
// Generate PDF with specified dimensions
const pdfBuffer = await page.pdf({
format: options.format || undefined,
width: options.width ? `${options.width}mm` : undefined,
height: options.height ? `${options.height}mm` : undefined,
printBackground: true,
preferCSSPageSize: true,
margin: {
top: '0mm',
right: '0mm',
bottom: '0mm',
left: '0mm'
}
});
await browser.close();
return pdfBuffer;
} catch (error) {
logger.error('Error generating PDF:', error.message);
throw error;
}
}
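
A hedged usage example for the helper above (the HTML string, dimensions, and import path are illustrative; generatePDF resolves to a Buffer as described in the JSDoc):

import { generatePDF } from './templates/pdffactory.js';

// Render a 50mm x 30mm label to a PDF buffer, e.g. before sending it to a document printer.
const pdfBuffer = await generatePDF(
  '<html><body><h1 class="documentTitle">Label</h1></body></html>',
  { width: 50, height: 30 }
);

Pass either a named paper format or explicit millimetre dimensions; the width/height options are only applied when no format is given.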

View File

@ -12,7 +12,9 @@ import utc from 'dayjs/plugin/utc.js';
import timezone from 'dayjs/plugin/timezone.js';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { getObject } from '../database/database.js';
import { getObject, listObjects } from '../database/database.js';
import { getModelByName } from '../utils.js';
import { generatePDF } from './pdffactory.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
@ -28,7 +30,9 @@ logger.level = config.server.logLevel;
let baseTemplate;
let baseCSS;
let previewTemplate;
let renderTemplateEjs;
let contentPlaceholder;
let previewPaginationScript;
async function loadTemplates() {
// Synchronously load files
@ -41,10 +45,18 @@ async function loadTemplates() {
join(__dirname, '/assets/previewtemplate.ejs'),
'utf8'
);
renderTemplateEjs = fs.readFileSync(
join(__dirname, '/assets/rendertemplate.ejs'),
'utf8'
);
contentPlaceholder = fs.readFileSync(
join(__dirname, '/assets/contentplaceholder.ejs'),
'utf8'
);
previewPaginationScript = fs.readFileSync(
join(__dirname, '/assets/previewpagination.js'),
'utf8'
);
}
loadTemplates();
@ -60,6 +72,12 @@ function getNodeStyles(attributes) {
if (attributes?.height) {
styles += `height: ${attributes.height};`;
}
if (attributes?.maxWidth) {
styles += `max-width: ${attributes.maxWidth};`;
}
if (attributes?.maxHeight) {
styles += `max-height: ${attributes.maxHeight};`;
}
if (attributes?.gap && attributes?.vertical != 'true') {
styles += `column-gap: ${attributes.gap};`;
}
@ -96,6 +114,15 @@ function getNodeStyles(attributes) {
if (attributes?.scale) {
styles += `transform: scale(${attributes.scale});`;
}
if (attributes?.textAlign) {
styles += `text-align: ${attributes.textAlign};`;
}
if (attributes?.textSize) {
styles += `font-size: ${attributes.textSize};`;
}
if (attributes?.wordWrap) {
styles += `word-wrap: ${attributes.wordWrap};`;
}
return styles;
}
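// For illustration (hypothetical attribute values): the new attributes added here map straight onto CSS, e.g.
// getNodeStyles({ maxWidth: '80mm', textAlign: 'center', textSize: '10px' })
// returns 'max-width: 80mm;text-align: center;font-size: 10px;'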
@ -152,7 +179,9 @@ async function transformCustomElements(content) {
class: 'documentBarcode',
'jsbarcode-displayValue': 'false',
'jsbarcode-value': node.content[0],
'jsbarcode-format': node.attrs.format
'jsbarcode-format': node.attrs.format,
'jsbarcode-width': node.attrs.barcodeWidth,
'jsbarcode-margin': 0
}
}
],
@ -193,6 +222,27 @@ async function transformCustomElements(content) {
}
};
}),
tree =>
tree.match({ tag: 'ProgressBar' }, node => {
return {
...node,
tag: 'div',
attrs: {
class: 'documentProgressBar',
style: getNodeStyles(node.attrs)
},
content: [
{
tag: 'div',
attrs: {
class: 'documentProgressBarInner',
style: `width: ${Math.round((node.content[0] || 0) * 100)}%`
}
}
]
};
}),
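// Hypothetical example of what this produces: <ProgressBar width="260px">0.4</ProgressBar> becomes
//   <div class="documentProgressBar" style="width: 260px;">
//     <div class="documentProgressBarInner" style="width: 40%"></div>
//   </div>
// i.e. the element's text content is read as a 0-1 fraction and rounded to a whole percentage.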
tree =>
tree.match({ tag: 'DateTime' }, node => {
const dateTime = dayjs.utc(node.content[0]);
@ -204,6 +254,91 @@ async function transformCustomElements(content) {
style: getNodeStyles(node.attrs)
}
};
}),
tree =>
tree.match({ tag: 'Table' }, node => {
return {
...node,
tag: 'table',
attrs: {
class: 'documentTable',
style: getNodeStyles(node.attrs)
}
};
}),
tree =>
tree.match({ tag: 'Row' }, node => {
const rowType = node.attrs?.type?.toLowerCase() || '';
// Transform Col children based on the row type (header/footer/body)
const transformCols = content => {
if (!Array.isArray(content)) return content;
return content.map(child => {
if (typeof child === 'string' || child == null) {
return child;
}
if (child.tag !== 'Col') {
return child;
}
const baseAttrs = {
...child.attrs,
style: getNodeStyles(child.attrs)
};
if (rowType === 'header') {
// Header row columns become table headers
return {
...child,
tag: 'th',
attrs: baseAttrs
};
}
// Footer and body rows both use <td>; footer is distinguished by the row class
return {
...child,
tag: 'td',
attrs: baseAttrs
};
});
};
const content = transformCols(node.content);
if (rowType === 'header') {
return {
...node,
tag: 'tr',
content,
attrs: {
class: 'documentTableRowHeader',
style: getNodeStyles(node.attrs)
}
};
}
if (rowType === 'footer') {
return {
...node,
tag: 'tr',
content,
attrs: {
class: 'documentTableRowFooter',
style: getNodeStyles(node.attrs)
}
};
}
return {
...node,
tag: 'tr',
content,
attrs: {
class: 'documentTableRow',
style: getNodeStyles(node.attrs)
}
};
})
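// Hypothetical example of the markup this handles (empty style attributes omitted for brevity):
//   <Table><Row type="header"><Col width="20mm">Qty</Col></Row><Row><Col>12</Col></Row></Table>
// becomes
//   <table class="documentTable">
//     <tr class="documentTableRowHeader"><th style="width: 20mm;">Qty</th></tr>
//     <tr class="documentTableRow"><td>12</td></tr>
//   </table>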
]).process(content);
@ -211,6 +346,13 @@ async function transformCustomElements(content) {
}
export class TemplateManager {
constructor() {
this.fc = {
listObjects: this.listObjects.bind(this),
getObject: this.getObject.bind(this),
formatDate: this.formatDate.bind(this)
};
}
/**
* Renders a document template (preview or print layout) with the provided data
* @param {string} id - The document template ID
@ -218,7 +360,14 @@ export class TemplateManager {
* @param {Object} options - EJS rendering options
* @returns {Promise<Object>} Object containing the rendered HTML and document dimensions, or an error
*/
async renderTemplate(id, content, data = {}, scale, options = {}) {
async renderTemplate(
id,
content,
data = {},
scale = 1,
options = {},
preview = true
) {
try {
// Set default options for EJS rendering
const defaultOptions = {
@ -242,11 +391,28 @@ export class TemplateManager {
}
const documentSize = documentTemplate.documentSize;
if (documentSize == null) {
return { error: 'Document template size not found.' };
}
var templateData = data;
// Validate content parameter
if (content == null || typeof content !== 'string') {
return { error: 'Template content is required and must be a string.' };
}
// Give every schema field a default null value, then merge in the supplied data
var templateData = {};
if (documentTemplate.global == true) {
templateData = { content: contentPlaceholder };
templateData = { content: contentPlaceholder, fc: this.fc };
} else {
const objectType = documentTemplate?.objectType;
const model = getModelByName(objectType);
const defaultKeys = Object.keys(model.schema.obj);
const defaultValues = {};
for (const key of defaultKeys) {
defaultValues[key] = null;
}
templateData = { ...defaultValues, ...data, fc: this.fc };
}
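// For illustration: with a hypothetical objectType whose schema has { name, status } and
// data = { name: 'Job 42' }, templateData becomes { name: 'Job 42', status: null, fc: this.fc },
// so templates can reference any schema field without hitting a ReferenceError.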
// Render the template
@ -259,39 +425,79 @@ export class TemplateManager {
var templateWithParentContent;
if (documentTemplate.parent != undefined) {
// Validate parent content
if (
documentTemplate.parent.content == null ||
typeof documentTemplate.parent.content !== 'string'
) {
return {
error: 'Parent template content is required and must be a string.'
};
}
templateWithParentContent = await ejs.render(
documentTemplate.parent.content,
{ content: templateContent },
{ content: templateContent, fc: this.fc },
defaultOptions
);
} else {
templateWithParentContent = templateContent;
}
// Validate rendered content before transformation
if (
templateWithParentContent == null ||
typeof templateWithParentContent !== 'string'
) {
return { error: 'Failed to render template content.' };
}
const templateHtml = await transformCustomElements(
templateWithParentContent
);
const previewHtml = await ejs.render(
previewTemplate,
{ content: templateHtml },
defaultOptions
);
// Validate transformed HTML
if (templateHtml == null || typeof templateHtml !== 'string') {
return { error: 'Failed to transform template content.' };
}
var innerHtml = null;
if (preview == true) {
innerHtml = await ejs.render(
previewTemplate,
{ content: templateHtml },
defaultOptions
);
} else {
innerHtml = await ejs.render(
renderTemplateEjs,
{ content: templateHtml },
defaultOptions
);
}
// Validate inner HTML
if (innerHtml == null || typeof innerHtml !== 'string') {
return { error: 'Failed to render inner template content.' };
}
const baseHtml = await ejs.render(
baseTemplate,
{
content: previewHtml,
width: `${documentSize.width}mm`,
height: `${documentSize.height}mm`,
content: innerHtml,
width: documentSize.width,
height: documentSize.height,
scale: `${scale}`,
baseCSS: baseCSS
baseCSS: baseCSS,
previewPaginationScript: preview ? previewPaginationScript : ''
},
defaultOptions
);
const previewObject = {
html: baseHtml
html: baseHtml,
width: documentSize.width,
height: documentSize.height
};
return previewObject;
@ -315,4 +521,73 @@ export class TemplateManager {
return false;
}
}
/**
* Renders a template to PDF format
* @param {string} id - The document template ID
* @param {string} content - The template content
* @param {Object} data - Data object to pass to the template
* @param {Object} options - EJS rendering options
* @returns {Promise<Object>} Object containing PDF buffer or error
*/
async renderPDF(id, content, data = {}, options = {}) {
try {
logger.debug('Rendering PDF for template:', id);
const renderedTemplate = await this.renderTemplate(
id,
content,
data,
1,
options,
false
);
if (renderedTemplate.error != undefined) {
return { error: renderedTemplate.error };
}
const baseHtml = renderedTemplate.html;
// Generate PDF using PDF factory
const pdfBuffer = await generatePDF(baseHtml, {
width: renderedTemplate.width,
height: renderedTemplate.height
});
const pdfObject = {
pdf: pdfBuffer
};
return pdfObject;
} catch (error) {
logger.warn('Error whilst rendering PDF:', error.message);
return { error: error.message };
}
}
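// Hypothetical usage (template ID and content invented for illustration):
//   const result = await templateManager.renderPDF(templateId, '<h1 class="documentTitle">Hi</h1>', { name: 'Job 42' });
//   if (result.error) { /* handle failure */ } else { /* result.pdf is the buffer from generatePDF */ }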
async listObjects(objectType, filter = {}, populate = []) {
const model = getModelByName(objectType);
if (model == undefined) {
throw new Error('Farm Control: Object type not found.');
}
const objects = await listObjects({
model,
filter,
populate
});
return objects;
}
formatDate(date, format) {
return dayjs(date).format(format);
}
async getObject(objectType, id) {
const model = getModelByName(objectType);
if (model == undefined) {
throw new Error('Farm Control: Object type not found.');
}
const object = await getObject({ model, id, cached: true });
return object;
}
}
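Because fc is merged into every template's data, document templates can call these helpers straight from EJS. A hypothetical fragment (the object type, field names, variable and date format are invented for illustration; getObject and listObjects return promises, so this assumes EJS async rendering is enabled in the options):

<% const printer = await fc.getObject('printer', printerId); %>
<p class="documentText"><%= printer?.name %></p>
<p class="documentText">Printed <%= fc.formatDate(new Date(), 'YYYY-MM-DD HH:mm') %></p>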

View File

@ -46,6 +46,7 @@ export class UpdateManager {
}
async subscribeToObjectUpdate(id, objectType) {
logger.debug('Subscribing to object update...', id, objectType);
await natsServer.subscribe(
`${objectType}s.${id}.object`,
this.socketClient.socketId,

View File

@ -7,11 +7,17 @@ import canonicalize from 'canonical-json';
import { loadConfig } from './config.js';
import { userModel } from './database/schemas/management/user.schema.js';
import { documentSizeModel } from './database/schemas/management/documentsize.schema.js';
import { documentJobModel } from './database/schemas/management/documentjob.schema.js';
import { documentTemplateModel } from './database/schemas/management/documenttemplate.schema.js';
import { documentPrinterModel } from './database/schemas/management/documentprinter.schema.js';
import { printerModel } from './database/schemas/production/printer.schema.js';
import { subJobModel } from './database/schemas/production/subjob.schema.js';
import { jobModel } from './database/schemas/production/job.schema.js';
import { filamentStockModel } from './database/schemas/inventory/filamentstock.schema.js';
import { fileModel } from './database/schemas/management/file.schema.js';
import { gcodeFileModel } from './database/schemas/production/gcodefile.schema.js';
import { stockEventModel } from './database/schemas/inventory/stockevent.schema.js';
import { filamentModel } from './database/schemas/management/filament.schema.js';
const config = loadConfig();
@ -21,11 +27,17 @@ const modelList = [
hostModel,
userModel,
documentSizeModel,
documentJobModel,
documentTemplateModel,
documentPrinterModel,
printerModel,
jobModel,
subJobModel,
filamentStockModel
fileModel,
gcodeFileModel,
filamentStockModel,
stockEventModel,
filamentModel
];
export async function generateHostOTP(id) {

yarn.lock (new file, 4322 lines)

File diff suppressed because it is too large