Compare commits


14 Commits

Author SHA1 Message Date
b9c2e959b9 Update dependencies in package.json and package-lock.json
- Upgraded several dependencies including axios, dotenv, express, mongodb, and mongoose to their latest versions for improved performance and security.
- Added new dependencies: date-fns, dayjs, ejs, nanoid, node-cache, object-hash, and posthtml to enhance functionality.
- Updated ESLint and related packages to the latest versions for better linting support and configuration.
- Removed deprecated dependencies and ensured compatibility with the latest Node.js versions.
2025-08-18 01:09:29 +01:00
6cc2a07ee0 Add utility functions for database operations
- Introduced a new utils.js file containing various utility functions for database operations.
- Implemented functions for parsing filters, converting objects to camelCase, extracting configuration blocks, and managing audit logs.
- Added functionality to handle ObjectId conversions and filter generation based on query parameters.
- Enhanced object ID handling with functions to flatten and expand ObjectIds for better integration with MongoDB.
2025-08-18 01:09:14 +01:00
1b86d256ca Enhance UpdateManager for object event handling and subscription management
- Refactored constructor to utilize socketClient for improved clarity.
- Added methods to subscribe and unsubscribe from object creation and update events.
- Implemented functionality to retrieve and set object updates in Etcd.
- Improved logging for event handling and error management.
2025-08-18 01:09:00 +01:00
d695772a3a Refactor LockManager to improve key handling and event broadcasting
- Updated constructor to use socketClient instead of socketManager for better clarity.
- Changed key management methods to use a consistent naming convention for setting and deleting locks.
- Enhanced event listener setup to utilize socketClient for broadcasting lock updates.
- Improved logging for lock and unlock operations to provide clearer insights into the locking mechanism.
2025-08-18 01:08:49 +01:00
f5bfbe0d63 Refactor EtcdServer class to enhance event handling and logging
- Introduced separate watchers for prefix and key events, improving organization and clarity.
- Renamed methods for setting and deleting keys to better reflect their functionality.
- Updated logging levels from debug to trace for more granular logging during operations.
- Added functionality to remove watchers for both prefixes and keys, enhancing resource management.
- Improved error handling and callback management for event listeners.
2025-08-18 01:08:36 +01:00
75ccd91b50 Add ActionManager class for object action tracking and websocket integration
- Implemented ActionManager to handle object updates using Etcd and broadcast events via websockets.
- Added methods for subscribing to object actions, removing listeners, and sending object actions with callback handling.
- Integrated logging for better traceability of actions and errors.
2025-08-18 01:08:26 +01:00
110f6d9a57 Update README.md to improve configuration and usage instructions
- Added spacing for better readability in the Etcd, MongoDB, Authentication, and Installation sections.
- Corrected the reference from SocketClient to SocketUser in the Adding New Features section.
- Enhanced the Troubleshooting section with a verification test for etcd.
2025-08-18 01:08:14 +01:00
e2eb240a06 Deleted schema. 2025-08-18 01:07:51 +01:00
53246b70b2 Refactor socket management to remove Keycloak integration
- Eliminated KeycloakAuth from the SocketManager initialization.
- Updated SocketManager to directly use the server instance, simplifying the socket management setup.
2025-08-18 01:07:39 +01:00
43dc41470d Add OTP expiry configuration to config.json
- Introduced otpExpiryMins setting to specify the expiration time for OTPs in minutes.
- Updated configuration structure to enhance authentication management.
2025-08-18 01:07:26 +01:00
3424c17ab3 Refactor socket management: replace SocketClient with SocketUser and SocketHost classes
- Removed SocketClient class and its associated functionality.
- Introduced SocketUser and SocketHost classes to handle user and host socket connections respectively.
- Updated SocketManager to manage user and host connections, including authentication and event handling.
- Enhanced event handling for user actions such as locking, unlocking, and subscribing to updates.
2025-08-18 01:07:14 +01:00
03eb0a61c1 Enhance authentication module with code and OTP verification
- Added CodeAuth class for verifying authentication codes and OTPs against the database.
- Implemented methods to check host status, validate codes, and manage OTP expiration.
- Updated KeycloakAuth to retrieve user information from the database.
- Refactored createAuthMiddleware to handle authentication checks for Socket.IO connections.
2025-08-18 01:06:55 +01:00
ce15d3dbfc Add inventory and management schemas for filament, part, and stock management
- Introduced new schemas for managing inventory, including filamentStock, partStock, stockAudit, stockEvent, and their respective models.
- Added management schemas for user, vendor, material, and various document types to enhance data structure and organization.
- Implemented necessary fields and relationships to support inventory tracking and management functionalities.
2025-08-18 01:06:38 +01:00
5584e61583 Add Template Manager and associated assets for document rendering
- Implemented TemplateManager class for rendering EJS templates with dynamic content.
- Added base template, preview template, content placeholder, and render template EJS files.
- Introduced CSS styles for document layout and presentation.
- Integrated dayjs for date formatting and posthtml for custom element transformation.
2025-08-18 01:05:57 +01:00
43 changed files with 5162 additions and 1256 deletions

View File

@@ -30,6 +30,7 @@ A WebSocket microservice for FarmControl that handles real-time communication an
The application uses `config.json` for configuration. Update the following sections:

### Etcd Configuration
```json
{
  "database": {
@@ -42,6 +43,7 @@ The application uses `config.json` for configuration. Update the following secti
```

### MongoDB Configuration
```json
{
  "database": {
@@ -53,6 +55,7 @@ The application uses `config.json` for configuration. Update the following secti
```

### Authentication Configuration
```json
{
  "auth": {
@@ -70,11 +73,13 @@ The application uses `config.json` for configuration. Update the following secti
## Running the Application

### Development
```bash
npm run dev
```

### Production
```bash
npm start
```
@@ -84,6 +89,7 @@ npm start
### Installation

#### Using Docker
```bash
docker run -d --name etcd \
  -p 2379:2379 \
@@ -95,19 +101,23 @@ docker run -d --name etcd \
```

#### Using Homebrew (macOS)
```bash
brew install etcd
etcd
```

#### Using apt (Ubuntu/Debian)
```bash
sudo apt-get install etcd
sudo systemctl start etcd
```

### Verification
Test that etcd is running:
```bash
curl http://localhost:2379/version
```
@@ -139,6 +149,7 @@ The service exposes WebSocket endpoints for:
## Development

### Project Structure
```
src/
├── auth/ # Authentication logic
@@ -152,7 +163,7 @@ src/
### Adding New Features
1. **Database operations**: Use the `etcdServer` instance for etcd operations
-2. **WebSocket events**: Extend the `SocketClient` class
+2. **WebSocket events**: Extend the `SocketUser` class
3. **Authentication**: Extend the `KeycloakAuth` class

## Troubleshooting

View File

@@ -22,7 +22,8 @@
      "mongo": {
        "url": "mongodb://192.168.68.53:27017/farmcontrol"
      }
-    }
+    },
+    "otpExpiryMins": 0.5
  },
  "production": {
    "server": {

package-lock.json (generated)

File diff suppressed because it is too large.

View File

@@ -17,23 +17,32 @@
  "author": "Tom Butcher",
  "license": "ISC",
  "dependencies": {
-    "axios": "^1.10.0",
-    "dotenv": "^16.4.5",
+    "axios": "^1.11.0",
+    "date-fns": "^4.1.0",
+    "dayjs": "^1.11.13",
+    "dotenv": "^17.2.1",
+    "ejs": "^3.1.10",
    "etcd3": "^1.1.2",
-    "express": "^4.19.2",
+    "express": "^5.1.0",
+    "he": "^1.2.0",
    "jsonwebtoken": "^9.0.2",
+    "lodash": "^4.17.21",
    "log4js": "^6.9.1",
-    "mongodb": "^6.8.0",
-    "mongoose": "^8.5.1",
-    "socket.io": "^4.7.5",
+    "mongodb": "^6.18.0",
+    "mongoose": "^8.17.1",
+    "nanoid": "^5.1.5",
+    "node-cache": "^5.1.2",
+    "object-hash": "^3.0.0",
+    "posthtml": "^0.16.6",
+    "socket.io": "^4.8.1",
    "socket.io-adapter-mongo": "^2.0.5",
    "socketio-jwt": "^4.6.2"
  },
  "devDependencies": {
-    "eslint": "^8.57.0",
-    "eslint-config-prettier": "^10.1.5",
-    "eslint-plugin-prettier": "^5.5.1",
+    "eslint": "^9.33.0",
+    "eslint-config-prettier": "^10.1.8",
+    "eslint-plugin-prettier": "^5.5.4",
    "prettier": "^3.6.2",
-    "standard": "^17.1.0"
+    "standard": "^17.1.2"
  }
}

View File

@ -0,0 +1,100 @@
import log4js from 'log4js';
import { loadConfig } from '../config.js';
import { etcdServer } from '../database/etcd.js';
import { generateEtcId } from '../utils.js';
const config = loadConfig();
// Setup logger
const logger = log4js.getLogger('Action Manager');
logger.level = config.server.logLevel;
/**
* ActionManager handles tracking object updates using Etcd and broadcasts update events via websockets.
*/
export class ActionManager {
constructor(socketClient) {
this.socketClient = socketClient;
this.callbacks = new Map();
}
async subscribeToObjectActions(id, objectType) {
logger.debug('Subscribing to object actions...', id, objectType);
await etcdServer.onPrefixPutEvent(
`/${objectType}s/${id}/actions`,
this.socketClient.id,
(key, value) => {
if (!value?.result) {
logger.trace('Object action:', id);
this.socketClient.socket.emit(
'objectAction',
{
_id: id,
objectType: objectType,
action: { ...value }
},
result => {
logger.trace('Got action result:', key);
const actionId = key.split('/').pop();
etcdServer.setKey(`/${objectType}s/${id}/actions/${actionId}`, {
...value,
result: { ...result }
});
}
);
}
}
);
return { success: true };
}
async removeObjectActionsListener(id, objectType) {
await etcdServer.removePrefixWatcher(
`/${objectType}s/${id}/actions`,
this.socketClient.id,
'put'
);
return { success: true };
}
async sendObjectAction(id, objectType, action, callback) {
try {
const actionId = generateEtcId();
this.callbacks.set(actionId, callback);
logger.trace(
`Calling action id: ${actionId}, object id: ${id}, object type: ${objectType} Action:`,
action
);
await etcdServer.onKeyPutEvent(
`/${objectType}s/${id}/actions/${actionId}`,
this.socketClient.socketId,
async (key, value) => {
if (value.result) {
logger.trace('Calling result callback...');
const storedCallback = this.callbacks.get(actionId);
await etcdServer.removeKeyWatcher(
`/${objectType}s/${id}/actions/${actionId}`,
this.socketClient.socketId,
'put'
);
await etcdServer.deleteKey(
`/${objectType}s/${id}/actions/${actionId}`
);
storedCallback(value.result);
}
}
);
await etcdServer.setKey(
`/${objectType}s/${id}/actions/${actionId}`,
action
);
return true;
} catch (error) {
logger.error(
`Failed to set value for /${objectType}s/${id}/object:`,
error
);
return false;
}
}
}
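For orientation, a hedged usage sketch of this class: `socketUser`, the import path, and the ids are stand-ins for whatever the SocketManager actually wires up, not names confirmed by this change.

```js
// Sketch only: `socketUser` is a placeholder for the SocketUser/SocketHost wrapper
// that SocketManager passes in; the module path is an assumption.
import { ActionManager } from './managers/actionManager.js';

const actions = new ActionManager(socketUser);

// Forward every put under /printers/<id>/actions to the connected socket as 'objectAction'.
await actions.subscribeToObjectActions(printerId, 'printer');

// Write an action to etcd and invoke the callback once a result is written back to the same key.
await actions.sendObjectAction(printerId, 'printer', { type: 'pause' }, result => {
  console.log('Action result:', result);
});
```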

View File

@@ -4,6 +4,14 @@ import jwt from 'jsonwebtoken';
import log4js from 'log4js';

// Load configuration
import { loadConfig } from '../config.js';
+import {
+  editObject,
+  getObject,
+  getObjectByFilter
+} from '../database/database.js';
+import { hostModel } from '../database/schemas/management/host.schema.js';
+import { userModel } from '../database/schemas/management/user.schema.js';
+import { generateAuthCode } from '../utils.js';

const config = loadConfig();
@@ -11,7 +19,7 @@ const logger = log4js.getLogger('Auth');
logger.level = config.server.logLevel;

export class KeycloakAuth {
-  constructor(config) {
+  constructor() {
    this.config = config.auth;
    this.tokenCache = new Map(); // Cache for verified tokens
  }
@@ -66,7 +74,7 @@ export class KeycloakAuth {
      // Parse token to extract user info
      const decodedToken = jwt.decode(token);

-      const user = {
+      const decodedUser = {
        id: decodedToken.sub,
        username: decodedToken.preferred_username,
        email: decodedToken.email,
@@ -74,6 +82,11 @@ export class KeycloakAuth {
        roles: this.extractRoles(decodedToken)
      };

+      const user = await getObjectByFilter({
+        model: userModel,
+        filter: { username: decodedUser.username }
+      });
+
      // Cache the verified token
      const expiresAt = introspection.exp * 1000; // Convert to milliseconds
      this.tokenCache.set(token, { expiresAt, user });
@@ -120,28 +133,101 @@
  }
}

-// Socket.IO middleware for authentication
-export function createAuthMiddleware(auth) {
-  return async (socket, next) => {
-    const { token } = socket.handshake.auth;
-
-    if (!token) {
-      return next(new Error('Authentication token is required'));
-    }
-
-    try {
-      const authResult = await auth.verifyToken(token);
-
-      if (!authResult.valid) {
-        return next(new Error('Invalid authentication token'));
-      }
-
-      // Attach user information to socket
-      socket.user = authResult.user;
-
-      next();
-    } catch (err) {
-      logger.error('Authentication error:', err);
-      next(new Error('Authentication failed'));
-    }
-  };
-}
+export class CodeAuth {
+  // Verify a code with the database
+  async verifyCode(id, authCode) {
+    try {
+      const host = await getObject({ model: hostModel, id, cached: true });
+      if (host == undefined) {
+        const error = 'Host not found.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      if (host.active == false) {
+        const error = 'Host not active.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      if (host.authCode == undefined || host.authCode == '') {
+        const error = 'No authCode on database.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      if (host.authCode != authCode) {
+        const error = 'authCode does not match.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      return { valid: true, host: host };
+    } catch (error) {
+      logger.error('Code verification error:', error.message);
+      return { valid: false };
+    }
+  }
+
+  async verifyOtp(otp) {
+    try {
+      const host = await getObjectByFilter({
+        model: hostModel,
+        filter: { otp: otp },
+        cached: false
+      });
+      if (host == undefined) {
+        const error = 'No host found with OTP.';
+        logger.warn(error);
+        return { valid: false, error: error };
+      }
+      const id = host._id.toString();
+      if (host.active == false) {
+        const error = 'Host is not active.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      if (host.otp == undefined) {
+        const error = 'No OTP on database.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      if (host.otpExpiresAt == undefined) {
+        const error = 'No OTP expiry.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      if (host.otpExpiresAt < Date.now()) {
+        const error = 'OTP expired.';
+        logger.warn(error, 'Host:', id);
+        return { valid: false, error: error };
+      }
+      const authCodeHost = await editObject({
+        model: hostModel,
+        id: id,
+        updateData: { authCode: generateAuthCode() }
+      });
+      return { valid: true, host: authCodeHost };
+    } catch (error) {
+      logger.error('Code verification error:', error.message);
+      return { valid: false, error: error.message };
+    }
+  }
+}
+
+// Socket.IO middleware for authentication
+export function createAuthMiddleware(socketUser) {
+  return async (packet, next) => {
+    const [event] = packet; // event name is always first element
+
+    // Allow the 'authenticate' event through without checks
+    logger.trace('Event:', event);
+    if (event === 'authenticate') {
+      next();
+      return;
+    }
+
+    if (socketUser.authenticated) {
+      next();
+      return;
+    }
+
+    return next(new Error('Authentication is required.'));
+  };
+}
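For orientation, a minimal sketch of how the new packet middleware and `CodeAuth` could be wired to a Socket.IO connection. Only `createAuthMiddleware` and `CodeAuth` come from this file; the `SocketUser` stand-in, the module path, and the `authenticate` event payload are assumptions.

```js
// Sketch only: paths and the authenticate payload shape are assumptions.
import { CodeAuth, createAuthMiddleware } from './auth/auth.js';

io.on('connection', socket => {
  const socketUser = { socket, authenticated: false }; // placeholder, not the real SocketUser class

  // Per-packet guard: everything except 'authenticate' requires a prior successful auth.
  socket.use(createAuthMiddleware(socketUser));

  // Hypothetical authenticate handler using the OTP flow from CodeAuth.
  socket.on('authenticate', async ({ otp }, ack) => {
    const result = await new CodeAuth().verifyOtp(otp);
    socketUser.authenticated = result.valid;
    ack(result);
  });
});
```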

src/database/database.js (new file)
View File

@ -0,0 +1,490 @@
import _ from 'lodash';
import NodeCache from 'node-cache';
import {
deleteAuditLog,
expandObjectIds,
editAuditLog,
distributeUpdate,
newAuditLog,
distributeNew
} from './utils.js';
import log4js from 'log4js';
import { loadConfig } from '../config.js';
import { userModel } from './schemas/management/user.schema.js';
const config = loadConfig();
const logger = log4js.getLogger('Database');
const cacheLogger = log4js.getLogger('Local Cache');
logger.level = config.server.logLevel;
cacheLogger.level = config.server.logLevel;
const modelCaches = new Map();
const listCache = new NodeCache({
stdTTL: 30, // 30 sec expiration
checkperiod: 600, // 10 min periodic cleanup
useClones: false // Don't clone objects for better performance
});
function getModelCache(model) {
const modelName = model.modelName;
const modelCache = modelCaches.get(modelName);
if (modelCache == undefined) {
logger.trace('Creating new model cache...');
const newModelCache = new NodeCache({
stdTTL: 30, // 30 sec expiration
checkperiod: 30, // 30 sec periodic cleanup
useClones: false // Don't clone objects for better performance
});
modelCaches.set(modelName, newModelCache);
return newModelCache;
}
logger.trace('Getting model cache...');
return modelCache;
}
export const retrieveObjectCache = ({ model, id }) => {
cacheLogger.trace('Retrieving:', {
model: model.modelName,
id
});
const modelCache = getModelCache(model);
const cachedObject = modelCache.get(id);
if (cachedObject == undefined) {
cacheLogger.trace('Miss:', {
model: model.modelName,
id
});
return undefined;
}
cacheLogger.trace('Hit:', {
model: model.modelName,
id
});
return cachedObject;
};
export const retrieveObjectsCache = ({ model }) => {
cacheLogger.trace('Retrieving:', {
model: model.modelName
});
const modelCache = getModelCache(model);
const modelCacheKeys = modelCache.keys();
const cachedList = listCache.get(model.modelName);
if (cachedList == true) {
const cachedObjects = modelCacheKeys.map(key => modelCache.get(key));
cacheLogger.trace('Hit:', {
model: model.modelName,
length: cachedObjects.length
});
return cachedObjects;
}
cacheLogger.trace('Miss:', {
model: model.modelName
});
return undefined;
};
export const updateObjectCache = ({ model, id, object }) => {
cacheLogger.trace('Updating:', {
model: model.modelName,
id
});
const modelCache = getModelCache(model);
const cachedObject = modelCache.get(id) || {};
const mergedObject = _.merge(cachedObject, object);
modelCache.set(id, mergedObject);
cacheLogger.trace('Updated:', {
model: model.modelName,
id
});
return mergedObject;
};
export const deleteObjectCache = ({ model, id }) => {
cacheLogger.trace('Deleting:', {
model: model.modelName,
id
});
const modelCache = getModelCache(model);
modelCache.del(id);
cacheLogger.trace('Deleted:', {
model: model.modelName,
id
});
return true;
};
export const updateObjectsCache = ({ model, objects }) => {
cacheLogger.trace('Updating:', {
model: model.modelName,
length: objects.length
});
const modelCache = getModelCache(model);
const mergedObjects = objects.map(object => {
const cachedObject = modelCache.get(object._id) || {};
const mergedObject = _.merge(cachedObject, object);
modelCache.set(object._id, mergedObject);
return mergedObject;
});
listCache.set(model.modelName, true);
cacheLogger.trace('Updated:', {
model: model.modelName,
length: objects.length
});
return mergedObjects;
};
// Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
export const listObjects = async ({
model,
populate = [],
filter = {},
sort = '',
order = 'ascend',
project, // optional: override default projection
cached = false
}) => {
try {
logger.trace('Listing objects:', {
model,
populate,
filter,
sort,
order,
project,
cached
});
if (cached == true) {
const objectsCache = retrieveObjectsCache({ model });
if (objectsCache != undefined) {
return objectsCache;
}
}
// Fix: descend should be -1, ascend should be 1
const sortOrder = order === 'descend' ? -1 : 1;
if (!sort || sort === '') {
sort = 'createdAt';
}
// Translate parent._id to parent for Mongoose
if (filter['parent._id']) {
filter.parent = filter['parent._id'];
delete filter['parent._id'];
}
// Translate owner._id to owner for Mongoose
if (filter['owner._id']) {
filter.owner = filter['owner._id'];
delete filter['owner._id'];
}
// Use find with population and filter
let query = model.find(filter).sort({ [sort]: sortOrder });
// Handle populate (array or single value)
if (populate) {
if (Array.isArray(populate)) {
for (const pop of populate) {
query = query.populate(pop);
}
} else if (typeof populate === 'string' || typeof populate === 'object') {
query = query.populate(populate);
}
}
// Handle select (projection)
if (project) {
query = query.select(project);
}
query = query.lean();
const queryResult = await query;
const finalResult = expandObjectIds(queryResult);
updateObjectsCache({ model, objects: finalResult });
logger.trace('Retrieved from database:', {
model,
populate,
filter,
sort,
order,
project,
cached
});
return finalResult;
} catch (error) {
logger.error('Object list error:', error);
return { error: error, code: 500 };
}
};
// Reusable function to get a single object by ID
export const getObject = async ({ model, id, populate, cached = false }) => {
try {
logger.trace('Getting object:', {
model,
id,
populate
});
if (cached == true) {
const cachedObject = retrieveObjectCache({ model, id });
if (cachedObject != undefined) {
return cachedObject;
}
}
let query = model.findById(id).lean();
// Handle populate (array or single value)
if (populate) {
if (Array.isArray(populate)) {
for (const pop of populate) {
query = query.populate(pop);
}
} else if (typeof populate === 'string' || typeof populate === 'object') {
query = query.populate(populate);
}
}
const finalResult = await query;
if (!finalResult) {
logger.warn('Object not found in database:', {
model,
id,
populate
});
return undefined;
}
logger.trace('Retrieved object from database:', {
model,
id,
populate
});
updateObjectCache({
model: model,
id: finalResult._id.toString(),
object: finalResult
});
return finalResult;
} catch (error) {
logger.error('Error retrieving object:', error.message);
return undefined;
}
};
// Reusable function to get a single object by ID
export const getObjectByFilter = async ({ model, filter, populate }) => {
try {
logger.trace('Getting object:', {
model,
filter,
populate
});
let query = model.findOne(filter).lean();
// Handle populate (array or single value)
if (populate) {
if (Array.isArray(populate)) {
for (const pop of populate) {
query = query.populate(pop);
}
} else if (typeof populate === 'string' || typeof populate === 'object') {
query = query.populate(populate);
}
}
const finalResult = await query;
if (!finalResult) {
logger.warn('Object not found in database:', {
model,
filter,
populate
});
return undefined;
}
logger.trace('Retrieved object from database:', {
model,
filter,
populate
});
updateObjectCache({
model: model,
id: finalResult._id.toString(),
object: finalResult
});
return finalResult;
} catch (error) {
logger.error('Error retrieving object:', error.message);
return undefined;
}
};
// Reusable function to edit an object by ID, with audit logging and distribution
export const editObject = async ({
model,
id,
updateData,
owner = undefined,
ownerType = undefined,
populate
}) => {
try {
// Determine parentType from model name
const parentType = model.modelName ? model.modelName : 'unknown';
// Fetch and update the object
var query = model.findByIdAndUpdate(id, updateData).lean();
if (populate) {
if (Array.isArray(populate)) {
for (const pop of populate) {
query = query.populate(pop);
}
} else if (typeof populate === 'string' || typeof populate === 'object') {
query = query.populate(populate);
}
}
const previousObject = await query;
if (!previousObject) {
return { error: `${parentType} not found.`, code: 404 };
}
const previousExpandedObject = expandObjectIds(previousObject);
if (owner != undefined && ownerType != undefined) {
// Audit log before update
await editAuditLog(
previousExpandedObject,
{ ...previousExpandedObject, ...updateData },
id,
parentType,
owner,
ownerType
);
}
// Distribute update
await distributeUpdate(updateData, id, parentType);
updateObjectCache({
model: model,
id: id.toString(),
object: { ...previousExpandedObject, ...updateData }
});
return { ...previousExpandedObject, ...updateData };
} catch (error) {
logger.error('editObject error:', error);
return { error: error.message, code: 500 };
}
};
// Reusable function to create a new object
export const newObject = async ({
model,
newData,
owner = null,
ownerType = undefined
}) => {
try {
const parentType = model.modelName ? model.modelName : 'unknown';
const result = await model.create(newData);
if (!result || result.length === 0) {
return { error: 'No object created.', code: 500 };
}
const created = result;
if (owner != undefined && ownerType != undefined) {
await newAuditLog(newData, created._id, parentType, owner, ownerType);
}
await distributeNew(created._id, parentType);
updateObjectCache({
model: model,
id: created._id.toString(),
object: { _id: created._id, ...newData }
});
return created;
} catch (error) {
logger.error('newObject error:', error);
return { error: error.message, code: 500 };
}
};
// Reusable function to delete an object by ID, with audit logging and distribution
export const deleteObject = async ({
model,
id,
owner = null,
ownerType = undefined
}) => {
try {
const parentType = model.modelName ? model.modelName : 'unknown';
// Delete the object
const result = await model.findByIdAndDelete(id);
if (!result) {
return { error: `${parentType} not found.`, code: 404 };
}
if (owner != undefined && ownerType != undefined) {
// Audit log the deletion
await deleteAuditLog(result, id, parentType, owner, ownerType);
}
deleteObjectCache({ model: model, id: id.toString() });
// Distribute the deletion event
await distributeUpdate({ deleted: true }, id, parentType);
return { deleted: true, id: id.toString() };
} catch (error) {
logger.error('deleteObject error:', error);
return { error: error.message, code: 500 };
}
};
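A short, hedged example of how these helpers compose. The import paths follow how `auth.js` imports the same modules, and `hostId` / `userId` are placeholders rather than values from this change.

```js
// Sketch only: relative paths mirror the imports used elsewhere in this change.
import { getObject, listObjects, editObject } from './database/database.js';
import { hostModel } from './database/schemas/management/host.schema.js';

// Cached read: the first call hits MongoDB, repeats within the 30 s TTL come from the
// per-model NodeCache.
const host = await getObject({ model: hostModel, id: hostId, cached: true });

// Filtered list, sorted by name (listObjects defaults to createdAt when no sort is given).
const activeHosts = await listObjects({
  model: hostModel,
  filter: { active: true },
  sort: 'name'
});

// Update with audit logging and update distribution when an owner is supplied.
await editObject({
  model: hostModel,
  id: hostId,
  updateData: { online: false },
  owner: userId,
  ownerType: 'user'
});
```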

View File

@ -9,20 +9,21 @@ logger.level = config.server.logLevel;
class EtcdServer { class EtcdServer {
constructor() { constructor() {
this.client = null; this.client = null;
this.watchers = new Map(); this.prefixPutWatchers = new Map(); // prefix → { watcher, callbacks }
this.prefixDeleteWatchers = new Map(); // prefix → { watcher, callbacks }
this.keyPutWatchers = new Map(); // key → { watcher, callbacks }
this.keyDeleteWatchers = new Map(); // key → { watcher, callbacks }
const etcdConfig = config.database?.etcd || config.database; // fallback for production config const etcdConfig = config.database?.etcd || config.database; // fallback for production config
const host = etcdConfig.host || 'localhost'; const host = etcdConfig.host || 'localhost';
const port = etcdConfig.port || 2379; const port = etcdConfig.port || 2379;
this.hosts = [`${host}:${port}`]; this.hosts = [`${host}:${port}`];
logger.debug( logger.trace(`EtcdServer: hosts set to ${JSON.stringify(this.hosts)}`);
`EtcdServer constructor: hosts set to ${JSON.stringify(this.hosts)}`
);
} }
async connect() { async connect() {
if (!this.client) { if (!this.client) {
logger.info('Connecting to Etcd...'); logger.info('Connecting to Etcd...');
logger.debug( logger.trace(
`Creating Etcd client with hosts ${JSON.stringify(this.hosts)}` `Creating Etcd client with hosts ${JSON.stringify(this.hosts)}`
); );
this.client = new Etcd3({ this.client = new Etcd3({
@ -32,10 +33,10 @@ class EtcdServer {
// Test connection // Test connection
try { try {
await this.client.get('test-connection').string(); await this.client.get('test-connection').string();
logger.debug('Etcd client connected successfully.'); logger.trace('Etcd client connected successfully.');
} catch (error) { } catch (error) {
if (error.code === 'NOT_FOUND') { if (error.code === 'NOT_FOUND') {
logger.debug( logger.trace(
'Etcd client connected successfully (test key not found as expected).' 'Etcd client connected successfully (test key not found as expected).'
); );
} else { } else {
@ -43,38 +44,35 @@ class EtcdServer {
} }
} }
} else { } else {
logger.debug('Etcd client already exists, skipping connection.'); logger.trace('Etcd client already exists, skipping connection.');
} }
return this.client; return this.client;
} }
async getClient() { async getClient() {
logger.trace('Checking if Etcd client exists.');
if (!this.client) { if (!this.client) {
logger.debug('No client found, calling connect().'); logger.trace('No client found, calling connect().');
await this.connect(); await this.connect();
} }
logger.trace('Returning Etcd client.');
return this.client; return this.client;
} }
// Hash-like functionality using etcd async setKey(key, value) {
async set(key, value) {
const client = await this.getClient(); const client = await this.getClient();
const stringValue = const stringValue =
typeof value === 'string' ? value : JSON.stringify(value); typeof value === 'string' ? value : JSON.stringify(value);
await client.put(key).value(stringValue); await client.put(key).value(stringValue);
logger.debug(`Set key: ${key}, value: ${stringValue}`); logger.trace(`Set key: ${key}, value: ${stringValue}`);
return true; return true;
} }
async get(key) { async getKey(key) {
const client = await this.getClient(); const client = await this.getClient();
try { try {
const value = await client.get(key).string(); const value = await client.get(key).string();
logger.debug(`Retrieved key: ${key}, value: ${value}`); logger.trace(`Retrieved key: ${key}, value: ${value}`);
// Try to parse as JSON, fallback to string // Try to parse as JSON, fallback to string
try { try {
@ -84,41 +82,191 @@ class EtcdServer {
} }
} catch (error) { } catch (error) {
if (error.code === 'NOT_FOUND') { if (error.code === 'NOT_FOUND') {
logger.debug(`Key not found: ${key}`); logger.trace(`Key not found: ${key}`);
return null; return null;
} }
throw error; throw error;
} }
} }
async delete(key) { async deleteKey(key) {
const client = await this.getClient(); const client = await this.getClient();
try { try {
await client.delete().key(key); await client.delete().key(key);
logger.debug(`Deleted key: ${key}`); logger.trace(`Deleted key: ${key}`);
return true; return { success: true };
} catch (error) { } catch (error) {
if (error.code === 'NOT_FOUND') { if (error.code === 'NOT_FOUND') {
logger.debug(`Key not found for deletion: ${key}`); const error = `Key not found for deletion.`;
return false; console.log(error, 'Key:', key);
return { error: error };
} }
throw error; throw error;
} }
} }
async onPrefixEvent(prefix, callback) { async onPrefixPutEvent(prefix, owner, callback) {
const client = await this.getClient(); const client = await this.getClient();
logger.debug(`Setting up watcher for prefix events: ${prefix}`); const watcherKey = prefix;
if (this.prefixPutWatchers.has(watcherKey)) {
this.prefixPutWatchers.get(watcherKey).callbacks.set(owner, callback);
logger.trace(`Added put callback for owner=${owner} on prefix=${prefix}`);
return;
}
logger.trace(`Creating new put watcher for prefix: ${prefix}`);
const watcher = await client.watch().prefix(prefix).create();
const callbacks = new Map();
callbacks.set(owner, callback);
watcher.on('put', (kv, previous) => {
logger.trace(`Prefix put event detected: ${prefix}, key: ${kv.key}`);
const valueStr = kv.value.toString();
let parsedValue;
try {
parsedValue = JSON.parse(valueStr);
} catch {
parsedValue = valueStr;
}
for (const [ownerId, cb] of callbacks) {
try {
cb(kv.key.toString(), parsedValue, kv, previous);
} catch (err) {
logger.error(
`Error in onPrefixPutEvent callback for owner=${ownerId}, prefix=${prefix}:`,
err
);
}
}
});
this.prefixPutWatchers.set(watcherKey, { watcher, callbacks });
return { success: true };
}
async onPrefixDeleteEvent(prefix, owner, callback) {
const client = await this.getClient();
const watcherKey = prefix;
if (this.prefixDeleteWatchers.has(watcherKey)) {
this.prefixDeleteWatchers.get(watcherKey).callbacks.set(owner, callback);
logger.trace(
`Added delete callback for owner=${owner} on prefix=${prefix}`
);
return;
}
logger.trace(`Creating new delete watcher for prefix: ${prefix}`);
const watcher = await client.watch().prefix(prefix).create();
const callbacks = new Map();
callbacks.set(owner, callback);
watcher.on('delete', (kv, previous) => {
logger.trace(`Prefix delete event detected: ${prefix}, key: ${kv.key}`);
for (const [ownerId, cb] of callbacks) {
try {
cb(kv.key.toString(), kv, previous);
} catch (err) {
logger.error(
`Error in onPrefixDeleteEvent callback for owner=${ownerId}, prefix=${prefix}:`,
err
);
}
}
});
this.prefixDeleteWatchers.set(watcherKey, { watcher, callbacks });
return { success: true };
}
async onKeyPutEvent(key, owner, callback) {
const client = await this.getClient();
const watcherKey = key;
if (this.keyPutWatchers.has(watcherKey)) {
this.keyPutWatchers.get(watcherKey).callbacks.set(owner, callback);
logger.trace(`Added put callback for owner: ${owner}, on key: ${key}`);
return;
}
logger.trace(`Creating new put watcher for key: ${key}`);
const watcher = await client.watch().key(key).create();
const callbacks = new Map();
callbacks.set(owner, callback);
watcher.on('put', (kv, previous) => {
logger.trace(`Key put event detected: ${key}, key: ${kv.key}`);
const valueStr = kv.value.toString();
let parsedValue;
try {
parsedValue = JSON.parse(valueStr);
} catch {
parsedValue = valueStr;
}
for (const [ownerId, cb] of callbacks) {
try {
cb(kv.key.toString(), parsedValue, kv, previous);
} catch (err) {
logger.error(
`Error in onKeyPutEvent callback for owner: ${ownerId}, key: ${key}:`,
err
);
}
}
});
this.keyPutWatchers.set(watcherKey, { watcher, callbacks });
return { success: true };
}
async onKeyDeleteEvent(key, owner, callback) {
const client = await this.getClient();
const watcherKey = key;
if (this.keyDeleteWatchers.has(watcherKey)) {
this.keyDeleteWatchers.get(watcherKey).callbacks.set(owner, callback);
logger.trace(`Added delete callback for owner: ${owner} on key: ${key}`);
return;
}
logger.trace(`Creating new delete watcher for key: ${key}`);
const watcher = await client.watch().key(key).create();
const callbacks = new Map();
callbacks.set(owner, callback);
watcher.on('delete', (kv, previous) => {
logger.trace(`Key delete event detected: ${key}, key: ${kv.key}`);
for (const [ownerId, cb] of callbacks) {
try {
cb(kv.key.toString(), kv, previous);
} catch (err) {
logger.error(
`Error in onKeyDeleteEvent callback for owner=${ownerId}, key=${key}:`,
err
);
}
}
});
this.keyDeleteWatchers.set(watcherKey, { watcher, callbacks });
}
async onKeyEvent(key, callback) {
const client = await this.getClient();
logger.trace(`Setting up watcher for key events: ${key}`);
client client
.watch() .watch()
.prefix(prefix) .key(key)
.create() .create()
.then(watcher => { .then(watcher => {
// Handle put events // Handle put events
watcher.on('put', (kv, previous) => { watcher.on('put', (kv, previous) => {
logger.debug(`Prefix put event detected: ${prefix}, key: ${kv.key}`); logger.trace(`Key put event detected: ${key}`);
try { try {
const value = kv.value.toString(); const value = kv.value.toString();
let parsedValue; let parsedValue;
@ -127,10 +275,10 @@ class EtcdServer {
} catch { } catch {
parsedValue = value; parsedValue = value;
} }
callback(kv.key.toString(), parsedValue, kv, previous); callback(key, parsedValue, kv, previous);
} catch (error) { } catch (error) {
logger.error( logger.error(
`Error in onPrefixEvent put callback for prefix ${prefix}:`, `Error in onKeyEvent put callback for key ${key}:`,
error error
); );
} }
@ -138,98 +286,80 @@ class EtcdServer {
// Handle delete events // Handle delete events
watcher.on('delete', (kv, previous) => { watcher.on('delete', (kv, previous) => {
logger.debug( logger.trace(`Key delete event detected: ${key}`);
`Prefix delete event detected: ${prefix}, key: ${kv.key}`
);
try { try {
callback(kv.key.toString(), null, kv, previous); callback(key, null, kv, previous);
} catch (error) { } catch (error) {
logger.error( logger.error(
`Error in onPrefixEvent delete callback for prefix ${prefix}:`, `Error in onKeyEvent delete callback for key ${key}:`,
error error
); );
} }
}); });
// Store watcher with a unique key // Store watcher with a unique key
const watcherKey = `event:${prefix}`; const watcherKey = `event:key:${key}`;
this.watchers.set(watcherKey, watcher); this.watchers.set(watcherKey, watcher);
}); });
} }
async onPrefixPut(prefix, callback) { async removePrefixWatcher(prefix, owner, type = 'put') {
const client = await this.getClient(); const store =
logger.debug(`Setting up watcher for prefix put: ${prefix}`); type === 'put' ? this.prefixPutWatchers : this.prefixDeleteWatchers;
const entry = store.get(prefix);
client if (!entry) {
.watch() logger.trace(`Watcher not found for prefix: ${prefix}, type: ${type}`);
.prefix(prefix)
.create()
.then(watcher => {
watcher.on('put', (kv, previous) => {
logger.debug(`Prefix put event detected: ${prefix}, key: ${kv.key}`);
try {
const value = kv.value.toString();
let parsedValue;
try {
parsedValue = JSON.parse(value);
} catch {
parsedValue = value;
}
callback(kv.key.toString(), parsedValue, kv, previous);
} catch (error) {
logger.error(
`Error in onPrefixPut callback for prefix ${prefix}:`,
error
);
}
});
this.watchers.set(`put:${prefix}`, watcher);
});
}
async onPrefixDelete(prefix, callback) {
const client = await this.getClient();
logger.debug(`Setting up watcher for prefix delete: ${prefix}`);
client
.watch()
.prefix(prefix)
.create()
.then(watcher => {
watcher.on('delete', (kv, previous) => {
logger.debug(
`Prefix delete event detected: ${prefix}, key: ${kv.key}`
);
try {
callback(kv.key.toString(), kv, previous);
} catch (error) {
logger.error(
`Error in onPrefixDelete callback for prefix ${prefix}:`,
error
);
}
});
this.watchers.set(`delete:${prefix}`, watcher);
});
}
async removeWatcher(prefix, type = 'put') {
const watcherKey = `${type}:${prefix}`;
const watcher = this.watchers.get(watcherKey);
if (watcher) {
logger.debug(`Removing watcher: ${watcherKey}`);
watcher.removeAllListeners();
await watcher.close();
this.watchers.delete(watcherKey);
return true;
} else {
logger.debug(`Watcher not found: ${watcherKey}`);
return false; return false;
} }
if (entry.callbacks.delete(owner)) {
logger.trace(
`Removed ${type} callback for owner: ${owner} on prefix: ${prefix}`
);
} else {
logger.trace(
`No ${type} callback found for owner: ${owner} on prefix: ${prefix}`
);
}
if (entry.callbacks.size === 0) {
logger.trace(`No callbacks left, stopping ${type} watcher for ${prefix}`);
entry.watcher.removeAllListeners();
await entry.watcher.cancel();
store.delete(prefix);
}
return true;
}
async removeKeyWatcher(key, owner, type = 'put') {
const store = type === 'put' ? this.keyPutWatchers : this.keyDeleteWatchers;
const entry = store.get(key);
if (!entry) {
logger.trace(`Watcher not found for key: ${key}, type: ${type}`);
return false;
}
if (entry.callbacks.delete(owner)) {
logger.trace(
`Removed ${type} callback for owner: ${owner} on key: ${key}`
);
} else {
logger.trace(
`No ${type} callback found for owner: ${owner} on key: ${key}`
);
}
if (entry.callbacks.size === 0) {
logger.trace(`No callbacks left, stopping ${type} watcher for ${key}`);
entry.watcher.removeAllListeners();
await entry.watcher.cancel();
store.delete(key);
}
return true;
} }
async disconnect() { async disconnect() {
@ -237,7 +367,7 @@ class EtcdServer {
// Stop all watchers // Stop all watchers
for (const [key, watcher] of this.watchers) { for (const [key, watcher] of this.watchers) {
logger.debug(`Stopping watcher: ${key}`); logger.trace(`Stopping watcher: ${key}`);
watcher.removeAllListeners(); watcher.removeAllListeners();
await watcher.close(); await watcher.close();
} }

View File

@ -0,0 +1,33 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main filamentStock schema
const filamentStockSchema = new Schema(
{
state: {
type: { type: String, required: true },
percent: { type: String, required: true },
},
startingWeight: {
net: { type: Number, required: true },
gross: { type: Number, required: true },
},
currentWeight: {
net: { type: Number, required: true },
gross: { type: Number, required: true },
},
filament: { type: mongoose.Schema.Types.ObjectId, ref: 'filament' },
},
{ timestamps: true }
);
// Add virtual id getter
filamentStockSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
filamentStockSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const filamentStockModel = mongoose.model('filamentStock', filamentStockSchema);

View File

@ -0,0 +1,25 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main partStock schema
const partStockSchema = new Schema(
{
name: { type: String, required: true },
fileName: { type: String, required: false },
part: { type: mongoose.Schema.Types.ObjectId, ref: 'part' },
startingQuantity: { type: Number, required: true },
currentQuantity: { type: Number, required: true },
},
{ timestamps: true }
);
// Add virtual id getter
partStockSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
partStockSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const partStockModel = mongoose.model('partStock', partStockSchema);

View File

@ -0,0 +1,38 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const stockAuditItemSchema = new Schema({
type: { type: String, enum: ['filament', 'part'], required: true },
stock: { type: Schema.Types.ObjectId, required: true },
expectedQuantity: { type: Number, required: true },
actualQuantity: { type: Number, required: true },
notes: { type: String },
});
const stockAuditSchema = new Schema(
{
type: { type: String, required: true },
status: {
type: String,
enum: ['pending', 'in_progress', 'completed', 'cancelled'],
default: 'pending',
required: true,
},
notes: { type: String },
items: [stockAuditItemSchema],
createdBy: { type: Schema.Types.ObjectId, ref: 'user', required: true },
completedAt: { type: Date },
},
{ timestamps: true }
);
// Add virtual id getter
stockAuditSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
stockAuditSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const stockAuditModel = mongoose.model('stockAudit', stockAuditSchema);

View File

@ -0,0 +1,43 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const stockEventSchema = new Schema(
{
value: { type: Number, required: true },
current: { type: Number, required: true },
unit: { type: String, required: true },
parent: {
type: Schema.Types.ObjectId,
refPath: 'parentType',
required: true,
},
parentType: {
type: String,
required: true,
enum: ['filamentStock', 'partStock', 'productStock'], // Add other models as needed
},
owner: {
type: Schema.Types.ObjectId,
refPath: 'ownerType',
required: true,
},
ownerType: {
type: String,
required: true,
enum: ['user', 'subJob', 'stockAudit'],
},
timestamp: { type: Date, default: Date.now },
},
{ timestamps: true }
);
// Add virtual id getter
stockEventSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
stockEventSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const stockEventModel = mongoose.model('stockEvent', stockEventSchema);

View File

@ -0,0 +1,64 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const auditLogSchema = new Schema(
{
changes: {
old: { type: Object, required: true },
new: { type: Object, required: true }
},
parent: {
type: Schema.Types.ObjectId,
refPath: 'parentType',
required: true
},
parentType: {
type: String,
required: true,
enum: [
'printer',
'job',
'subJob',
'filamentStock',
'stockEvent',
'vendor',
'part',
'product',
'material',
'filament',
'gcodeFile',
'noteType',
'note',
'user',
'host'
] // Add other models as needed
},
owner: {
type: Schema.Types.ObjectId,
refPath: 'ownerType',
required: true
},
ownerType: {
type: String,
required: true,
enum: ['user', 'printer', 'host']
},
operation: {
type: String,
required: true,
enum: ['edit', 'new', 'delete']
}
},
{ timestamps: true }
);
// Add virtual id getter
auditLogSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
auditLogSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const auditLogModel = mongoose.model('auditLog', auditLogSchema);

View File

@ -0,0 +1,33 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const documentSizeSchema = new Schema(
{
name: {
type: String,
required: true,
unique: true,
},
width: {
type: Number,
required: true,
default: 0,
},
height: {
type: Number,
required: true,
default: 0,
},
},
{ timestamps: true }
);
// Add virtual id getter
documentSizeSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
documentSizeSchema.set('toJSON', { virtuals: true });
export const documentSizeModel = mongoose.model('documentSize', documentSizeSchema);

View File

@ -0,0 +1,61 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const documentTemplateSchema = new Schema(
{
name: {
type: String,
required: true,
unique: true,
},
objectType: { type: String, required: false },
tags: [{ type: String }],
active: {
type: Boolean,
required: true,
default: true,
},
global: {
type: Boolean,
required: true,
default: false,
},
parent: {
type: Schema.Types.ObjectId,
ref: 'documentTemplate',
required: false,
},
documentSize: {
type: Schema.Types.ObjectId,
ref: 'documentSize',
required: true,
},
documentPrinters: [
{
type: Schema.Types.ObjectId,
ref: 'documentPrinter',
required: false,
},
],
content: {
type: String,
required: false,
default: '<Container></Container>',
},
testObject: {
type: Schema.Types.Mixed,
required: false,
},
},
{ timestamps: true }
);
// Add virtual id getter
documentTemplateSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
documentTemplateSchema.set('toJSON', { virtuals: true });
export const documentTemplateModel = mongoose.model('documentTemplate', documentTemplateSchema);

View File

@ -0,0 +1,26 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const filamentSchema = new mongoose.Schema({
name: { required: true, type: String },
barcode: { required: false, type: String },
url: { required: false, type: String },
image: { required: false, type: Buffer },
color: { required: true, type: String },
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
type: { required: true, type: String },
cost: { required: true, type: Number },
diameter: { required: true, type: Number },
density: { required: true, type: Number },
createdAt: { required: true, type: Date },
updatedAt: { required: true, type: Date },
emptySpoolWeight: { required: true, type: Number },
});
filamentSchema.virtual('id').get(function () {
return this._id.toHexString();
});
filamentSchema.set('toJSON', { virtuals: true });
export const filamentModel = mongoose.model('filament', filamentSchema);

View File

@ -0,0 +1,66 @@
import mongoose from 'mongoose';
// Define the device schema
const deviceInfoSchema = new mongoose.Schema(
{
os: {
platform: { type: String },
type: { type: String },
release: { type: String },
arch: { type: String },
hostname: { type: String },
uptime: { type: Number }
},
cpu: {
cores: { type: Number },
model: { type: String },
speedMHz: { type: Number }
},
memory: {
totalGB: { type: String }, // stored as string from .toFixed(2), could also use Number
freeGB: { type: String }
},
network: {
type: mongoose.Schema.Types.Mixed // since it's an object with dynamic interface names
},
user: {
uid: { type: Number },
gid: { type: Number },
username: { type: String },
homedir: { type: String },
shell: { type: String }
},
process: {
nodeVersion: { type: String },
pid: { type: Number },
cwd: { type: String },
execPath: { type: String }
}
},
{ _id: false }
);
const hostSchema = new mongoose.Schema({
name: { required: true, type: String },
tags: [{ required: false, type: String }],
online: { required: true, type: Boolean, default: false },
state: {
type: { type: String, required: true, default: 'offline' },
message: { type: String, required: false },
percent: { type: Number, required: false }
},
active: { required: true, type: Boolean, default: true },
connectedAt: { required: false, type: Date },
authCode: { required: false, type: String },
otp: { required: false, type: String },
otpExpiresAt: { required: false, type: Date },
deviceInfo: deviceInfoSchema
});
hostSchema.virtual('id').get(function () {
return this._id.toHexString();
});
hostSchema.set('toJSON', { virtuals: true });
export const hostModel = mongoose.model('host', hostSchema);

View File

@ -0,0 +1,16 @@
import mongoose from 'mongoose';
const materialSchema = new mongoose.Schema({
name: { required: true, type: String },
url: { required: false, type: String },
image: { required: false, type: Buffer },
tags: [{ type: String }],
});
materialSchema.virtual('id').get(function () {
return this._id.toHexString();
});
materialSchema.set('toJSON', { virtuals: true });
export const materialModel = mongoose.model('material', materialSchema);

View File

@ -0,0 +1,32 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const noteTypeSchema = new Schema(
{
name: {
type: String,
required: true,
unique: true,
},
color: {
type: String,
required: false,
},
active: {
type: Boolean,
required: true,
default: true,
},
},
{ timestamps: true }
);
// Add virtual id getter
noteTypeSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
noteTypeSchema.set('toJSON', { virtuals: true });
export const noteTypeModel = mongoose.model('noteType', noteTypeSchema);

View File

@ -0,0 +1,27 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main part schema
const partSchema = new Schema(
{
name: { type: String, required: true },
fileName: { type: String, required: false },
product: { type: mongoose.Schema.Types.ObjectId, ref: 'product' },
globalPricing: { type: Boolean, default: true },
priceMode: { type: String, default: 'margin' },
amount: { type: Number, required: false },
margin: { type: Number, required: false },
},
{ timestamps: true }
);
// Add virtual id getter
partSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
partSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const partModel = mongoose.model('part', partSchema);

View File

@ -0,0 +1,26 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the main product schema
const productSchema = new Schema(
{
name: { type: String, required: true },
tags: [{ type: String }],
version: { type: String },
priceMode: { type: String, default: 'margin' },
margin: { type: Number, required: false },
amount: { type: Number, required: false },
vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
},
{ timestamps: true }
);
// Add virtual id getter
productSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
productSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const productModel = mongoose.model('product', productSchema);

View File

@@ -6,7 +6,7 @@ const userSchema = new mongoose.Schema(
    name: { required: true, type: String },
    firstName: { required: false, type: String },
    lastName: { required: false, type: String },
-    email: { required: true, type: String }
+    email: { required: true, type: String },
  },
  { timestamps: true }
);
@@ -17,4 +17,4 @@ userSchema.virtual('id').get(function () {
userSchema.set('toJSON', { virtuals: true });

-export const userModel = mongoose.model('User', userSchema);
+export const userModel = mongoose.model('user', userSchema);

View File

@ -0,0 +1,21 @@
import mongoose from 'mongoose';
const vendorSchema = new mongoose.Schema(
{
name: { required: true, type: String },
website: { required: false, type: String },
email: { required: false, type: String },
phone: { required: false, type: String },
contact: { required: false, type: String },
country: { required: false, type: String },
},
{ timestamps: true }
);
vendorSchema.virtual('id').get(function () {
return this._id.toHexString();
});
vendorSchema.set('toJSON', { virtuals: true });
export const vendorModel = mongoose.model('vendor', vendorSchema);

View File

@ -0,0 +1,41 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const noteSchema = new mongoose.Schema({
parent: {
type: Schema.Types.ObjectId,
required: true,
},
content: {
type: String,
required: true,
},
noteType: {
type: Schema.Types.ObjectId,
ref: 'noteType',
required: true,
},
createdAt: {
type: Date,
required: true,
default: Date.now,
},
updatedAt: {
type: Date,
required: true,
default: Date.now,
},
user: {
type: Schema.Types.ObjectId,
ref: 'user',
required: false,
},
});
noteSchema.virtual('id').get(function () {
return this._id.toHexString();
});
noteSchema.set('toJSON', { virtuals: true });
export const noteModel = mongoose.model('note', noteSchema);

View File

@ -0,0 +1,24 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const gcodeFileSchema = new mongoose.Schema({
name: { required: true, type: String },
gcodeFileName: { required: false, type: String },
gcodeFileInfo: { required: true, type: Object },
size: { type: Number, required: false },
filament: { type: Schema.Types.ObjectId, ref: 'filament', required: true },
parts: [{ type: Schema.Types.ObjectId, ref: 'part', required: true }],
cost: { type: Number, required: false },
createdAt: { type: Date },
updatedAt: { type: Date },
});
gcodeFileSchema.index({ name: 'text', brand: 'text' });
gcodeFileSchema.virtual('id').get(function () {
return this._id.toHexString();
});
gcodeFileSchema.set('toJSON', { virtuals: true });
export const gcodeFileModel = mongoose.model('gcodeFile', gcodeFileSchema);

View File

@ -0,0 +1,34 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const jobSchema = new mongoose.Schema({
state: {
type: { required: true, type: String },
},
printers: [{ type: Schema.Types.ObjectId, ref: 'printer', required: false }],
createdAt: { required: true, type: Date },
updatedAt: { required: true, type: Date },
startedAt: { required: false, type: Date },
finishedAt: { required: false, type: Date },
gcodeFile: {
type: Schema.Types.ObjectId,
ref: 'gcodeFile',
required: false,
},
quantity: {
type: Number,
required: true,
default: 1,
min: 1,
},
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob', required: false }],
notes: [{ type: Schema.Types.ObjectId, ref: 'note', required: false }],
});
jobSchema.virtual('id').get(function () {
return this._id.toHexString();
});
jobSchema.set('toJSON', { virtuals: true });
export const jobModel = mongoose.model('job', jobSchema);

View File

@ -0,0 +1,72 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
// Define the moonraker connection schema
const moonrakerSchema = new Schema(
{
host: { type: String, required: true },
port: { type: Number, required: true },
protocol: { type: String, required: true },
apiKey: { type: String, default: null, required: false }
},
{ _id: false }
);
// Define the alert schema
const alertSchema = new Schema(
{
priority: { type: String, required: true }, // order to show
type: { type: String, required: true } // selectFilament, error, info, message,
},
{ timestamps: true, _id: false }
);
// Define the main FDM printer schema
const printerSchema = new Schema(
{
name: { type: String, required: true },
online: { type: Boolean, required: true, default: false },
state: {
type: { type: String, required: true, default: 'offline' },
progress: { type: Number, required: false, default: 0 }
},
connectedAt: { type: Date, default: null },
loadedFilament: {
type: Schema.Types.ObjectId,
ref: 'filament',
default: null
},
moonraker: { type: moonrakerSchema, required: true },
tags: [{ type: String }],
firmware: { type: String },
currentJob: { type: Schema.Types.ObjectId, ref: 'job' },
currentSubJob: { type: Schema.Types.ObjectId, ref: 'subJob' },
currentFilamentStock: { type: Schema.Types.ObjectId, ref: 'filamentStock' },
subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob' }],
vendor: {
type: Schema.Types.ObjectId,
ref: 'vendor',
default: null,
required: true
},
host: {
type: Schema.Types.ObjectId,
ref: 'host',
default: null,
required: true
},
alerts: [alertSchema]
},
{ timestamps: true }
);
// Add virtual id getter
printerSchema.virtual('id').get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
printerSchema.set('toJSON', { virtuals: true });
// Create and export the model
export const printerModel = mongoose.model('printer', printerSchema);
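
A rough sketch of creating a printer against this schema; the vendorId and hostId ObjectIds and the import path are assumptions:

import { printerModel } from './printer.schema.js'; // path is an assumption

const printer = await printerModel.create({
  name: 'Voron 2.4',
  moonraker: { host: '192.168.1.50', port: 7125, protocol: 'http' },
  vendor: vendorId, // existing vendor ObjectId
  host: hostId // existing host ObjectId
});
// state.type defaults to 'offline' and online defaults to false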


@ -0,0 +1,50 @@
import mongoose from 'mongoose';
const { Schema } = mongoose;
const subJobSchema = new mongoose.Schema({
printer: {
type: Schema.Types.ObjectId,
ref: 'printer',
required: true,
},
job: {
type: Schema.Types.ObjectId,
ref: 'job',
required: true,
},
subJobId: {
type: String,
required: true,
},
gcodeFile: {
type: Schema.Types.ObjectId,
ref: 'gcodeFile',
required: true,
},
state: {
type: { required: true, type: String },
percent: { required: false, type: Number },
},
number: {
type: Number,
required: true,
},
createdAt: {
type: Date,
default: Date.now,
},
updatedAt: {
type: Date,
default: Date.now,
},
startedAt: { required: false, type: Date },
finishedAt: { required: false, type: Date },
});
subJobSchema.virtual('id').get(function () {
return this._id.toHexString();
});
subJobSchema.set('toJSON', { virtuals: true });
export const subJobModel = mongoose.model('subJob', subJobSchema);

src/database/utils.js (new file, 524 lines)

@ -0,0 +1,524 @@
import { ObjectId } from 'mongodb';
import { auditLogModel } from './schemas/management/auditlog.schema.js';
import { etcdServer } from './etcd.js';
function parseFilter(property, value) {
if (typeof value === 'string') {
var trimmed = value.trim();
if (trimmed.charAt(3) == ':') {
trimmed = value.split(':')[1];
}
// Handle booleans
if (trimmed.toLowerCase() === 'true') return { [property]: true };
if (trimmed.toLowerCase() === 'false') return { [property]: false };
// Handle ObjectId (24-char hex)
if (/^[a-f\d]{24}$/i.test(trimmed) && trimmed.length >= 24) {
return { [property]: new ObjectId(trimmed) };
}
// Handle numbers
if (!isNaN(trimmed)) {
return { [property]: parseFloat(trimmed) };
}
// Default to case-insensitive regex for non-numeric strings
return {
[property]: {
$regex: trimmed,
$options: 'i'
}
};
}
// Handle actual booleans, numbers, objects, etc.
return { [property]: value };
}
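
A few illustrative calls (input values are assumptions):

parseFilter('online', 'true'); // { online: true }
parseFilter('vendor', '64f1a2b3c4d5e6f7a8b9c0d1'); // { vendor: new ObjectId('64f1a2b3c4d5e6f7a8b9c0d1') }
parseFilter('cost', '12.5'); // { cost: 12.5 }
parseFilter('name', 'pla'); // { name: { $regex: 'pla', $options: 'i' } }
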
function convertToCamelCase(obj) {
const result = {};
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const value = obj[key];
// Convert the key to camelCase
let camelKey = key
// First handle special cases with spaces, brackets and other characters
.replace(/\s*\[.*?\]\s*/g, '') // Remove brackets and their contents
.replace(/\s+/g, ' ') // Normalize spaces
.trim()
// Split by common separators (space, underscore, hyphen)
.split(/[\s_-]/)
// Convert to camelCase
.map((word, index) => {
// Remove any non-alphanumeric characters
word = word.replace(/[^a-zA-Z0-9]/g, '');
// Lowercase first word, uppercase others
return index === 0
? word.toLowerCase()
: word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
})
.join('');
// Handle values that are objects recursively
if (
value !== null &&
typeof value === 'object' &&
!Array.isArray(value)
) {
result[camelKey] = convertToCamelCase(value);
} else {
result[camelKey] = value;
}
}
}
return result;
}
function extractConfigBlock(fileContent, useCamelCase = true) {
const configObject = {};
// Extract header information
const headerBlockRegex =
/; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
const headerBlockMatch = fileContent.match(headerBlockRegex);
if (headerBlockMatch && headerBlockMatch[1]) {
const headerLines = headerBlockMatch[1].split('\n');
headerLines.forEach(line => {
const keyValueRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
const simpleValueRegex = /^\s*;\s*(.*?)\s*$/;
// Try key-value format first
let match = line.match(keyValueRegex);
if (match) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
if (!isNaN(value) && value !== '') {
value = Number(value);
}
configObject[key] = value;
} else {
// Try the simple format like "; generated by OrcaSlicer 2.1.1 on 2025-04-28 at 13:30:11"
match = line.match(simpleValueRegex);
if (match && match[1] && !match[1].includes('HEADER_BLOCK')) {
const text = match[1].trim();
// Extract slicer info
const slicerMatch = text.match(
/generated by (.*?) on (.*?) at (.*?)$/
);
if (slicerMatch) {
configObject['slicer'] = slicerMatch[1].trim();
configObject['date'] = slicerMatch[2].trim();
configObject['time'] = slicerMatch[3].trim();
} else {
// Just add as a general header entry if it doesn't match any specific pattern
const key = `header_${Object.keys(configObject).length}`;
configObject[key] = text;
}
}
}
});
}
// Extract thumbnail data
const thumbnailBlockRegex =
/; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
const thumbnailBlockMatch = fileContent.match(thumbnailBlockRegex);
if (thumbnailBlockMatch && thumbnailBlockMatch[1]) {
const thumbnailLines = thumbnailBlockMatch[1].split('\n');
let base64Data = '';
let thumbnailInfo = {};
thumbnailLines.forEach(line => {
// Extract thumbnail dimensions and size from the line "thumbnail begin 640x640 27540"
const thumbnailHeaderRegex = /^\s*;\s*thumbnail begin (\d+)x(\d+) (\d+)/;
const match = line.match(thumbnailHeaderRegex);
if (match) {
thumbnailInfo.width = parseInt(match[1], 10);
thumbnailInfo.height = parseInt(match[2], 10);
thumbnailInfo.size = parseInt(match[3], 10);
} else if (
line.trim().startsWith('; ') &&
!line.includes('THUMBNAIL_BLOCK')
) {
// Collect base64 data (remove the leading semicolon and space and thumbnail end)
const dataLine = line.trim().substring(2);
if (dataLine && dataLine != 'thumbnail end') {
base64Data += dataLine;
}
}
});
// Add thumbnail data to config object
if (base64Data) {
configObject.thumbnail = {
data: base64Data,
...thumbnailInfo
};
}
}
// Extract CONFIG_BLOCK
const configBlockRegex =
/; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
const configBlockMatch = fileContent.match(configBlockRegex);
if (configBlockMatch && configBlockMatch[1]) {
// Extract each config line
const configLines = configBlockMatch[1].split('\n');
// Process each line
configLines.forEach(line => {
// Check if the line starts with a semicolon and has an equals sign
const configLineRegex = /^\s*;\s*([^=]+?)\s*=\s*(.*?)\s*$/;
const match = line.match(configLineRegex);
if (match) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
if (value === 'true' || value === 'false') {
value = value === 'true';
} else if (!isNaN(value) && value !== '') {
// Check if it's a number (but not a percentage)
if (!value.includes('%')) {
value = Number(value);
}
}
configObject[key] = value;
}
});
}
// Extract additional variables that appear after EXECUTABLE_BLOCK_END
const additionalVarsRegex =
/; EXECUTABLE_BLOCK_(?:START|END)([\s\S]*?)(?:; CONFIG_BLOCK_START|$)/i;
const additionalVarsMatch = fileContent.match(additionalVarsRegex);
if (additionalVarsMatch && additionalVarsMatch[1]) {
const additionalLines = additionalVarsMatch[1].split('\n');
additionalLines.forEach(line => {
// Match both standard format and the special case for "total filament cost"
const varRegex =
/^\s*;\s*((?:filament used|filament cost|total filament used|total filament cost|total layers count|estimated printing time)[^=]*?)\s*=\s*(.*?)\s*$/;
const match = line.match(varRegex);
if (match) {
const key = match[1].replace(/\[([^\]]+)\]/g, '$1').trim();
let value = match[2].trim();
// Clean up values - remove units in brackets and handle special cases
if (key.includes('filament used')) {
// Extract just the numeric value, ignoring units in brackets
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
} else if (key.includes('filament cost')) {
// Extract just the numeric value
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
} else if (key.includes('total layers count')) {
value = parseInt(value, 10);
} else if (key.includes('estimated printing time')) {
// Keep as string but trim any additional whitespace
value = value.trim();
}
configObject[key] = value;
}
});
}
// Also extract extrusion width settings
const extrusionWidthRegex = /;\s*(.*?)\s*extrusion width\s*=\s*(.*?)mm/g;
let extrusionMatch;
while ((extrusionMatch = extrusionWidthRegex.exec(fileContent)) !== null) {
const settingName = extrusionMatch[1].trim();
const settingValue = parseFloat(extrusionMatch[2].trim());
configObject[`${settingName} extrusion width`] = settingValue;
}
// Extract additional parameters after CONFIG_BLOCK_END if they exist
const postConfigParams = /; CONFIG_BLOCK_END\s*\n([\s\S]*?)$/;
const postConfigMatch = fileContent.match(postConfigParams);
if (postConfigMatch && postConfigMatch[1]) {
const postConfigLines = postConfigMatch[1].split('\n');
postConfigLines.forEach(line => {
// Match lines with format "; parameter_name = value"
const paramRegex = /^\s*;\s*([^=]+?)\s*=\s*(.*?)\s*$/;
const match = line.match(paramRegex);
if (match) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
if (value === 'true' || value === 'false') {
value = value === 'true';
} else if (!isNaN(value) && value !== '') {
// Check if it's a number (but not a percentage)
if (!value.includes('%')) {
value = Number(value);
}
}
// Add to config object if not already present
if (!configObject[key]) {
configObject[key] = value;
}
}
});
}
// Apply camelCase conversion if requested
return useCamelCase ? convertToCamelCase(configObject) : configObject;
}
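
A minimal sketch of what extractConfigBlock returns for a stripped-down G-code header (the snippet is an assumption; real slicer output has many more lines):

const gcode = [
  '; CONFIG_BLOCK_START',
  '; layer_height = 0.2',
  '; spiral_vase = false',
  '; total filament used [g] = 12.34',
  '; CONFIG_BLOCK_END'
].join('\n');

extractConfigBlock(gcode);
// roughly: { layerHeight: 0.2, spiralVase: false, totalFilamentUsed: 12.34 }
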
function getChangedValues(oldObj, newObj, old = false) {
const changes = {};
const combinedObj = { ...oldObj, ...newObj };
// Check all keys in the new object
for (const key in combinedObj) {
// Skip if the key is _id or timestamps
if (key === 'createdAt' || key === 'updatedAt' || key === '_id') continue;
const oldVal = oldObj ? oldObj[key] : undefined;
const newVal = newObj ? newObj[key] : undefined;
// If both values are objects (but not arrays or null), recurse
if (
oldVal &&
newVal &&
typeof oldVal === 'object' &&
typeof newVal === 'object' &&
!Array.isArray(oldVal) &&
!Array.isArray(newVal) &&
oldVal !== null &&
newVal !== null
) {
if (oldVal?._id || newVal?._id) {
if (JSON.stringify(oldVal?._id) !== JSON.stringify(newVal?._id)) {
changes[key] = old ? oldVal : newVal;
}
} else {
const nestedChanges = getChangedValues(oldVal, newVal, old);
if (Object.keys(nestedChanges).length > 0) {
changes[key] = nestedChanges;
}
}
} else if (JSON.stringify(oldVal) !== JSON.stringify(newVal)) {
// If the old value is different from the new value, include it
changes[key] = old ? oldVal : newVal;
}
}
return changes;
}
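
For example (values assumed):

const before = { name: 'PLA Black', cost: 20, vendor: { _id: 'a1b2c3' } };
const after = { name: 'PLA Black', cost: 22, vendor: { _id: 'a1b2c3' } };

getChangedValues(before, after); // { cost: 22 }
getChangedValues(before, after, true); // { cost: 20 }
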
async function newAuditLog(newValue, parentId, parentType, owner, ownerType) {
// Filter out createdAt and updatedAt from newValue
const filteredNewValue = { ...newValue };
delete filteredNewValue.createdAt;
delete filteredNewValue.updatedAt;
const auditLog = new auditLogModel({
changes: {
new: filteredNewValue
},
parent: parentId,
parentType,
owner: owner._id,
ownerType: ownerType,
operation: 'new'
});
await auditLog.save();
await distributeNew(auditLog._id, 'auditLog');
}
async function editAuditLog(
oldValue,
newValue,
parentId,
parentType,
owner,
ownerType
) {
// Get only the changed values
const changedOldValues = getChangedValues(oldValue, newValue, true);
const changedNewValues = getChangedValues(oldValue, newValue, false);
// If no values changed, don't create an audit log
if (
Object.keys(changedOldValues).length === 0 ||
Object.keys(changedNewValues).length === 0
) {
return;
}
const auditLog = new auditLogModel({
changes: {
old: changedOldValues,
new: changedNewValues
},
parent: parentId,
parentType,
owner: owner._id,
ownerType: ownerType,
operation: 'edit'
});
await auditLog.save();
await distributeNew(auditLog._id, 'auditLog');
}
async function deleteAuditLog(
deleteValue,
parentId,
parentType,
owner,
ownerType
) {
const auditLog = new auditLogModel({
changes: {
old: deleteValue
},
parent: parentId,
parentType,
owner: owner._id,
ownerType: ownerType,
operation: 'delete'
});
await auditLog.save();
await distributeNew(auditLog._id, 'auditLog');
}
async function getAuditLogs(idOrIds) {
if (Array.isArray(idOrIds)) {
return auditLogModel.find({ parent: { $in: idOrIds } }).populate('owner');
} else {
return auditLogModel.find({ parent: idOrIds }).populate('owner');
}
}
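
The audit helpers are intended to wrap writes; a hedged sketch with assumed variables (existing and updated are before/after copies of a document, currentUser is the acting user document):

await editAuditLog(existing, updated, existing._id, 'filament', currentUser, 'user');

// Later, fetch the trail for one parent id (or an array of ids)
const trail = await getAuditLogs(existing._id);
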
async function distributeUpdate(value, id, type) {
await etcdServer.setKey(`/${type}s/${id}/object`, value);
}
async function distributeNew(id, type) {
await etcdServer.setKey(`/${type}s/new`, id);
}
function flatternObjectIds(object) {
if (!object || typeof object !== 'object') {
return object;
}
const result = {};
for (const [key, value] of Object.entries(object)) {
if (value && typeof value === 'object' && value._id) {
// If the value is an object with _id, convert to just the _id
result[key] = value._id;
} else {
// Keep primitive values as is
result[key] = value;
}
}
return result;
}
function expandObjectIds(input) {
// Helper to check if a value is an ObjectId or a 24-char hex string
function isObjectId(val) {
// Check for MongoDB ObjectId instance
if (val instanceof ObjectId) return true;
// Check for exactly 24 hex characters (no special characters)
if (typeof val === 'string' && /^[a-fA-F\d]{24}$/.test(val)) return true;
return false;
}
// Recursive function
function expand(value) {
if (Array.isArray(value)) {
return value.map(expand);
} else if (
value &&
typeof value === 'object' &&
!(value instanceof ObjectId)
) {
var result = {};
for (const [key, val] of Object.entries(value)) {
if (key === '_id') {
// Do not expand keys that are already named _id
result[key] = val;
} else if (isObjectId(val)) {
result[key] = { _id: val };
} else if (Array.isArray(val)) {
result[key] = val.map(expand);
} else if (val instanceof Date) {
result[key] = val;
} else if (val && typeof val === 'object') {
result[key] = expand(val);
} else {
result[key] = val;
}
}
return result;
} else if (isObjectId(value)) {
return { _id: value };
} else {
return value;
}
}
return expand(input);
}
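
flatternObjectIds and expandObjectIds are rough inverses; for example (values assumed):

flatternObjectIds({ name: 'Prusa MK4', vendor: { _id: '64f1a2b3c4d5e6f7a8b9c0d1', name: 'Prusa' } });
// => { name: 'Prusa MK4', vendor: '64f1a2b3c4d5e6f7a8b9c0d1' }

expandObjectIds({ name: 'Prusa MK4', vendor: '64f1a2b3c4d5e6f7a8b9c0d1' });
// => { name: 'Prusa MK4', vendor: { _id: '64f1a2b3c4d5e6f7a8b9c0d1' } }
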
// Returns a filter object based on allowed filters and req.query
function getFilter(query, allowedFilters, parse = true) {
let filter = {};
for (const [key, value] of Object.entries(query)) {
if (allowedFilters.includes(key)) {
const parsedFilter = parse ? parseFilter(key, value) : { [key]: value };
filter = { ...filter, ...parsedFilter };
}
}
return filter;
}
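
In an Express handler this might look like (req.query and the route are assumptions):

// GET /api/printers?online=true&vendor=64f1a2b3c4d5e6f7a8b9c0d1&page=2
const filter = getFilter(req.query, ['online', 'vendor']);
// => { online: true, vendor: new ObjectId('64f1a2b3c4d5e6f7a8b9c0d1') }; 'page' is not an allowed filter and is ignored
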
// Converts a properties argument (string or array) to an array of strings
function convertPropertiesString(properties) {
if (typeof properties === 'string') {
return properties.split(',');
} else if (!Array.isArray(properties)) {
return [];
}
return properties;
}
export {
parseFilter,
convertToCamelCase,
extractConfigBlock,
newAuditLog,
editAuditLog,
deleteAuditLog,
getAuditLogs,
flatternObjectIds,
expandObjectIds,
distributeUpdate,
distributeNew,
getFilter,
convertPropertiesString
};


@ -1,5 +1,4 @@
  import { loadConfig } from './config.js';
- import { KeycloakAuth } from './auth/auth.js';
  import { SocketManager } from './socket/socketmanager.js';
  import { etcdServer } from './database/etcd.js';
  import express from 'express';
@ -19,9 +18,7 @@ import { mongoServer } from './database/mongo.js';
  const app = express();
  const server = http.createServer(app);
- // Setup Keycloak Integration
- const keycloakAuth = new KeycloakAuth(config);
- new SocketManager(keycloakAuth, server);
+ new SocketManager(server);
  // Connect to Etcd (await)
  try {

@ -11,8 +11,8 @@ logger.level = config.server.logLevel;
 * LockManager handles distributed locking using Etcd and broadcasts lock events via websockets.
 */
  export class LockManager {
-   constructor(socketManager) {
-     this.socketManager = socketManager;
+   constructor(socketClient) {
+     this.socketClient = socketClient;
      this.setupLocksListeners();
    }
@ -20,7 +20,10 @@ export class LockManager {
      // Add a 'lock' event to the 'locks' stream
      logger.debug('Locking object:', object._id);
      try {
-       await etcdServer.set(`/locks/${object.type}s/${object._id}`, object);
+       await etcdServer.setKey(`/${object.type}s/${object._id}/lock`, {
+         ...object,
+         locked: true
+       });
        logger.info(`Lock event to id: ${object._id}`);
        return true;
      } catch (err) {
@ -31,16 +34,15 @@ export class LockManager {
    async unlockObject(object) {
      // Add an 'unlock' event to the 'locks' stream
-     const key = `/locks/${object.type}s/${object._id}`;
-     console.log('unlocking');
+     const key = `/${object.type}s/${object._id}/lock`;
      try {
        logger.debug('Checking user can unlock:', object._id);
-       const lockEvent = await etcdServer.get(key);
+       const lockEvent = await etcdServer.getKey(key);
        if (lockEvent?.user === object.user) {
          logger.debug('Unlocking object:', object._id);
-         await etcdServer.delete(key);
+         await etcdServer.deleteKey(key);
          logger.info(`Unlocked object: ${object._id}`);
          return true;
        }
@ -54,8 +56,8 @@ export class LockManager {
      // Get the current lock status of an object and broadcast it
      logger.info('Getting lock status for object:', object._id);
      try {
-       const lockKey = `/locks/${object.type}s/${object._id}`;
-       const lockValue = await etcdServer.get(lockKey);
+       const lockKey = `/${object.type}s/${object._id}/lock`;
+       const lockValue = await etcdServer.getKey(lockKey);
        if (lockValue) {
          // Object is locked
@ -79,15 +81,19 @@ export class LockManager {
      }
    }
    setupLocksListeners() {
-     etcdServer.onPrefixPut('/locks', (key, value) => {
+     etcdServer.onPrefixPutEvent(
+       '/locks',
+       this.socketClient.id,
+       (key, value) => {
          const id = key.split('/').pop();
          logger.debug('Lock object event:', id);
          this.socketManager.broadcast('notify_lock_update', {
            ...value,
            locked: true
          });
-     });
+       }
+     );
-     etcdServer.onPrefixDelete('/locks', key => {
+     etcdServer.onPrefixDeleteEvent('/locks', this.socketClient.id, key => {
        const id = key.split('/').pop();
        logger.debug('Unlock object event:', id);
        this.socketManager.broadcast('notify_lock_update', {


@ -1,128 +0,0 @@
import log4js from 'log4js';
// Load configuration
import { loadConfig } from '../config.js';
import { userModel } from '../database/user.schema.js';
const config = loadConfig();
const logger = log4js.getLogger('Socket Client');
logger.level = config.server.logLevel;
export class SocketClient {
constructor(socket, socketManager) {
this.socket = socket;
this.user = null;
this.socketManager = socketManager;
this.lockManager = socketManager.lockManager;
this.updateManager = socketManager.updateManager;
}
async initUser() {
if (this.socket?.user?.username) {
try {
const userDoc = await userModel
.findOne({ username: this.socket.user.username })
.lean();
this.user = userDoc;
logger.debug('ID:', this.user._id.toString());
logger.debug('Name:', this.user.name);
logger.debug('Username:', this.user.username);
logger.debug('Email:', this.user.email);
this.setupSocketEventHandlers();
} catch (err) {
logger.error('Error looking up user by username:', err);
this.user = null;
}
}
}
setupSocketEventHandlers() {
this.socket.on('lock', this.handleLockEvent.bind(this));
this.socket.on('unlock', this.handleUnlockEvent.bind(this));
this.socket.on('getLock', this.handleGetLockEvent.bind(this));
this.socket.on('update', this.handleUpdateEvent.bind(this));
}
async handleLockEvent(data) {
// data: { _id: string, params?: object }
if (!data || !data._id) {
this.socket.emit('lock_result', {
success: false,
error: 'Invalid lock event data'
});
return;
}
data = { ...data, user: this.user._id.toString() };
try {
await this.lockManager.lockObject(data);
} catch (err) {
logger.error('Lock event error:', err);
this.socket.emit('lock_result', { success: false, error: err.message });
}
}
async handleUnlockEvent(data) {
// data: { _id: string }
if (!data || !data._id) {
this.socket.emit('unlock_result', {
success: false,
error: 'Invalid unlock event data'
});
return;
}
data = { ...data, user: this.user._id.toString() };
try {
await this.lockManager.unlockObject(data);
} catch (err) {
logger.error('Unlock event error:', err);
this.socket.emit('unlock_result', { success: false, error: err.message });
}
}
async handleGetLockEvent(data, callback) {
// data: { _id: string }
if (!data || !data._id) {
callback({
error: 'Invalid getLock event data'
});
return;
}
try {
const lockEvent = await this.lockManager.getObjectLock(data);
callback(lockEvent);
} catch (err) {
logger.error('GetLock event error:', err);
callback({
error: err.message
});
}
}
async handleUpdateEvent(data) {
// data: { _id: string, type: string, ...otherProperties }
if (!data || !data._id || !data.type) {
return;
}
try {
// Add user information to the update data
const updateData = {
...data,
updatedAt: new Date()
};
// Use the updateManager to handle the update
if (this.updateManager) {
await this.updateManager.updateObject(updateData);
} else {
throw new Error('UpdateManager not available');
}
} catch (err) {
logger.error('Update event error:', err);
}
}
handleDisconnect() {
logger.info('External client disconnected:', this.socket.user?.username);
}
}

src/socket/sockethost.js (new file, 117 lines)

@ -0,0 +1,117 @@
import log4js from 'log4js';
// Load configuration
import { loadConfig } from '../config.js';
import { CodeAuth, createAuthMiddleware } from '../auth/auth.js';
import { editObject, getObject } from '../database/database.js';
import { hostModel } from '../database/schemas/management/host.schema.js';
import { UpdateManager } from '../updates/updatemanager.js';
import { ActionManager } from '../actions/actionmanager.js';
import { getModelByName } from '../utils.js';
const config = loadConfig();
const logger = log4js.getLogger('Socket Host');
logger.level = config.server.logLevel;
export class SocketHost {
constructor(socket, socketManager) {
this.socket = socket;
this.authenticated = false;
this.socketId = socket.id;
this.id = null;
this.host = null;
this.socketManager = socketManager;
this.updateManager = new UpdateManager(this);
this.actionManager = new ActionManager(this);
this.codeAuth = new CodeAuth();
this.setupSocketEventHandlers();
}
setupSocketEventHandlers() {
this.socket.use(createAuthMiddleware(this));
this.socket.on('authenticate', this.handleAuthenticate.bind(this));
this.socket.on('updateHost', this.handleUpdateHost.bind(this));
this.socket.on('getObject', this.handleGetObject.bind(this));
this.socket.on('disconnect', this.handleDisconnect.bind(this));
}
async initializeHost() {
this.actionManager.subscribeToObjectActions(this.id, 'host');
}
async handleAuthenticate(data, callback) {
logger.trace('handleAuthenticateEvent');
const id = data.id || undefined;
const authCode = data.authCode || undefined;
const otp = data.otp || undefined;
if (id && authCode) {
logger.info('Authenticating host with id + authCode...');
const verifyResult = await this.codeAuth.verifyCode(id, authCode);
if (verifyResult.valid == true) {
logger.info('Host authenticated and valid.');
this.host = verifyResult.host;
this.id = this.host._id.toString();
this.authenticated = true;
await editObject({
model: hostModel,
id: this.host._id,
updateData: { online: true, state: { type: 'online' } },
owner: this.host,
ownerType: 'host'
});
await this.initializeHost();
}
callback(verifyResult);
return;
}
if (otp) {
logger.info('Authenticating host otp...');
const verifyResult = await this.codeAuth.verifyOtp(otp);
if (verifyResult.valid == true) {
logger.info('Host authenticated and valid.');
this.host = verifyResult.host;
this.authenticated = true;
}
callback(verifyResult);
return;
}
callback({ valid: false, error: 'Missing params.' });
}
async handleUpdateHost(data) {
await editObject({
model: hostModel,
id: this.host._id,
updateData: { ...data.host },
owner: this.host,
ownerType: 'host'
});
}
async handleGetObject(data, callback) {
const object = await getObject({
model: getModelByName(data.objectType),
id: data._id,
cached: true,
populate: data.populate
});
callback(object);
}
async handleDisconnect() {
if (this.authenticated) {
await editObject({
model: hostModel,
id: this.host._id,
updateData: { online: false, state: { type: 'offline' } },
owner: this.host,
ownerType: 'host'
});
this.authenticated = false;
}
logger.info('External host disconnected. Socket ID:', this.id);
}
}
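
From the host's side, the handshake above could be exercised roughly as follows; the socket.io-client usage, server URL, and credential variables are assumptions, not part of this change:

import { io } from 'socket.io-client';

const socket = io('http://localhost:3000', { auth: { type: 'host' } });

// hostId and authCode were issued to this host beforehand (assumed values)
socket.emit('authenticate', { id: hostId, authCode }, result => {
  if (result.valid) {
    // Report a field change back to the server
    socket.emit('updateHost', { host: { firmware: 'Klipper v0.12' } });
  }
});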


@ -1,12 +1,13 @@
  // server.js - HTTP and Socket.IO server setup
  import { Server } from 'socket.io';
- import { createAuthMiddleware } from '../auth/auth.js';
  import log4js from 'log4js';
  // Load configuration
  import { loadConfig } from '../config.js';
- import { SocketClient } from './socketclient.js';
+ import { SocketUser } from './socketuser.js';
  import { LockManager } from '../lock/lockmanager.js';
  import { UpdateManager } from '../updates/updatemanager.js';
+ import { TemplateManager } from '../templates/templatemanager.js';
+ import { SocketHost } from './sockethost.js';
  const config = loadConfig();
@ -14,10 +15,10 @@ const logger = log4js.getLogger('Socket Manager');
  logger.level = config.server.logLevel;
  export class SocketManager {
-   constructor(auth, server) {
-     this.socketClientConnections = new Map();
-     this.lockManager = new LockManager(this);
-     this.updateManager = new UpdateManager(this);
+   constructor(server) {
+     this.socketUsers = new Map();
+     this.socketHosts = new Map();
+     this.templateManager = new TemplateManager(this);
      // Use the provided HTTP server
      // Create Socket.IO server
@ -28,55 +29,91 @@
      }
    });
-   // Apply authentication middleware
-   io.use(createAuthMiddleware(auth));
-   // Handle client connections
+   // Handle user connections
    io.on('connection', async socket => {
-     logger.info('External client connected:', socket.user?.username);
-     await this.addClient(socket);
+     const authType = socket.handshake?.auth?.type;
+     if (authType == 'user') {
+       await this.addUser(socket);
+     } else if (authType == 'host') {
+       await this.addHost(socket);
+     }
    });
    this.io = io;
    this.server = server;
  }
- async addClient(socket) {
-   const client = new SocketClient(socket, this, this.lockManager);
-   await client.initUser();
-   this.socketClientConnections.set(socket.id, client);
-   logger.info('External client connected:', socket.user?.username);
+ async addUser(socket) {
+   const socketUser = new SocketUser(socket, this, this.lockManager);
+   this.socketUsers.set(socketUser.id, socketUser);
+   logger.info('External user connected. Socket ID:', socket.id);
    // Handle disconnection
    socket.on('disconnect', () => {
-     logger.info('External client disconnected:', socket.user?.username);
-     this.removeClient(socket.id);
+     logger.info('External user disconnected. Socket ID:', socket.id);
+     this.removeUser(socket.id);
    });
  }
- removeClient(socketClientId) {
-   const socketClient = this.socketClientConnections.get(socketClientId);
-   if (socketClient) {
-     this.socketClientConnections.delete(socketClientId);
-     logger.info(
-       'External client disconnected:',
-       socketClient.socket.user?.username
-     );
-   }
- }
- getSocketClient(clientId) {
-   return this.socketClientConnections.get(clientId);
- }
- getAllSocketClients() {
-   return Array.from(this.socketClientConnections.values());
- }
- broadcast(event, data, excludeClientId = null) {
-   for (const [clientId, socketClient] of this.socketClientConnections) {
-     if (excludeClientId !== clientId) {
-       socketClient.socket.emit(event, data);
-     }
-   }
- }
+ async addHost(socket) {
+   const socketHost = new SocketHost(socket, this, this.lockManager);
+   this.socketHosts.set(socketHost.id, socketHost);
+   logger.info('External host connected. Socket ID:', socket.id);
+   // Handle disconnection
+   socket.on('disconnect', () => {
+     logger.info('External host disconnected. Socket ID:', socket.id);
+     this.removeHost(socket.id);
+   });
+ }
+ removeUser(id) {
+   const socketUser = this.socketUsers.get(id);
+   if (socketUser) {
+     this.socketUsers.delete(id);
+     logger.info('External user disconnected. Socket ID:', id);
+   }
+ }
+ removeHost(id) {
+   const socketHost = this.socketHosts.get(id);
+   if (socketHost) {
+     this.socketHosts.delete(id);
+     logger.info('External host disconnected. Socket ID:', id);
+   }
+ }
+ getSocketUser(userId) {
+   return this.socketUsers.get(userId);
+ }
+ getAllSocketUsers() {
+   return Array.from(this.socketUsers.values());
+ }
+ broadcast(event, data, excludeUserId = null) {
+   for (const [userId, socketUser] of this.socketUsers) {
+     if (excludeUserId !== userId) {
+       socketUser.socket.emit(event, data);
+     }
+   }
+ }
+ /**
+  * Send a message to a specific user by their user ID
+  * @param {string} id - The user ID to send the message to
+  * @param {string} event - The event name
+  * @param {any} data - The data to send
+  */
+ sendToUser(id, event, data) {
+   let sentCount = 0;
+   for (const [, socketUser] of this.socketUsers) {
+     if (socketUser.user && socketUser.user._id.toString() === id) {
+       socketUser.socket.emit(event, data);
+       sentCount += 1;
+       logger.debug(
+         `Sent ${event} to user: ${id}, connection: ${socketUser.socket.id}`
+       );
+     }
+   }
+   logger.debug(`Sent to ${sentCount} active connection(s).`);
+ }
  }

src/socket/socketuser.js (new file, 168 lines)

@ -0,0 +1,168 @@
import log4js from 'log4js';
// Load configuration
import { loadConfig } from '../config.js';
import { createAuthMiddleware, KeycloakAuth } from '../auth/auth.js';
import { generateHostOTP } from '../utils.js';
import { LockManager } from '../lock/lockmanager.js';
import { UpdateManager } from '../updates/updatemanager.js';
import { ActionManager } from '../actions/actionmanager.js';
const config = loadConfig();
const logger = log4js.getLogger('Socket User');
logger.level = config.server.logLevel;
export class SocketUser {
constructor(socket, socketManager) {
this.socket = socket;
this.authenticated = false;
this.socketId = socket.id;
this.id = null;
this.user = null;
this.socketManager = socketManager;
this.lockManager = new LockManager(this);
this.updateManager = new UpdateManager(this);
this.actionManager = new ActionManager(this);
this.templateManager = socketManager.templateManager;
this.keycloakAuth = new KeycloakAuth();
this.setupSocketEventHandlers();
}
setupSocketEventHandlers() {
this.socket.use(createAuthMiddleware(this));
this.socket.on('authenticate', this.handleAuthenticateEvent.bind(this));
this.socket.on('lock', this.handleLockEvent.bind(this));
this.socket.on('unlock', this.handleUnlockEvent.bind(this));
this.socket.on('getLock', this.handleGetLockEvent.bind(this));
this.socket.on(
'subscribeToObjectTypeUpdate',
this.handleSubscribeToObjectTypeUpdateEvent.bind(this)
);
this.socket.on(
'subscribeToObjectUpdate',
this.handleSubscribeToObjectUpdateEvent.bind(this)
);
this.socket.on(
'previewTemplate',
this.handlePreviewTemplateEvent.bind(this)
);
this.socket.on(
'generateHostOtp',
this.handleGenerateHostOtpEvent.bind(this)
);
this.socket.on('objectAction', this.handleObjectActionEvent.bind(this));
}
async handleAuthenticateEvent(data, callback) {
const token = data.token || undefined;
logger.info('Authenticating user with token...');
if (token) {
const result = await this.keycloakAuth.verifyToken(token);
if (result.valid == true) {
logger.info('User authenticated and valid.');
this.user = result.user;
this.id = this.user._id.toString();
this.authenticated = true;
} else {
logger.warn('User is not authenticated.');
}
callback(result);
}
}
async handleLockEvent(data) {
// data: { _id: string, params?: object }
if (!data || !data._id) {
this.socket.emit('lock_result', {
success: false,
error: 'Invalid lock event data'
});
return;
}
data = { ...data, user: this.user._id.toString() };
try {
await this.lockManager.lockObject(data);
} catch (err) {
logger.error('Lock event error:', err);
this.socket.emit('lock_result', { success: false, error: err.message });
}
}
async handleUnlockEvent(data) {
// data: { _id: string }
if (!data || !data._id) {
this.socket.emit('unlock_result', {
success: false,
error: 'Invalid unlock event data'
});
return;
}
data = { ...data, user: this.user._id.toString() };
try {
await this.lockManager.unlockObject(data);
} catch (err) {
logger.error('Unlock event error:', err);
this.socket.emit('unlock_result', { success: false, error: err.message });
}
}
async handleGetLockEvent(data, callback) {
// data: { _id: string }
if (!data || !data._id) {
callback({
error: 'Invalid getLock event data'
});
return;
}
try {
const lockEvent = await this.lockManager.getObjectLock(data);
callback(lockEvent);
} catch (err) {
logger.error('GetLock event error:', err);
callback({
error: err.message
});
}
}
async handleSubscribeToObjectTypeUpdateEvent(data, callback) {
const result = this.updateManager.subscribeToObjectNew(data.objectType);
callback(result);
}
async handleSubscribeToObjectUpdateEvent(data, callback) {
const result = this.updateManager.subscribeToObjectUpdate(
data._id,
data.objectType
);
callback(result);
}
async handlePreviewTemplateEvent(data, callback) {
const result = await this.templateManager.renderTemplate(
data._id,
data.content,
data.testObject,
data.scale
);
callback(result);
}
async handleGenerateHostOtpEvent(data, callback) {
const result = await generateHostOTP(data._id);
callback(result);
}
async handleObjectActionEvent(data, callback) {
await this.actionManager.sendObjectAction(
data._id,
data.objectType,
data.action,
callback
);
}
handleDisconnect() {
logger.info('External user disconnected:', this.socket.user?.username);
}
}
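
A corresponding user-side sketch (socket.io-client usage, URL, token, and ids are assumptions):

import { io } from 'socket.io-client';

const socket = io('http://localhost:3000', { auth: { type: 'user' } });

// keycloakToken and printerId are assumed to exist already
socket.emit('authenticate', { token: keycloakToken }, result => {
  if (!result.valid) return;
  // Ask the server to watch one object, then react to pushed updates
  socket.emit('subscribeToObjectUpdate', { _id: printerId, objectType: 'printer' }, ack => {
    console.log('subscribed:', ack);
  });
});

socket.on('objectUpdate', ({ _id, objectType, object }) => {
  console.log(objectType, _id, 'changed', object);
});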


@ -0,0 +1,45 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="initial-scale=1.0" />
<title>Document</title>
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
<link
href="https://fonts.googleapis.com/css2?family=Figtree:ital,wght@0,300..900;1,300..900&display=swap"
rel="stylesheet"
/>
<style>
<%- baseCSS %>
</style>
<style>
body {
min-width: calc(<%= width || '50mm' %> + 100px);
min-height: calc(<%= height || '50mm' %> + 100px);
}
.previewContainer {
transform: scale(<%= scale || '1' %>);
min-width: calc(<%= width || '50mm' %> + 100px);
min-height: calc(<%= height || '50mm' %> + 100px);
}
.previewDocument {
width: <%= width || '50mm' %>;
height: <%= height || '50mm' %>;
}
.renderDocument {
width: <%= width || '50mm' %>;
height: <%= height || '50mm' %>;
transform: scale(<%= scale || '1' %>);
}
</style>
</head>
<body>
<%- content %>
<script src="https://cdn.jsdelivr.net/npm/jsbarcode@3.11.0/dist/JsBarcode.all.min.js"></script>
<script>
JsBarcode('.documentBarcode').init();
</script>
</body>
</html>


@ -0,0 +1,3 @@
<div class="contentPlaceholder">
<p>Content</p>
</div>


@ -0,0 +1,3 @@
<div class="previewContainer">
<div class="previewDocument"><%- content %></div>
</div>


@ -0,0 +1 @@
<div class="renderDocument"><%- content %></div>


@ -0,0 +1,73 @@
body {
margin: 0;
font-family: 'Figtree', sans-serif;
font-optical-sizing: auto;
font-weight: 400;
font-style: normal;
overflow: scroll;
}
.previewContainer {
display: flex;
justify-content: center; /* Horizontal center */
align-items: center; /* Vertical center */
}
.previewDocument {
background: #ffffff;
border: 1px solid #000;
box-shadow: 0 0 5px rgba(0, 0, 0, 0.2);
}
.documentText {
margin: 0;
}
.documentTitle {
margin: 0;
}
h1.documentTitle {
font-weight: 800;
font-size: 38px;
}
h2.documentTitle {
font-weight: 800;
}
h3.documentTitle {
font-weight: 700;
}
h4.documentTitle {
font-weight: 700;
}
.documentFlex {
display: flex;
}
.documentDivider {
background: black;
height: 1px;
margin: 4px 0;
border: none;
}
.contentPlaceholder {
border: 1px solid black;
max-height: 250px;
height: 100%;
width: 100%;
display: flex;
align-items: center;
justify-content: center;
background: repeating-linear-gradient(
45deg,
/* Angle of the stripes */ #ccc,
/* Light grey */ #ccc 10px,
/* End of first stripe */ #eee 10px,
/* Start of next stripe (slightly lighter grey) */ #eee 20px
/* End of second stripe */
);
}
.contentPlaceholder > p {
text-transform: uppercase;
font-weight: 700;
}


@ -0,0 +1,296 @@
import ejs from 'ejs';
import log4js from 'log4js';
import posthtml from 'posthtml';
import { documentTemplateModel } from '../database/schemas/management/documenttemplate.schema.js';
import '../database/schemas/management/documentsize.schema.js';
// Load configuration
import { loadConfig } from '../config.js';
import fs from 'fs';
import dayjs from 'dayjs';
import utc from 'dayjs/plugin/utc.js';
import timezone from 'dayjs/plugin/timezone.js';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { getObject } from '../database/database.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Extend plugins
dayjs.extend(utc);
dayjs.extend(timezone);
const config = loadConfig();
const logger = log4js.getLogger('Template Manager');
logger.level = config.server.logLevel;
let baseTemplate;
let baseCSS;
let previewTemplate;
let contentPlaceholder;
async function loadTemplates() {
// Synchronously load files
baseTemplate = fs.readFileSync(
join(__dirname, '/assets/basetemplate.ejs'),
'utf8'
);
baseCSS = fs.readFileSync(join(__dirname, '/assets/styles.css'), 'utf8');
previewTemplate = fs.readFileSync(
join(__dirname, '/assets/previewtemplate.ejs'),
'utf8'
);
contentPlaceholder = fs.readFileSync(
join(__dirname, '/assets/contentplaceholder.ejs'),
'utf8'
);
}
loadTemplates();
function getNodeStyles(attributes) {
var styles = '';
if (attributes?.padding) {
styles += `padding: ${attributes.padding};`;
}
if (attributes?.width) {
styles += `width: ${attributes.width};`;
}
if (attributes?.height) {
styles += `height: ${attributes.height};`;
}
if (attributes?.gap && attributes?.vertical != 'true') {
styles += `column-gap: ${attributes.gap};`;
}
if (attributes?.gap && attributes?.vertical == 'true') {
styles += `row-gap: ${attributes.gap};`;
}
if (attributes?.justify) {
styles += `justify-content: ${attributes.justify};`;
}
if (attributes?.align) {
styles += `align-items: ${attributes.align};`;
}
if (attributes?.border) {
styles += `border: ${attributes.border};`;
}
if (attributes?.borderRadius) {
styles += `border-radius: ${attributes.borderRadius};`;
}
if (attributes?.vertical == 'true') {
styles += `flex-direction: column;`;
}
if (attributes?.grow) {
styles += `flex-grow: ${attributes.grow};`;
}
if (attributes?.shrink) {
styles += `flex-shrink: ${attributes.shrink};`;
}
return styles;
}
async function transformCustomElements(content) {
const result = await posthtml([
tree =>
tree.match({ tag: 'Title1' }, node => ({
...node,
tag: 'h1',
attrs: { class: 'documentTitle' }
})),
tree =>
tree.match({ tag: 'Title2' }, node => ({
...node,
tag: 'h2',
attrs: { class: 'documentTitle' }
})),
tree =>
tree.match({ tag: 'Title3' }, node => ({
...node,
tag: 'h3',
attrs: { class: 'documentText' }
})),
tree =>
tree.match({ tag: 'Title4' }, node => ({
...node,
tag: 'h4',
attrs: { class: 'documentText' }
})),
tree =>
tree.match({ tag: 'Text' }, node => ({
...node,
tag: 'p',
attrs: { class: 'documentText' }
})),
tree =>
tree.match({ tag: 'Bold' }, node => ({
...node,
tag: 'strong',
attrs: { style: 'font-weight: bold;', class: 'documentBoldText' }
})),
tree =>
tree.match({ tag: 'Barcode' }, node => {
return {
tag: 'svg',
attrs: {
class: 'documentBarcode',
'jsbarcode-width': node.attrs?.width,
'jsbarcode-height': node.attrs?.height,
'jsbarcode-value': node.content[0],
'jsbarcode-format': node.attrs.format
}
};
}),
tree =>
tree.match({ tag: 'Container' }, node => ({
...node,
tag: 'div',
attrs: {
class: 'documentContainer',
style: getNodeStyles(node.attrs)
}
})),
tree =>
tree.match({ tag: 'Flex' }, node => {
return {
...node,
tag: 'div',
attrs: {
class: 'documentFlex',
style: getNodeStyles(node.attrs)
}
};
}),
tree =>
tree.match({ tag: 'Divider' }, node => {
return {
...node,
tag: 'hr',
attrs: {
class: 'documentDivider',
style: getNodeStyles(node.attrs)
}
};
}),
tree =>
tree.match({ tag: 'DateTime' }, node => {
const dateTime = dayjs.utc(node.content[0]);
return {
content: [dateTime.format('YYYY-MM-DD hh:mm:ss')],
tag: 'span',
attrs: {
class: 'documentDateTime',
style: getNodeStyles(node.attrs)
}
};
})
]).process(content);
return result.html;
}
export class TemplateManager {
/**
* Previews an EJS template by rendering it with provided data
* @param {string} templateString - The EJS template as a string
* @param {Object} data - Data object to pass to the template
* @param {Object} options - EJS rendering options
* @returns {Promise<string>} The rendered HTML string
*/
async renderTemplate(id, content, data = {}, scale, options = {}) {
try {
// Set default options for EJS rendering
const defaultOptions = {
async: true,
...options
};
const documentTemplate = await getObject({
model: documentTemplateModel,
id,
populate: [
{ path: 'documentSize' },
{ path: 'parent', strictPopulate: false }
],
cached: true
});
if (documentTemplate == null) {
return { error: 'Document template not found.' };
}
const documentSize = documentTemplate.documentSize;
var templateData = data;
if (documentTemplate.global == true) {
templateData = { content: contentPlaceholder };
}
// Render the template
const templateContent = await ejs.render(
content,
templateData,
defaultOptions
);
var templateWithParentContent;
if (documentTemplate.parent != undefined) {
templateWithParentContent = await ejs.render(
documentTemplate.parent.content,
{ content: templateContent },
defaultOptions
);
} else {
templateWithParentContent = templateContent;
}
const templateHtml = await transformCustomElements(
templateWithParentContent
);
const previewHtml = await ejs.render(
previewTemplate,
{ content: templateHtml },
defaultOptions
);
const baseHtml = await ejs.render(
baseTemplate,
{
content: previewHtml,
width: `${documentSize.width}mm`,
height: `${documentSize.height}mm`,
scale: `${scale}`,
baseCSS: baseCSS
},
defaultOptions
);
const previewObject = {
html: baseHtml
};
return previewObject;
} catch (error) {
logger.warn('Error whilst previewing template:', error.message);
return { error: error.message };
}
}
/**
* Validates if a template string is valid EJS syntax
* @param {string} templateString - The EJS template as a string
* @returns {boolean} True if template is valid, false otherwise
*/
validateTemplate(templateString) {
try {
// Try to compile the template to check for syntax errors
ejs.compile(templateString);
return true;
} catch (error) {
return false;
}
}
}
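
A hedged usage sketch; templateId must refer to an existing documentTemplate with a documentSize, and the markup and data are assumptions:

const templateManager = new TemplateManager();
const preview = await templateManager.renderTemplate(
  templateId, // existing documentTemplate id (assumed)
  '<Flex vertical="true"><Title1><%= name %></Title1><Text><%= code %></Text></Flex>',
  { name: 'PLA Black', code: 'FS-0001' },
  1
);
// preview.html holds the rendered page, or preview.error explains the failure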


@ -1,7 +1,9 @@
- import { etcdServer } from '../database/etcd.js';
  import log4js from 'log4js';
  import { loadConfig } from '../config.js';
+ import NodeCache from 'node-cache';
+ import { etcdServer } from '../database/etcd.js';
+ import { updateObjectCache } from '../database/database.js';
  const config = loadConfig();
  // Setup logger
@ -12,40 +14,88 @@ logger.level = config.server.logLevel;
 * UpdateManager handles tracking object updates using Etcd and broadcasts update events via websockets.
 */
  export class UpdateManager {
-   constructor(socketManager) {
-     this.socketManager = socketManager;
-     this.setupUpdatesListeners();
+   constructor(socketClient) {
+     this.socketClient = socketClient;
    }
-   async updateObject(object) {
-     // Add an 'update' event to the 'updates' stream
-     logger.debug('Updating object:', object._id);
-     try {
-       const updateData = {
-         _id: object._id,
-         type: object.type,
-         updatedAt: new Date().toISOString()
-       };
-       await etcdServer.set(
-         `/updates/${object.type}s/${object._id}`,
-         updateData
-       );
-       logger.info(`Update event for id: ${object._id}`);
-     } catch (err) {
-       logger.error(`Error adding update event to: ${object._id}:`, err);
-       throw err;
-     }
-   }
-   setupUpdatesListeners() {
-     etcdServer.onPrefixPut('/updates', (key, value) => {
-       const id = key.split('/').pop();
-       logger.debug('Update object event:', id);
-       this.socketManager.broadcast('notify_object_update', {
-         ...value
-       });
-     });
-     logger.info('Subscribed to Etcd stream for update changes.');
-   }
+   async subscribeToObjectNew(objectType) {
+     await etcdServer.onKeyPutEvent(
+       `/${objectType}s/new`,
+       this.socketClient.socketId,
+       (key, value) => {
+         logger.trace('Object new event:', value);
+         this.socketClient.socket.emit('objectNew', {
+           _id: value,
+           objectType: objectType
+         });
+       }
+     );
+     return { success: true };
+   }
+   async subscribeToObjectUpdate(id, objectType) {
+     await etcdServer.onKeyPutEvent(
+       `/${objectType}s/${id}/object`,
+       this.socketClient.socketId,
+       (key, value) => {
+         logger.trace('Object update event:', id);
+         this.socketClient.socket.emit('objectUpdate', {
+           _id: id,
+           objectType: objectType,
+           object: { ...value }
+         });
+       }
+     );
+     return { success: true };
+   }
+   async removeObjectNewListener(objectType) {
+     await etcdServer.removeKeyWatcher(
+       `/${objectType}s/new`,
+       this.socketClient.socketId,
+       'put'
+     );
+     return { success: true };
+   }
+   async removeObjectUpdateListener(id, objectType) {
+     await etcdServer.removeKeyWatcher(
+       `/${objectType}s/${id}/object`,
+       this.socketClient.socketId,
+       'put'
+     );
+     return { success: true };
+   }
+   async getObjectUpdate(id, objectType) {
+     try {
+       const objectUpdate = {
+         _id: id,
+         objectType: objectType,
+         object: await etcdServer.get(`/${objectType}s/${id}/object`)
+       };
+       logger.trace(`Returning path: /${objectType}s/${id}/object`);
+       return objectUpdate;
+     } catch (error) {
+       logger.error(
+         `UpdateManager: Failed to get current value for /${objectType}s/${id}/object:`,
+         error
+       );
+       return { error: 'Not found' };
+     }
+   }
+   async setObjectUpdate(id, objectType, value) {
+     try {
+       await etcdServer.set(`/${objectType}s/${id}/object`, value);
+       logger.trace(`Set value for path: /${objectType}s/${id}/object`);
+       return true;
+     } catch (error) {
+       logger.error(
+         `Failed to set value for /${objectType}s/${id}/object:`,
+         error
+       );
+       return false;
+     }
+   }
  }

src/utils.js (new file, 90 lines)

@ -0,0 +1,90 @@
import { editObject } from './database/database.js';
import { hostModel } from './database/schemas/management/host.schema.js';
import crypto from 'crypto';
import { nanoid } from 'nanoid';
import { loadConfig } from './config.js';
import { userModel } from './database/schemas/management/user.schema.js';
import { documentSizeModel } from './database/schemas/management/documentsize.schema.js';
import { documentTemplateModel } from './database/schemas/management/documenttemplate.schema.js';
import { printerModel } from './database/schemas/production/printer.schema.js';
import { subJobModel } from './database/schemas/production/subjob.schema.js';
import { jobModel } from './database/schemas/production/job.schema.js';
import { filamentStockModel } from './database/schemas/inventory/filamentstock.schema.js';
const config = loadConfig();
const authCodeLength = 64;
const modelList = [
hostModel,
userModel,
documentSizeModel,
documentTemplateModel,
printerModel,
jobModel,
subJobModel,
filamentStockModel
];
export async function generateHostOTP(id) {
const otp = crypto.randomInt(0, 1000000).toString().padStart(6, '0'); // 0 to 999999
const expiresAt = new Date(
Date.now() + (config.otpExpiryMins || 2) * 60 * 1000
); // config.otpExpiryMins minutes, default 2
const otpHost = await editObject({
model: hostModel,
id: id,
updateData: { otp: otp, otpExpiresAt: expiresAt }
});
return otpHost;
}
export function generateAuthCode() {
return nanoid(authCodeLength);
}
export function generateEtcId() {
return nanoid(24);
}
export function getChangedValues(oldObj, newObj, old = false) {
const changes = {};
// Check all keys in the new object
for (const key in newObj) {
// Skip if the key is _id or timestamps
if (key === '_id' || key === 'createdAt' || key === 'updatedAt') continue;
const oldVal = oldObj ? oldObj[key] : undefined;
const newVal = newObj[key];
// If both values are objects (but not arrays or null), recurse
if (
oldVal &&
newVal &&
typeof oldVal === 'object' &&
typeof newVal === 'object' &&
!Array.isArray(oldVal) &&
!Array.isArray(newVal) &&
oldVal !== null &&
newVal !== null
) {
const nestedChanges = getChangedValues(oldVal, newVal, old);
if (Object.keys(nestedChanges).length > 0) {
changes[key] = nestedChanges;
}
} else if (JSON.stringify(oldVal) !== JSON.stringify(newVal)) {
// If the old value is different from the new value, include it
changes[key] = old ? oldVal : newVal;
}
}
return changes;
}
export function getModelByName(modelName) {
return modelList.find(model => model.modelName == modelName);
}
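
A closing sketch tying the helpers together; the id value is an assumption, and getObject comes from ./database/database.js as imported elsewhere in this changeset:

import { getObject } from './database/database.js';

const printer = await getObject({
  model: getModelByName('printer'),
  id: '64f1a2b3c4d5e6f7a8b9c0d1', // assumed existing printer id
  cached: true
});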