Added loads of features, updated packages, and created schema sync.

parent 97b77f5155
commit 36151f35bd
@ -49,7 +49,7 @@ A comprehensive REST API for managing 3D printing farms, inventory, and production
 KEYCLOAK_SECRET=your-client-secret

 # Application Configuration
-APP_URL_CLIENT=http://localhost:3000
+APP_URL_CLIENT=http://localhost:5173
 LOG_LEVEL=info

 # Optional: Scheduled Operations
52 fcdev.js Normal file
@ -0,0 +1,52 @@
import fs from 'fs/promises';
import path from 'path';
import { fileURLToPath } from 'url';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

async function syncModelsWithWS() {
  const sourceDir = path.resolve(__dirname, 'src/schemas');
  const targetDir = path.resolve(__dirname, '../farmcontrol-ws/src/database/schemas');

  console.log(`Syncing schemas from ${sourceDir} to ${targetDir}...`);

  try {
    await syncDirectory(sourceDir, targetDir, sourceDir);
    console.log('✅ Schema sync completed successfully!');
  } catch (error) {
    console.error('❌ Error syncing schemas:', error);
    process.exit(1);
  }
}

async function syncDirectory(source, target, rootSource) {
  // Create target directory if it doesn't exist
  try {
    await fs.access(target);
  } catch {
    await fs.mkdir(target, { recursive: true });
  }

  // Read all items in source directory
  const items = await fs.readdir(source, { withFileTypes: true });

  for (const item of items) {
    const sourcePath = path.join(source, item.name);
    const targetPath = path.join(target, item.name);

    if (item.isDirectory()) {
      // Recursively sync subdirectories
      await syncDirectory(sourcePath, targetPath, rootSource);
    } else if (item.isFile()) {
      // Copy file from source to target
      await fs.copyFile(sourcePath, targetPath);
      console.log(`  ✓ Copied: ${path.relative(rootSource, sourcePath)}`);
    }
  }
}

// Run the sync function when executed directly
syncModelsWithWS();

export { syncModelsWithWS };
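The new fcdev.js can be exercised on its own (node fcdev.js, or the syncModelsWithWS npm script added below) — a minimal sketch, assuming a sibling farmcontrol-ws checkout exists. Note that merely importing the module already triggers one sync, since syncModelsWithWS() is invoked at the top level:

```js
// Hypothetical one-off run from the API repo root.
// Importing fcdev.js performs a sync immediately (top-level call);
// the explicit call below just runs a second pass.
import { syncModelsWithWS } from './fcdev.js';

await syncModelsWithWS(); // copies src/schemas/** into ../farmcontrol-ws/src/database/schemas
```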
13 nodemon.schemas.json Normal file
@ -0,0 +1,13 @@
{
  "watch": ["src/schemas"],
  "ext": "js",
  "ignore": ["node_modules", "*.test.js", "*.spec.js", "../farmcontrol-ws"],
  "exec": "node fcdev.js",
  "delay": 2000,
  "verbose": false,
  "restartable": "rs",
  "env": {
    "NODE_ENV": "development"
  },
  "colours": true
}
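Roughly, this config asks nodemon to watch src/schemas for .js changes and re-run node fcdev.js after a 2-second delay. A hedged plain-Node equivalent (assumes fs.watch with recursive: true is available, which holds on macOS, Windows, and recent Linux/Node versions):

```js
// Approximate stand-in for nodemon.schemas.json without nodemon.
import { watch } from 'fs';
import { spawn } from 'child_process';

let timer;
watch('src/schemas', { recursive: true }, (event, filename) => {
  if (!filename || !filename.endsWith('.js')) return; // "ext": "js"
  clearTimeout(timer); // "delay": 2000 behaves like a debounce
  timer = setTimeout(() => {
    spawn('node', ['fcdev.js'], {
      stdio: 'inherit',
      env: { ...process.env, NODE_ENV: 'development' }, // from the "env" block
    });
  }, 2000);
});
```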
8127 package-lock.json generated
File diff suppressed because it is too large
31 package.json
@ -4,14 +4,15 @@
   "description": "",
   "main": "index.js",
   "dependencies": {
-    "@aws-sdk/client-s3": "^3.0.0",
-    "@aws-sdk/s3-request-presigner": "^3.0.0",
-    "@nats-io/transport-node": "^3.1.0",
-    "axios": "^1.11.0",
+    "@aws-sdk/client-s3": "^3.932.0",
+    "@aws-sdk/s3-request-presigner": "^3.932.0",
+    "@nats-io/transport-node": "^3.2.0",
+    "axios": "^1.13.2",
     "bcrypt": "^6.0.0",
     "body-parser": "^2.2.0",
+    "canonical-json": "^0.2.0",
     "cors": "^2.8.5",
-    "dotenv": "^17.2.1",
+    "dotenv": "^17.2.3",
     "etcd3": "^1.1.2",
     "exifr": "^7.1.3",
     "express": "^5.1.0",
@ -20,26 +21,29 @@
     "jsonwebtoken": "^9.0.2",
     "keycloak-connect": "^26.1.1",
     "log4js": "^6.9.1",
-    "mongodb": "^6.18.0",
-    "mongoose": "^8.17.1",
+    "mongodb": "^6.21.0",
+    "mongoose": "^8.19.4",
     "multer": "^2.0.2",
     "nanoid": "^5.1.6",
     "node-cache": "^5.1.2",
     "node-cron": "^4.2.1",
     "nodemailer": "*",
-    "nodemon": "^3.1.10",
+    "nodemon": "^3.1.11",
     "pg": "^8.16.3",
+    "redis": "^5.10.0",
     "sequelize": "^6.37.7"
   },
   "type": "module",
   "devDependencies": {
     "@babel/cli": "^7.28.3",
-    "@babel/core": "^7.28.3",
+    "@babel/core": "^7.28.5",
     "@babel/node": "^7.28.0",
     "@babel/plugin-proposal-class-properties": "^7.18.6",
     "@babel/plugin-proposal-object-rest-spread": "^7.20.7",
-    "@babel/preset-env": "^7.28.3",
+    "@babel/preset-env": "^7.28.5",
     "@babel/register": "^7.28.3",
-    "eslint": "^9.33.0",
+    "concurrently": "^9.2.1",
+    "eslint": "^9.39.1",
     "eslint-config-prettier": "^10.1.8",
     "eslint-plugin-prettier": "^5.5.4",
     "prettier": "^3.6.2",
@ -47,7 +51,10 @@
     "standard": "^17.1.2"
   },
   "scripts": {
-    "dev": "nodemon --exec babel-node --experimental-specifier-resolution=node src/index.js",
+    "syncModelsWithWS": "node fcdev.js",
+    "watch:schemas": "nodemon --config nodemon.schemas.json",
+    "dev": "concurrently --names \"API,SCHEMAS\" --prefix-colors \"cyan,yellow\" \"nodemon --exec babel-node --experimental-specifier-resolution=node src/index.js\" \"nodemon --config nodemon.schemas.json\"",
+    "dev:api": "nodemon --exec babel-node --experimental-specifier-resolution=node src/index.js",
     "test": "echo \"Error: no test specified\" && exit 1",
     "seed": "node src/mongo/seedData.js",
     "clear": "node src/mongo/clearDbs.js"
@ -1,21 +1,113 @@
 import dotenv from 'dotenv';
 import { fileModel } from '../schemas/management/file.schema.js';
+import _ from 'lodash';
 import {
   deleteAuditLog,
   distributeDelete,
   expandObjectIds,
   modelHasRef,
   getFieldsByRef,
+  jsonToCacheKey,
 } from '../utils.js';
 import log4js from 'log4js';
-import { editAuditLog, distributeUpdate, newAuditLog, distributeNew } from '../utils.js';
+import {
+  editAuditLog,
+  distributeUpdate,
+  newAuditLog,
+  distributeNew,
+  distributeChildUpdate,
+  distributeChildDelete,
+  distributeChildNew,
+} from '../utils.js';
 import { getAllModels } from '../services/misc/model.js';
+import { redisServer } from './redis.js';

 dotenv.config();

 const logger = log4js.getLogger('Database');
 logger.level = process.env.LOG_LEVEL;

+const cacheLogger = log4js.getLogger('DatabaseCache');
+cacheLogger.level = process.env.LOG_LEVEL;
+
+const CACHE_TTL_SECONDS = parseInt(process.env.REDIS_CACHE_TTL || '30', 10);
+
+export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
+  if (!model || !id) return undefined;
+
+  const cacheKeyObject = {
+    model: model.modelName,
+    id: id.toString(),
+  };
+
+  const cacheKey = jsonToCacheKey(cacheKeyObject);
+
+  cacheLogger.trace('Retrieving object from cache:', cacheKeyObject);
+
+  try {
+    const cachedObject = await redisServer.getKey(cacheKey);
+    if (cachedObject == null) {
+      cacheLogger.trace('Cache miss:', cacheKeyObject);
+      return undefined;
+    }
+
+    cacheLogger.trace('Cache hit:', {
+      model: model.modelName,
+      id: id.toString(),
+    });
+
+    return cachedObject;
+  } catch (err) {
+    cacheLogger.error('Error retrieving object from Redis cache:', err);
+    return undefined;
+  }
+};
+
+export const updateObjectCache = async ({ model, id, object, populate = [] }) => {
+  if (!model || !id || !object) return object;
+
+  const cacheKeyObject = {
+    model: model.modelName,
+    id: id.toString(),
+  };
+
+  const cacheKey = jsonToCacheKey(cacheKeyObject);
+
+  cacheLogger.trace('Updating object cache:', cacheKeyObject);
+
+  try {
+    const cachedObject = (await redisServer.getKey(cacheKey)) || {};
+    const mergedObject = _.merge(cachedObject, object);
+    await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);
+    cacheLogger.trace('Updated object cache:', cacheKeyObject);
+  } catch (err) {
+    cacheLogger.error('Error updating object in Redis cache:', err);
+  }
+
+  return object;
+};
+
+export const deleteObjectCache = async ({ model, id }) => {
+  if (!model || !id) return;
+
+  const cacheKeyObject = {
+    model: model.modelName,
+    id: id.toString(),
+    populate: [],
+  };
+
+  const cacheKey = jsonToCacheKey(cacheKeyObject);
+
+  cacheLogger.trace('Deleting object cache:', cacheKeyObject);
+
+  try {
+    await redisServer.deleteKey(cacheKey);
+    cacheLogger.trace('Deleted object cache:', cacheKeyObject);
+  } catch (err) {
+    cacheLogger.error('Error deleting object from Redis cache:', err);
+  }
+};
+
 // Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
 export const listObjects = async ({
   model,
@ -47,6 +139,10 @@ export const listObjects = async ({
     sort = 'createdAt';
   }

+  if (filter) {
+    console.log('filter', filter);
+  }
+
   // Translate any key ending with ._id to remove the ._id suffix for Mongoose
   Object.keys(filter).forEach((key) => {
     if (key.endsWith('._id')) {
@ -142,6 +238,8 @@ function nestGroups(groups, props, filter, idx = 0) {
     // Check if any group in this key matches the filter (by _id or name)
     const matches = groupList.filter((group) => {
       const { filterVals } = getKeyAndFilterVals(group._id[prop]);
+      console.log('filterVals', filterVals);
+      console.log('filterValue', filterValue);
       return filterVals.some((val) => val?.toString() === filterValue);
     });
     if (matches.length > 0) {
@ -269,6 +367,15 @@ export const listObjectsByProperties = async ({
 // Reusable function to get a single object by ID
 export const getObject = async ({ model, id, populate }) => {
   try {
+    logger.trace('Getting object:', {
+      model,
+      id,
+      populate,
+    });
+
+    // Try cache
+    const cachedObject = await retrieveObjectCache({ model, id, populate });
+
     let query = model.findById(id).lean();

     // Auto-populate file references if the model has them
@ -297,7 +404,17 @@ export const getObject = async ({ model, id, populate }) => {
       return { error: 'Object not found.', code: 404 };
     }

-    return expandObjectIds(result);
+    const expanded = _.merge(cachedObject || {}, expandObjectIds(result));
+
+    // Update cache with the expanded object
+    await updateObjectCache({
+      model,
+      id: expanded._id,
+      object: expanded,
+      populate,
+    });
+
+    return expanded;
   } catch (error) {
     return { error: error, code: 500 };
   }
@ -413,7 +530,24 @@ export const editObject = async ({ model, id, updateData, user, populate }) => {
     );
     // Distribute update
     await distributeUpdate(updateData, id, parentType);
-    return { ...previousExpandedObject, ...updateData };
+    // Call childUpdate event for any child objects
+    await distributeChildUpdate(
+      previousExpandedObject,
+      { ...previousExpandedObject, ...updateData },
+      id,
+      model
+    );
+    const updatedObject = { ...previousExpandedObject, ...updateData };
+
+    // Update cache with the new version
+    await updateObjectCache({
+      model,
+      id,
+      object: updatedObject,
+      populate,
+    });
+
+    return updatedObject;
   } catch (error) {
     logger.error('editObject error:', error);
     return { error: error.message, code: 500 };
@ -435,6 +569,15 @@ export const newObject = async ({ model, newData, user = null }, distributeChanges
     if (distributeChanges == true) {
       await distributeNew(created, parentType);
     }
+    await distributeChildNew(created, created._id, model);
+
+    // Cache the newly created object
+    await updateObjectCache({
+      model,
+      id: created._id,
+      object: created,
+      populate: [],
+    });

     return created;
   } catch (error) {
@ -462,6 +605,11 @@ export const deleteObject = async ({ model, id, user = null }, distributeChanges
       await distributeDelete(deleted, parentType);
     }

+    await distributeChildDelete(deleted, id, model);
+
+    // Invalidate cache for this object
+    await deleteObjectCache({ model, id });
+
     return { deleted: true, object: deleted };
   } catch (error) {
     logger.error('deleteObject error:', error);
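Taken together, the three cache helpers give the CRUD layer a read-through, merge-on-write cache keyed on model name plus id (via jsonToCacheKey over canonical JSON). A hedged usage sketch — printerModel stands in for any Mongoose model in the repo:

```js
// Read path: try Redis first, fall back to Mongo on a miss.
let printer = await retrieveObjectCache({ model: printerModel, id });
if (printer === undefined) {
  printer = await printerModel.findById(id).lean();
  // Write path: _.merge into whatever was cached and refresh the TTL
  // (REDIS_CACHE_TTL, defaulting to 30 seconds).
  await updateObjectCache({ model: printerModel, id, object: printer });
}
// On delete, deleteObjectCache({ model: printerModel, id }) invalidates the entry.
```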
68 src/database/redis.js Normal file
@ -0,0 +1,68 @@
import { createClient } from 'redis';
import log4js from 'log4js';
import dotenv from 'dotenv';

dotenv.config();

const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
const REDIS_URL = process.env.REDIS_URL;
const REDIS_HOST = process.env.REDIS_HOST || '127.0.0.1';
const REDIS_PORT = process.env.REDIS_PORT || 6379;
const REDIS_PASSWORD = process.env.REDIS_PASSWORD || undefined;

const logger = log4js.getLogger('Redis');
logger.level = LOG_LEVEL;

class RedisServer {
  constructor() {
    const url = REDIS_URL || `redis://${REDIS_HOST}:${REDIS_PORT}`;

    this.client = createClient({
      url,
      password: REDIS_PASSWORD,
    });

    this.client.on('error', (err) => {
      logger.error('Redis Client Error', err);
    });

    this.connected = false;
  }

  async connect() {
    if (this.connected) return;
    await this.client.connect();
    this.connected = true;
    logger.info('Connected to Redis');
  }

  async setKey(key, value, ttlSeconds) {
    await this.connect();
    const payload = typeof value === 'string' ? value : JSON.stringify(value);
    if (ttlSeconds) {
      await this.client.set(key, payload, { EX: ttlSeconds });
    } else {
      await this.client.set(key, payload);
    }
  }

  async getKey(key) {
    await this.connect();
    const value = await this.client.get(key);
    if (value == null) return null;
    try {
      return JSON.parse(value);
    } catch {
      return value;
    }
  }

  async deleteKey(key) {
    await this.connect();
    await this.client.del(key);
  }
}

const redisServer = new RedisServer();

export { RedisServer, redisServer };
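The wrapper connects lazily on first use and JSON-serializes non-string values, so callers never touch the raw client. A minimal sketch:

```js
import { redisServer } from './src/database/redis.js';

await redisServer.setKey('greeting', { hello: 'world' }, 60); // stored as JSON with a 60s TTL
const value = await redisServer.getKey('greeting'); // parsed back into an object
await redisServer.deleteKey('greeting');
```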
@ -43,7 +43,7 @@ import { initializeBuckets } from './services/storage/ceph.js';

 dotenv.config();

-const PORT = process.env.PORT || 8080;
+const PORT = process.env.PORT || 8787;
 const app = express();

 const logger = log4js.getLogger('App');
@ -70,8 +70,8 @@ async function initializeApp() {
   dbConnect();

   // Connect to Etcd
-  etcdServer.connect();
-  logger.info('Connected to Etcd');
+  //etcdServer.connect();
+  //logger.info('Connected to Etcd');

   // Connect to NATS
   natsServer.connect();
@ -6,6 +6,8 @@ import jwt from 'jsonwebtoken';
 import log4js from 'log4js';
 import NodeCache from 'node-cache';
 import { userModel } from './schemas/management/user.schema.js';
+import { getObject } from './database/database.js';
+import { hostModel } from './schemas/management/host.schema.js';

 dotenv.config();
 const logger = log4js.getLogger('Keycloak');
@ -120,6 +122,19 @@ const isAuthenticated = async (req, res, next) => {
     }
   }

+  console.log('Checking host authentication', req.headers);
+
+  const hostId = req.headers['x-host-id'];
+  const authCode = req.headers['x-auth-code'];
+  if (hostId && authCode) {
+    const host = await getObject({ model: hostModel, id: hostId });
+    if (host && host.authCode == authCode) {
+      return next();
+    }
+  } else {
+    return res.status(401).json({ error: 'Not Authenticated', code: 'UNAUTHORIZED' });
+  }
+
   // Fallback to session-based authentication
   console.log('Using session token');
   if (req.session && req.session['keycloak-token']) {
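With this change, a registered host can authenticate via the x-host-id / x-auth-code headers instead of a Keycloak session. A hedged client-side sketch (URL and route are placeholders; any route guarded by isAuthenticated should accept the headers):

```js
// Hypothetical host client call against the API (default port is now 8787).
const res = await fetch('http://localhost:8787/printers', {
  headers: {
    'x-host-id': hostId, // _id of a host document
    'x-auth-code': authCode, // must match host.authCode
  },
});
```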
@ -1,6 +1,6 @@
 import express from 'express';
 import { isAuthenticated } from '../../keycloak.js';
-import { parseFilter } from '../../utils.js';
+import { getFilter, convertPropertiesString } from '../../utils.js';

 const router = express.Router();
 import {
@ -8,26 +8,27 @@ import {
   getPartStockRouteHandler,
   editPartStockRouteHandler,
   newPartStockRouteHandler,
   deletePartStockRouteHandler,
+  listPartStocksByPropertiesRouteHandler,
 } from '../../services/inventory/partstocks.js';

-// list of partStocks
+// list of part stocks
 router.get('/', isAuthenticated, (req, res) => {
-  const { page, limit, property } = req.query;
-
-  const allowedFilters = ['country'];
-
-  var filter = {};
-
-  for (const [key, value] of Object.entries(req.query)) {
-    for (var i = 0; i < allowedFilters.length; i++) {
-      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value);
-        filter = { ...filter, ...parsedFilter };
-      }
-    }
-  }
-
-  listPartStocksRouteHandler(req, res, page, limit, property, filter);
+  const { page, limit, property, search, sort, order } = req.query;
+  const allowedFilters = ['part', 'state', 'startingQuantity', 'currentQuantity', 'part._id'];
+  const filter = getFilter(req.query, allowedFilters);
+  listPartStocksRouteHandler(req, res, page, limit, property, filter, search, sort, order);
+});
+
+router.get('/properties', isAuthenticated, (req, res) => {
+  let properties = convertPropertiesString(req.query.properties);
+  const allowedFilters = ['part', 'state.type'];
+  const filter = getFilter(req.query, allowedFilters, false);
+  var masterFilter = {};
+  if (req.query.masterFilter) {
+    masterFilter = JSON.parse(req.query.masterFilter);
+  }
+
+  listPartStocksByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
 });

 router.post('/', isAuthenticated, (req, res) => {
@ -38,9 +39,12 @@ router.get('/:id', isAuthenticated, (req, res) => {
   getPartStockRouteHandler(req, res);
 });

-// update printer info
 router.put('/:id', isAuthenticated, async (req, res) => {
   editPartStockRouteHandler(req, res);
 });

+router.delete('/:id', isAuthenticated, async (req, res) => {
+  deletePartStockRouteHandler(req, res);
+});
+
 export default router;
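The inline whitelist-and-parse loops in these route files are now centralized in a getFilter helper. Its implementation is not part of this diff; judging by the loops it replaces, it plausibly looks like the sketch below (the extra boolean some call sites pass is not reconstructable from the diff):

```js
// Hedged reconstruction of getFilter from the code it replaces:
// keep only whitelisted query keys, folding each through parseFilter.
export function getFilter(query, allowedFilters) {
  let filter = {};
  for (const [key, value] of Object.entries(query)) {
    if (allowedFilters.includes(key)) {
      filter = { ...filter, ...parseFilter(key, value) };
    }
  }
  return filter;
}
```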
@ -1,41 +1,50 @@
 import express from 'express';
 import { isAuthenticated } from '../../keycloak.js';
-import { parseFilter } from '../../utils.js';
+import { getFilter, convertPropertiesString } from '../../utils.js';

 const router = express.Router();
 import {
   listStockEventsRouteHandler,
   getStockEventRouteHandler,
   newStockEventRouteHandler,
   editStockEventRouteHandler,
   deleteStockEventRouteHandler,
+  listStockEventsByPropertiesRouteHandler,
 } from '../../services/inventory/stockevents.js';

-// List stock events
+// list of stock events
 router.get('/', isAuthenticated, (req, res) => {
   const { page, limit, sort, order } = req.query;

   const allowedFilters = ['owner_.id', 'parent._id'];
-
-  var filter = {};
-
-  for (const [key, value] of Object.entries(req.query)) {
-    for (var i = 0; i < allowedFilters.length; i++) {
-      if (key == allowedFilters[i]) {
-        const parsedFilter = parseFilter(key, value);
-        filter = { ...filter, ...parsedFilter };
-      }
-    }
-  }
+  const filter = getFilter(req.query, allowedFilters);
   listStockEventsRouteHandler(req, res, page, limit, filter, sort, order);
 });

-// Create new stock event
+router.get('/properties', isAuthenticated, (req, res) => {
+  let properties = convertPropertiesString(req.query.properties);
+  const allowedFilters = ['owner_.id', 'parent._id'];
+  const filter = getFilter(req.query, allowedFilters, false);
+  var masterFilter = {};
+  if (req.query.masterFilter) {
+    masterFilter = JSON.parse(req.query.masterFilter);
+  }
+  listStockEventsByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
+});
+
 router.post('/', isAuthenticated, (req, res) => {
   newStockEventRouteHandler(req, res);
 });

 // Get specific stock event
 router.get('/:id', isAuthenticated, (req, res) => {
   getStockEventRouteHandler(req, res);
 });

 router.put('/:id', isAuthenticated, async (req, res) => {
   editStockEventRouteHandler(req, res);
 });

 router.delete('/:id', isAuthenticated, async (req, res) => {
   deleteStockEventRouteHandler(req, res);
 });

 export default router;
@ -8,6 +8,7 @@ import {
   getGCodeFileRouteHandler,
   newGCodeFileRouteHandler,
   listGCodeFilesByPropertiesRouteHandler,
+  getGCodeFileContentRouteHandler,
 } from '../../services/production/gcodefiles.js';
 import { convertPropertiesString, getFilter } from '../../utils.js';

@ -21,7 +22,7 @@ router.get('/', isAuthenticated, (req, res) => {

 router.get('/properties', isAuthenticated, (req, res) => {
   let properties = convertPropertiesString(req.query.properties);
-  const allowedFilters = ['tags'];
+  const allowedFilters = ['filament'];
   const filter = getFilter(req.query, allowedFilters, false);
   listGCodeFilesByPropertiesRouteHandler(req, res, properties, filter);
 });
@ -35,6 +36,10 @@ router.get('/:id', isAuthenticated, (req, res) => {
   getGCodeFileRouteHandler(req, res);
 });

+router.get('/:id/content', isAuthenticated, (req, res) => {
+  getGCodeFileContentRouteHandler(req, res);
+});
+
 // update gcodeFile info
 router.put('/:id', isAuthenticated, async (req, res) => {
   editGCodeFileRouteHandler(req, res);
@ -15,7 +15,7 @@ import { convertPropertiesString, getFilter } from '../../utils.js';
 // list of printers
 router.get('/', isAuthenticated, (req, res) => {
   const { page, limit, property, search, sort, order } = req.query;
-  const allowedFilters = ['tags', 'host._id'];
+  const allowedFilters = ['tags', 'host._id', 'active', 'online', 'name'];
   const filter = getFilter(req.query, allowedFilters);
   listPrintersRouteHandler(req, res, page, limit, property, filter, search, sort, order);
 });
@ -7,7 +7,7 @@ import {
   listSubJobsByPropertiesRouteHandler,
   getSubJobRouteHandler,
 } from '../../services/production/subjobs.js';
-import { getFilter } from '../../utils.js';
+import { getFilter, convertPropertiesString } from '../../utils.js';

 // list of sub jobs
 router.get('/', isAuthenticated, (req, res) => {
@ -1,12 +1,14 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 // Define the main filamentStock schema
 const filamentStockSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     state: {
       type: { type: String, required: true },
-      percent: { type: String, required: true },
+      progress: { type: Number, required: false },
     },
     startingWeight: {
       net: { type: Number, required: true },
@ -16,14 +18,14 @@ const filamentStockSchema = new Schema(
       net: { type: Number, required: true },
       gross: { type: Number, required: true },
     },
-    filament: { type: mongoose.Schema.Types.ObjectId, ref: 'filament' },
+    filament: { type: mongoose.Schema.Types.ObjectId, ref: 'filament', required: true },
   },
   { timestamps: true }
 );

 // Add virtual id getter
 filamentStockSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
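Every schema below also swaps its id virtual from this._id.toHexString() to this._id. Serialized JSON is unchanged (an ObjectId stringifies to the same hex), but strict string comparisons against doc.id now behave differently:

```js
// Before the change: doc.id was a hex string.
doc.id === '64b...e1f'; // true (placeholder id)
// After the change: doc.id is the raw ObjectId.
doc.id === '64b...e1f'; // false
doc.id.toString() === '64b...e1f'; // works under both versions
```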
@ -1,12 +1,16 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 // Define the main partStock schema
 const partStockSchema = new Schema(
   {
-    name: { type: String, required: true },
-    fileName: { type: String, required: false },
-    part: { type: mongoose.Schema.Types.ObjectId, ref: 'part' },
+    _reference: { type: String, default: () => generateId()() },
+    state: {
+      type: { type: String, required: true },
+      progress: { type: Number, required: false },
+    },
+    part: { type: mongoose.Schema.Types.ObjectId, ref: 'part', required: true },
     startingQuantity: { type: Number, required: true },
     currentQuantity: { type: Number, required: true },
   },
@ -15,7 +19,7 @@ const partStockSchema = new Schema(

 // Add virtual id getter
 partStockSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,4 +1,5 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const stockAuditItemSchema = new Schema({
@ -11,6 +12,7 @@ const stockAuditItemSchema = new Schema({

 const stockAuditSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     type: { type: String, required: true },
     status: {
       type: String,
@ -28,7 +30,7 @@ const stockAuditSchema = new Schema(

 // Add virtual id getter
 stockAuditSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,10 +1,11 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const stockEventSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     value: { type: Number, required: true },
     current: { type: Number, required: true },
     unit: { type: String, required: true },
     parent: {
       type: Schema.Types.ObjectId,
@ -33,7 +34,7 @@ const stockEventSchema = new Schema(

 // Add virtual id getter
 stockEventSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,8 +1,10 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const auditLogSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     changes: {
       old: { type: Object, required: false },
       new: { type: Object, required: false },
@ -19,27 +21,6 @@ const auditLogSchema = new Schema(
     parentType: {
       type: String,
       required: true,
-      enum: [
-        'printer',
-        'job',
-        'subJob',
-        'filamentStock',
-        'stockEvent',
-        'vendor',
-        'part',
-        'host',
-        'file',
-        'product',
-        'material',
-        'filament',
-        'gcodeFile',
-        'noteType',
-        'note',
-        'user',
-        'documentSize',
-        'documentTemplate',
-        'documentPrinter',
-      ], // Add other models as needed
     },
     owner: {
       type: Schema.Types.ObjectId,
@ -57,7 +38,7 @@ const auditLogSchema = new Schema(

 // Add virtual id getter
 auditLogSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,14 +1,21 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const documentJobSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: {
       type: String,
       required: true,
       unique: true,
     },
+    objectType: { type: String, required: false },
+    object: {
+      type: Schema.Types.ObjectId,
+      refPath: 'objectType',
+      required: true,
+    },
     state: {
       type: { type: String, required: true, default: 'queued' },
       percent: { type: Number, required: false },
@ -33,7 +40,7 @@ const documentJobSchema = new Schema(

 // Add virtual id getter
 documentJobSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,28 +1,45 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

+const connectionSchema = new Schema(
+  {
+    interface: { type: String, required: true },
+    protocol: { type: String, required: true },
+    host: { type: String, required: true },
+    port: { type: Number, required: false },
+  },
+  { _id: false }
+);
+
 const documentPrinterSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: {
       type: String,
       required: true,
       unique: true,
     },
+    connection: { type: connectionSchema, required: true },
+    currentDocumentSize: { type: Schema.Types.ObjectId, ref: 'documentSize', required: false },
     tags: [{ type: String }],
     online: { type: Boolean, required: true, default: false },
     active: { type: Boolean, required: true, default: true },
     state: {
       type: { type: String, required: true, default: 'offline' },
       message: { type: String, required: false },
       percent: { type: Number, required: false },
+      progress: { type: Number, required: false },
     },
     connectedAt: { type: Date, default: null },
+    host: { type: Schema.Types.ObjectId, ref: 'host', required: true },
+    queue: [{ type: Schema.Types.ObjectId, ref: 'documentJob', required: false }],
   },
   { timestamps: true }
 );

 // Add virtual id getter
 documentPrinterSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,8 +1,10 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const documentSizeSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: {
       type: String,
       required: true,
@ -18,13 +20,18 @@ const documentSizeSchema = new Schema(
       required: true,
       default: 0,
     },
+    infiniteHeight: {
+      type: Boolean,
+      required: true,
+      default: false,
+    },
   },
   { timestamps: true }
 );

 // Add virtual id getter
 documentSizeSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,8 +1,10 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const documentTemplateSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: {
       type: String,
       required: true,
@ -52,7 +54,7 @@ const documentTemplateSchema = new Schema(

 // Add virtual id getter
 documentTemplateSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,7 +1,9 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const filamentSchema = new mongoose.Schema({
+  _reference: { type: String, default: () => generateId()() },
   name: { required: true, type: String },
   barcode: { required: false, type: String },
   url: { required: false, type: String },
@ -18,7 +20,7 @@ const filamentSchema = new mongoose.Schema({
 });

 filamentSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 filamentSchema.set('toJSON', { virtuals: true });
@ -1,7 +1,9 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';

 const fileSchema = new mongoose.Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: { required: true, type: String },
     type: { required: true, type: String },
     extension: { required: true, type: String },
@ -12,7 +14,7 @@ const fileSchema = new mongoose.Schema(
 );

 fileSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 fileSchema.set('toJSON', { virtuals: true });
@ -1,4 +1,5 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';

 // Define the device schema
 const deviceInfoSchema = new mongoose.Schema(
@ -41,6 +42,7 @@ const deviceInfoSchema = new mongoose.Schema(
 );

 const hostSchema = new mongoose.Schema({
+  _reference: { type: String, default: () => generateId()() },
   name: { required: true, type: String },
   tags: [{ required: false, type: String }],
   online: { required: true, type: Boolean, default: false },
@ -53,10 +55,11 @@ const hostSchema = new mongoose.Schema({
   connectedAt: { required: false, type: Date },
   authCode: { type: { required: false, type: String } },
   deviceInfo: { deviceInfoSchema },
+  files: [{ type: mongoose.Schema.Types.ObjectId, ref: 'file' }],
 });

 hostSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 hostSchema.set('toJSON', { virtuals: true });
@ -1,6 +1,8 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';

 const materialSchema = new mongoose.Schema({
+  _reference: { type: String, default: () => generateId()() },
   name: { required: true, type: String },
   url: { required: false, type: String },
   image: { required: false, type: Buffer },
@ -8,7 +10,7 @@ const materialSchema = new mongoose.Schema({
 });

 materialSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 materialSchema.set('toJSON', { virtuals: true });
@ -1,8 +1,10 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const noteTypeSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: {
       type: String,
       required: true,
@ -23,7 +25,7 @@ const noteTypeSchema = new Schema(

 // Add virtual id getter
 noteTypeSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,12 +1,13 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 // Define the main part schema
 const partSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: { type: String, required: true },
     fileName: { type: String, required: false },
     product: { type: mongoose.Schema.Types.ObjectId, ref: 'product' },
-    globalPricing: { type: Boolean, default: true },
     priceMode: { type: String, default: 'margin' },
     amount: { type: Number, required: false },
@ -19,7 +20,7 @@ const partSchema = new Schema(

 // Add virtual id getter
 partSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,9 +1,16 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

+const partSchema = new Schema({
+  part: { type: Schema.Types.ObjectId, ref: 'part', required: true },
+  quantity: { type: Number, required: true },
+});
+
 // Define the main product schema
 const productSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: { type: String, required: true },
     tags: [{ type: String }],
     version: { type: String },
@ -11,12 +18,13 @@ const productSchema = new Schema(
     margin: { type: Number, required: false },
     amount: { type: Number, required: false },
     vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
+    parts: [partSchema],
   },
   { timestamps: true }
 );
 // Add virtual id getter
 productSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,7 +1,9 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';

 const userSchema = new mongoose.Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     username: { required: true, type: String },
     name: { required: true, type: String },
     firstName: { required: false, type: String },
@ -13,7 +15,7 @@ const userSchema = new mongoose.Schema(
 );

 userSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 userSchema.set('toJSON', { virtuals: true });
@ -1,7 +1,9 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';

 const vendorSchema = new mongoose.Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: { required: true, type: String },
     website: { required: false, type: String },
     email: { required: false, type: String },
@ -13,7 +15,7 @@ const vendorSchema = new mongoose.Schema(
 );

 vendorSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 vendorSchema.set('toJSON', { virtuals: true });
@ -1,7 +1,9 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const noteSchema = new mongoose.Schema({
+  _reference: { type: String, default: () => generateId()() },
   parent: {
     type: Schema.Types.ObjectId,
     refPath: 'parentType',
@ -38,7 +40,7 @@ const noteSchema = new mongoose.Schema({
 });

 noteSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 noteSchema.set('toJSON', { virtuals: true });
@ -1,22 +1,30 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

-const gcodeFileSchema = new mongoose.Schema({
-  name: { required: true, type: String },
-  gcodeFileName: { required: false, type: String },
-  size: { type: Number, required: false },
-  filament: { type: Schema.Types.ObjectId, ref: 'filament', required: true },
-  parts: [{ type: Schema.Types.ObjectId, ref: 'part', required: true }],
-  file: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
-  cost: { type: Number, required: false },
-  createdAt: { type: Date },
-  updatedAt: { type: Date },
+const partSchema = new mongoose.Schema({
+  part: { type: Schema.Types.ObjectId, ref: 'part', required: true },
+  quantity: { type: Number, required: true },
 });
+
+const gcodeFileSchema = new mongoose.Schema(
+  {
+    _reference: { type: String, default: () => generateId()() },
+    name: { required: true, type: String },
+    gcodeFileName: { required: false, type: String },
+    size: { type: Number, required: false },
+    filament: { type: Schema.Types.ObjectId, ref: 'filament', required: true },
+    parts: [partSchema],
+    file: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
+    cost: { type: Number, required: false },
+  },
+  { timestamps: true }
+);

 gcodeFileSchema.index({ name: 'text', brand: 'text' });

 gcodeFileSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 gcodeFileSchema.set('toJSON', { virtuals: true });
@ -1,10 +1,13 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const jobSchema = new mongoose.Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     state: {
       type: { required: true, type: String },
+      progress: { type: Number, required: false },
     },
     printers: [{ type: Schema.Types.ObjectId, ref: 'printer', required: false }],
     createdAt: { required: true, type: Date },
@ -29,7 +32,7 @@ const jobSchema = new mongoose.Schema(
 );

 jobSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 jobSchema.set('toJSON', { virtuals: true });
@ -1,4 +1,5 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 // Define the moonraker connection schema
@ -12,12 +13,13 @@ const moonrakerSchema = new Schema(
   { _id: false }
 );

 // Define the alert schema
 const alertSchema = new Schema(
   {
     priority: { type: String, required: true }, // order to show
-    type: { type: String, required: true }, // selectFilament, error, info, message,
+    type: { type: String, required: true }, // error, info, message
     message: { type: String, required: false },
     actions: [{ type: String, required: false, default: [] }],
     _id: { type: String, required: true },
     canDismiss: { type: Boolean, required: true, default: true },
   },
   { timestamps: true, _id: false }
 );
@ -25,11 +27,14 @@ const alertSchema = new Schema(
 // Define the main printer schema
 const printerSchema = new Schema(
   {
+    _reference: { type: String, default: () => generateId()() },
     name: { type: String, required: true },
     online: { type: Boolean, required: true, default: false },
     active: { type: Boolean, required: true, default: true },
     state: {
       type: { type: String, required: true, default: 'offline' },
       percent: { type: Number, required: false },
       message: { type: String, required: false },
+      progress: { type: Number, required: false },
     },
     connectedAt: { type: Date, default: null },
     loadedFilament: {
@ -43,7 +48,7 @@ const printerSchema = new Schema(
     currentJob: { type: Schema.Types.ObjectId, ref: 'job' },
     currentSubJob: { type: Schema.Types.ObjectId, ref: 'subJob' },
     currentFilamentStock: { type: Schema.Types.ObjectId, ref: 'filamentStock' },
-    subJobs: [{ type: Schema.Types.ObjectId, ref: 'subJob' }],
+    queue: [{ type: Schema.Types.ObjectId, ref: 'subJob' }],
     vendor: { type: Schema.Types.ObjectId, ref: 'vendor', default: null },
     host: { type: Schema.Types.ObjectId, ref: 'host', default: null },
     alerts: [alertSchema],
@ -53,7 +58,7 @@ const printerSchema = new Schema(

 // Add virtual id getter
 printerSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 // Configure JSON serialization to include virtuals
@ -1,7 +1,9 @@
 import mongoose from 'mongoose';
+import { generateId } from '../../utils.js';
 const { Schema } = mongoose;

 const subJobSchema = new mongoose.Schema({
+  _reference: { type: String, default: () => generateId()() },
   printer: {
     type: Schema.Types.ObjectId,
     ref: 'printer',
@ -12,7 +14,7 @@ const subJobSchema = new mongoose.Schema({
     ref: 'job',
     required: true,
   },
-  subJobId: {
+  moonrakerJobId: {
     type: String,
     required: false,
   },
@ -23,7 +25,7 @@ const subJobSchema = new mongoose.Schema({
   },
   state: {
     type: { required: true, type: String },
-    percent: { required: false, type: Number },
+    progress: { required: false, type: Number },
   },
   number: {
     type: Number,
@ -42,7 +44,7 @@ const subJobSchema = new mongoose.Schema({
 });

 subJobSchema.virtual('id').get(function () {
-  return this._id.toHexString();
+  return this._id;
 });

 subJobSchema.set('toJSON', { virtuals: true });
@ -120,6 +120,7 @@ export const newFilamentStockRouteHandler = async (req, res) => {
     startingWeight: req.body.startingWeight,
     currentWeight: req.body.currentWeight,
     filament: req.body.filament,
+    state: req.body.state,
   };
   const result = await newObject({
     model: filamentStockModel,
@ -2,10 +2,17 @@ import dotenv from 'dotenv';
 import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';

+import {
+  deleteObject,
+  listObjects,
+  getObject,
+  editObject,
+  newObject,
+  listObjectsByProperties,
+} from '../../database/database.js';
 dotenv.config();

-const logger = log4js.getLogger('PartStocks');
+const logger = log4js.getLogger('Part Stocks');
 logger.level = process.env.LOG_LEVEL;

 export const listPartStocksRouteHandler = async (
@ -14,126 +21,139 @@ export const listPartStocksRouteHandler = async (
   page = 1,
   limit = 25,
   property = '',
-  filter = {}
+  filter = {},
+  search = '',
+  sort = '',
+  order = 'ascend'
 ) => {
-  try {
-    // Calculate the skip value based on the page number and limit
-    const skip = (page - 1) * limit;
+  const result = await listObjects({
+    model: partStockModel,
+    page,
+    limit,
+    property,
+    filter,
+    search,
+    sort,
+    order,
+    populate: [{ path: 'part' }],
+  });

-    let partStock;
-    let aggregateCommand = [];
-
-    if (filter != {}) {
-      // use filtering if present
-      aggregateCommand.push({ $match: filter });
-    }
-
-    if (property != '') {
-      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
-      aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
-    } else {
-      aggregateCommand.push({ $project: { image: 0, url: 0 } });
-    }
-
-    aggregateCommand.push({ $skip: skip });
-    aggregateCommand.push({ $limit: Number(limit) });
-
-    console.log(aggregateCommand);
-
-    partStock = await partStockModel.aggregate(aggregateCommand);
-
-    logger.trace(
-      `List of partStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
-      partStock
-    );
-    res.send(partStock);
-  } catch (error) {
-    logger.error('Error listing partStocks:', error);
-    res.status(500).send({ error: error });
+  if (result?.error) {
+    logger.error('Error listing part stocks.');
+    res.status(result.code).send(result);
+    return;
   }
+
+  logger.debug(`List of part stocks (Page ${page}, Limit ${limit}). Count: ${result.length}`);
+  res.send(result);
+};
+
+export const listPartStocksByPropertiesRouteHandler = async (
+  req,
+  res,
+  properties = '',
+  filter = {},
+  masterFilter = {}
+) => {
+  const result = await listObjectsByProperties({
+    model: partStockModel,
+    properties,
+    filter,
+    populate: ['part'],
+    masterFilter,
+  });
+
+  if (result?.error) {
+    logger.error('Error listing part stocks.');
+    res.status(result.code).send(result);
+    return;
+  }
+
+  logger.debug(`List of part stocks. Count: ${result.length}`);
+  res.send(result);
 };

 export const getPartStockRouteHandler = async (req, res) => {
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
-    // Fetch the partStock with the given remote address
-    const partStock = await partStockModel.findOne({
-      _id: id,
-    });
-
-    if (!partStock) {
-      logger.warn(`PartStock not found with supplied id.`);
-      return res.status(404).send({ error: 'Print job not found.' });
-    }
-
-    logger.trace(`PartStock with ID: ${id}:`, partStock);
-    res.send(partStock);
-  } catch (error) {
-    logger.error('Error fetching PartStock:', error);
-    res.status(500).send({ error: error.message });
+  const id = req.params.id;
+  const result = await getObject({
+    model: partStockModel,
+    id,
+    populate: [{ path: 'part' }],
+  });
+  if (result?.error) {
+    logger.warn(`Part Stock not found with supplied id.`);
+    return res.status(result.code).send(result);
   }
+  logger.debug(`Retreived part stock with ID: ${id}`);
+  res.send(result);
 };

 export const editPartStockRouteHandler = async (req, res) => {
-  try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
-    // Fetch the partStock with the given remote address
-    const partStock = await partStockModel.findOne({ _id: id });
+  // Get ID from params
+  const id = new mongoose.Types.ObjectId(req.params.id);

-    if (!partStock) {
-      // Error handling
-      logger.warn(`PartStock not found with supplied id.`);
-      return res.status(404).send({ error: 'Print job not found.' });
-    }
+  logger.trace(`Part Stock with ID: ${id}`);

-    logger.trace(`PartStock with ID: ${id}:`, partStock);
+  const updateData = {};
+  // Create audit log before updating
+  const result = await editObject({
+    model: partStockModel,
+    id,
+    updateData,
+    user: req.user,
+  });

-    try {
-      const updateData = {
-        updatedAt: new Date(),
-        contact: req.body.contact,
-        country: req.body.country,
-        name: req.body.name,
-        website: req.body.website,
-        phone: req.body.phone,
-        email: req.body.email,
-      };
-
-      const result = await partStockModel.updateOne({ _id: id }, { $set: updateData });
-      if (result.nModified === 0) {
-        logger.error('No PartStock updated.');
-        res.status(500).send({ error: 'No partStocks updated.' });
-      }
-    } catch (updateError) {
-      logger.error('Error updating partStock:', updateError);
-      res.status(500).send({ error: updateError.message });
-    }
-    res.send('OK');
-  } catch (fetchError) {
-    logger.error('Error fetching partStock:', fetchError);
-    res.status(500).send({ error: fetchError.message });
+  if (result.error) {
+    logger.error('Error editing part stock:', result.error);
+    res.status(result).send(result);
+    return;
   }
+
+  logger.debug(`Edited part stock with ID: ${id}`);
+
+  res.send(result);
 };

 export const newPartStockRouteHandler = async (req, res) => {
-  try {
-    let { ...newPartStock } = req.body;
-    newPartStock = {
-      ...newPartStock,
-      createdAt: new Date(),
-      updatedAt: new Date(),
-    };
-
-    const result = await partStockModel.create(newPartStock);
-    if (result.nCreated === 0) {
-      logger.error('No partStock created.');
-      res.status(500).send({ error: 'No partStock created.' });
-    }
-    res.status(200).send({ status: 'ok' });
-  } catch (updateError) {
-    logger.error('Error updating partStock:', updateError);
-    res.status(500).send({ error: updateError.message });
+  const newData = {
+    updatedAt: new Date(),
+    startingQuantity: req.body.startingQuantity,
+    currentQuantity: req.body.currentQuantity,
+    part: req.body.part,
+    state: req.body.state,
+  };
+  const result = await newObject({
+    model: partStockModel,
+    newData,
+    user: req.user,
+  });
+  if (result.error) {
+    logger.error('No part stock created:', result.error);
+    return res.status(result.code).send(result);
   }
+
+  logger.debug(`New part stock with ID: ${result._id}`);
+
+  res.send(result);
+};
+
+export const deletePartStockRouteHandler = async (req, res) => {
+  // Get ID from params
+  const id = new mongoose.Types.ObjectId(req.params.id);
+
+  logger.trace(`Part Stock with ID: ${id}`);
+
+  const result = await deleteObject({
+    model: partStockModel,
+    id,
+    user: req.user,
+  });
+  if (result.error) {
+    logger.error('No part stock deleted:', result.error);
+    return res.status(result.code).send(result);
+  }
+
+  logger.debug(`Deleted part stock with ID: ${result._id}`);
+
+  res.send(result);
 };
@ -2,6 +2,14 @@ import dotenv from 'dotenv';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
  deleteObject,
  listObjects,
  getObject,
  editObject,
  newObject,
  listObjectsByProperties,
} from '../../database/database.js';
dotenv.config();

const logger = log4js.getLogger('Stock Events');
@ -13,94 +21,147 @@ export const listStockEventsRouteHandler = async (
  page = 1,
  limit = 25,
  filter = {},
  sort = 'createdAt',
  sort = '',
  order = 'ascend'
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;
    const sortOrder = order === 'descend' ? 1 : -1;
  const result = await listObjects({
    model: stockEventModel,
    page,
    limit,
    filter,
    sort,
    order,
    populate: [
      { path: 'owner', select: 'name _id' },
      { path: 'parent', select: 'name _id' },
    ],
  });

    if (!sort || sort === '') {
      sort = 'createdAt';
    }

    // Translate parent._id to parent for Mongoose
    if (filter['parent._id']) {
      filter.parent = filter['parent._id'];
      delete filter['parent._id'];
    }

    // Translate owner._id to owner for Mongoose
    if (filter['owner._id']) {
      filter.owner = filter['owner._id'];
      delete filter['owner._id'];
    }

    // Use find with population and filter
    let query = stockEventModel
      .find(filter)
      .sort({ [sort]: sortOrder })
      .skip(skip)
      .limit(Number(limit))
      .populate('owner', 'name _id')
      .populate('parent', 'name _id');

    const stockEvents = await query;
    logger.trace(
      `List of stock events (Page ${page}, Limit ${limit}, Sort ${sort}, Order ${order}):`,
      stockEvents
    );
    res.send(stockEvents);
  } catch (error) {
    logger.error('Error listing stock events:', error);
    res.status(500).send({ error: error });
  if (result?.error) {
    logger.error('Error listing stock events.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of stock events (Page ${page}, Limit ${limit}). Count: ${result.length}`);
  res.send(result);
};

export const listStockEventsByPropertiesRouteHandler = async (
  req,
  res,
  properties = '',
  filter = {},
  masterFilter = {}
) => {
  const result = await listObjectsByProperties({
    model: stockEventModel,
    properties,
    filter,
    populate: [
      { path: 'owner', select: 'name _id' },
      { path: 'parent', select: 'name _id' },
    ],
    masterFilter,
  });

  if (result?.error) {
    logger.error('Error listing stock events.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of stock events. Count: ${result.length}`);
  res.send(result);
};

export const getStockEventRouteHandler = async (req, res) => {
  try {
    const id = new mongoose.Types.ObjectId(req.params.id);
    const stockEvent = await stockEventModel
      .findOne({
        _id: id,
      })
      .populate('filamentStock')
      .populate('subJob')
      .populate('job');

    if (!stockEvent) {
      logger.warn(`Stock event not found with supplied id.`);
      return res.status(404).send({ error: 'Stock event not found.' });
    }

    logger.trace(`Stock event with ID: ${id}:`, stockEvent);
    res.send(stockEvent);
  } catch (error) {
    logger.error('Error fetching stock event:', error);
    res.status(500).send({ error: error.message });
  const id = req.params.id;
  const result = await getObject({
    model: stockEventModel,
    id,
    populate: [
      { path: 'owner', select: 'name _id' },
      { path: 'parent', select: 'name _id' },
    ],
  });
  if (result?.error) {
    logger.warn(`Stock event not found with supplied id.`);
    return res.status(result.code).send(result);
  }
  logger.debug(`Retrieved stock event with ID: ${id}`);
  res.send(result);
};

export const newStockEventRouteHandler = async (req, res) => {
  try {
    const newStockEvent = {
      type: req.body.type,
      value: req.body.value,
      subJob: req.body.subJob ? new mongoose.Types.ObjectId(req.body.subJob) : null,
      job: req.body.job ? new mongoose.Types.ObjectId(req.body.job) : null,
      filamentStock: new mongoose.Types.ObjectId(req.body.filamentStock),
      timestamp: new Date(),
    };

    const result = await stockEventModel.create(newStockEvent);
    if (!result) {
      logger.error('No stock event created.');
      return res.status(500).send({ error: 'No stock event created.' });
    }
    return res.send({ status: 'ok', id: result._id });
  } catch (error) {
    logger.error('Error adding stock event:', error);
    return res.status(500).send({ error: error.message });
  const newData = {
    value: req.body.value,
    current: req.body.current,
    unit: req.body.unit,
    parent: req.body.parent,
    parentType: req.body.parentType,
    owner: req.body.owner,
    ownerType: req.body.ownerType,
    timestamp: req.body.timestamp || new Date(),
  };
  const result = await newObject({
    model: stockEventModel,
    newData,
    user: req.user,
  });
  if (result.error) {
    logger.error('No stock event created:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`New stock event with ID: ${result._id}`);

  res.send(result);
};

export const editStockEventRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`Stock Event with ID: ${id}`);

  const updateData = {};
  // Create audit log before updating
  const result = await editObject({
    model: stockEventModel,
    id,
    updateData,
    user: req.user,
  });

  if (result.error) {
    logger.error('Error editing stock event:', result.error);
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited stock event with ID: ${id}`);

  res.send(result);
};

export const deleteStockEventRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`Stock Event with ID: ${id}`);

  const result = await deleteObject({
    model: stockEventModel,
    id,
    user: req.user,
  });
  if (result.error) {
    logger.error('No stock event deleted:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`Deleted stock event with ID: ${result._id || id}`);

  res.send(result);
};
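The parent._id / owner._id rewrites above exist because clients filter on the populated shape ({ owner: { _id, name } }) while the stored document only holds an ObjectId. The round trip, worked through with a made-up id:

// Hypothetical request: GET /stockevents?owner._id=64f1c2d3e4a5b6c7d8e9f0a1
const filter = { 'owner._id': '64f1c2d3e4a5b6c7d8e9f0a1' };
if (filter['owner._id']) {
  filter.owner = filter['owner._id']; // the raw foreign-key field Mongoose can match
  delete filter['owner._id'];
}
// filter is now { owner: '64f1c2d3e4a5b6c7d8e9f0a1' }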
@ -74,6 +74,7 @@ export const getDocumentJobRouteHandler = async (req, res) => {
  const result = await getObject({
    model: documentJobModel,
    id,
    populate: ['documentTemplate', 'documentPrinter', 'object'],
  });
  if (result?.error) {
    logger.warn(`Document Job not found with supplied id.`);
@ -89,12 +90,7 @@ export const editDocumentJobRouteHandler = async (req, res) => {

  logger.trace(`Document Job with ID: ${id}`);

  const updateData = {
    updatedAt: new Date(),
    name: req.body.name,
    width: req.body.width,
    height: req.body.height,
  };
  const updateData = {};
  // Create audit log before updating
  const result = await editObject({
    model: documentJobModel,
@ -118,8 +114,14 @@ export const newDocumentJobRouteHandler = async (req, res) => {
  const newData = {
    updatedAt: new Date(),
    name: req.body.name,
    width: req.body.width,
    height: req.body.height,
    documentPrinter: req.body.documentPrinter,
    documentTemplate: req.body.documentTemplate,
    objectType: req.body.objectType,
    object: req.body.object,
    content: req.body.content,
    state: { type: 'draft' },
    createdAt: new Date(),
    updatedAt: new Date(),
  };
  const result = await newObject({
    model: documentJobModel,
@ -35,7 +35,7 @@ export const listDocumentPrintersRouteHandler = async (
    search,
    sort,
    order,
    populate: ['documentSize'],
    populate: ['currentDocumentSize', 'host'],
  });

  if (result?.error) {
@ -60,7 +60,7 @@ export const listDocumentPrintersByPropertiesRouteHandler = async (
    model: documentPrinterModel,
    properties,
    filter,
    populate: ['documentSize'],
    populate: ['currentDocumentSize', 'host'],
  });

  if (result?.error) {
@ -78,7 +78,7 @@ export const getDocumentPrinterRouteHandler = async (req, res) => {
  const result = await getObject({
    model: documentPrinterModel,
    id,
    populate: ['documentSize'],
    populate: ['currentDocumentSize', 'host'],
  });
  if (result?.error) {
    logger.warn(`Document Printer not found with supplied id.`);
@ -99,10 +99,9 @@ export const editDocumentPrinterRouteHandler = async (req, res) => {
    name: req.body.name,
    tags: req.body.tags,
    active: req.body.active,
    global: req.body.global,
    parent: req.body.parent,
    documentSize: req.body.documentSize,
    documentPrinters: req.body.documentPrinters,
    connection: req.body.connection,
    currentDocumentSize: req.body.currentDocumentSize,
    host: req.body.host,
  };
  // Create audit log before updating
  const result = await editObject({
@ -129,10 +128,9 @@ export const newDocumentPrinterRouteHandler = async (req, res) => {
    name: req.body.name,
    tags: req.body.tags,
    active: req.body.active,
    isGlobal: req.body.isGlobal,
    globalDocumentPrinter: req.body.globalDocumentPrinter,
    documentSize: req.body.documentSize,
    documentPrinters: req.body.documentPrinters,
    connection: req.body.connection,
    currentDocumentSize: req.body.currentDocumentSize,
    host: req.body.host,
  };
  const result = await newObject({
    model: documentPrinterModel,
@ -94,6 +94,7 @@ export const editDocumentSizeRouteHandler = async (req, res) => {
    name: req.body.name,
    width: req.body.width,
    height: req.body.height,
    infiniteHeight: req.body.infiniteHeight,
  };
  // Create audit log before updating
  const result = await editObject({
@ -120,6 +121,7 @@ export const newDocumentSizeRouteHandler = async (req, res) => {
    name: req.body.name,
    width: req.body.width,
    height: req.body.height,
    infiniteHeight: req.body.infiniteHeight,
  };
  const result = await newObject({
    model: documentSizeModel,
@ -304,6 +304,11 @@ export const getFileContentRouteHandler = async (req, res) => {
    res.set('Content-Type', file.type || 'application/octet-stream');
    res.set('Content-Disposition', `attachment; filename="${file.name}${file.extension}"`);

    // Expose file size so clients can compute download progress
    if (typeof file.size === 'number' && !Number.isNaN(file.size)) {
      res.set('Content-Length', String(file.size));
    }

    // Stream or send buffer
    if (body && typeof body.pipe === 'function') {
      // Handle stream errors
@ -358,6 +363,12 @@ export const getFileContentRouteHandler = async (req, res) => {

      res.set('Content-Type', file.type || 'application/octet-stream');
      res.set('Content-Disposition', `inline; filename="${file.name}${file.extension || ''}"`);

      // Ensure Content-Length is set for progress events if possible.
      const length =
        typeof file.size === 'number' && !Number.isNaN(file.size) ? file.size : data.length;
      res.set('Content-Length', String(length));

      return res.send(data);
    });
  } else {
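Setting Content-Length up front is what lets a client compute download progress. A minimal consumer sketch (the /files/:id/content path is an assumption, not a documented route):

// Hedged sketch: reporting progress against the Content-Length set above.
const response = await fetch('/files/64f1c2d3e4a5b6c7d8e9f0a1/content');
const total = Number(response.headers.get('Content-Length')) || 0;
const reader = response.body.getReader();
let received = 0;
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  received += value.length;
  if (total) console.log(`Downloaded ${Math.round((received / total) * 100)}%`);
}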
@ -76,6 +76,7 @@ export const getHostRouteHandler = async (req, res) => {
  const result = await getObject({
    model: hostModel,
    id,
    populate: ['files'],
  });
  if (result?.error) {
    logger.warn(`Host not found with supplied id.`);
@ -96,6 +97,7 @@ export const editHostRouteHandler = async (req, res) => {
    name: req.body.name,
    active: req.body.active,
    tags: req.body.tags,
    files: req.body.files,
  };

  const result = await editObject({
@ -35,7 +35,7 @@ export const listPartsRouteHandler = async (
    search,
    sort,
    order,
    populate: ['vendor', 'product'],
    populate: ['vendor'],
  });

  if (result?.error) {
@ -53,7 +53,7 @@ export const listPartsByPropertiesRouteHandler = async (req, res, properties = '
    model: partModel,
    properties,
    filter,
    populate: ['vendor', 'product'],
    populate: ['vendor'],
  });

  if (result?.error) {
@ -71,7 +71,7 @@ export const getPartRouteHandler = async (req, res) => {
  const result = await getObject({
    model: partModel,
    id,
    populate: ['vendor', 'product'],
    populate: ['vendor'],
  });
  if (result?.error) {
    logger.warn(`Part not found with supplied id.`);
@ -93,7 +93,6 @@ export const editPartRouteHandler = async (req, res) => {
    globalPricing: req.body.globalPricing,
    file: req.body?.file,
    vendor: req.body?.vendor,
    product: req.body?.product,
    margin: req.body?.margin,
    amount: req.body?.amount,
    priceMode: req.body?.priceMode,
@ -124,7 +123,6 @@ export const newPartRouteHandler = async (req, res) => {
    globalPricing: req.body.globalPricing,
    file: req.body?.file,
    vendor: req.body?.vendor,
    product: req.body?.product,
    margin: req.body?.margin,
    amount: req.body?.amount,
    priceMode: req.body?.priceMode,
@ -76,7 +76,7 @@ export const getProductRouteHandler = async (req, res) => {
  const result = await getObject({
    model: productModel,
    id,
    populate: ['vendor'],
    populate: ['vendor', 'parts.part'],
  });
  if (result?.error) {
    logger.warn(`Product not found with supplied id.`);
@ -100,6 +100,8 @@ export const editProductRouteHandler = async (req, res) => {
    margin: req.body.margin,
    amount: req.body.amount,
    priceMode: req.body.priceMode,
    vendor: req.body.vendor,
    parts: req.body.parts,
  };
  // Create audit log before updating
  const result = await editObject({
@ -129,6 +131,8 @@ export const newProductRouteHandler = async (req, res) => {
    margin: req.body.margin,
    amount: req.body.amount,
    priceMode: req.body.priceMode,
    vendor: req.body.vendor,
    parts: req.body.parts,
  };

  const result = await newObject({
@ -17,6 +17,9 @@ import { noteModel } from '../../schemas/misc/note.schema.js';
import { documentSizeModel } from '../../schemas/management/documentsize.schema.js';
import { documentTemplateModel } from '../../schemas/management/documenttemplate.schema.js';
import { hostModel } from '../../schemas/management/host.schema.js';
import { documentPrinterModel } from '../../schemas/management/documentprinter.schema.js';
import { documentJobModel } from '../../schemas/management/documentjob.schema.js';
import { fileModel } from '../../schemas/management/file.schema.js';

// Map prefixes to models and id fields
const PREFIX_MODEL_MAP = {
@ -39,7 +42,10 @@ const PREFIX_MODEL_MAP = {
  NTE: { model: noteModel, idField: '_id', type: 'note' },
  DSZ: { model: documentSizeModel, idField: '_id', type: 'documentSize' },
  DTP: { model: documentTemplateModel, idField: '_id', type: 'documentTemplate' },
  DPR: { model: documentPrinterModel, idField: '_id', type: 'documentPrinter' },
  DJB: { model: documentJobModel, idField: '_id', type: 'documentJob' },
  HST: { model: hostModel, idField: '_id', type: 'host' },
  FLE: { model: fileModel, idField: '_id', type: 'file' },
};

/**
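The map is keyed by three-letter object prefixes, and the spotlight handler below resolves them through getModelByPrefix. That function is not shown in this diff; under the obvious assumption that it is a plain map lookup, it would reduce to:

// Hedged sketch of getModelByPrefix (actual implementation not shown in this diff).
function getModelByPrefix(prefix) {
  return PREFIX_MODEL_MAP[prefix?.toUpperCase()] || null;
}
// getModelByPrefix('HST') -> { model: hostModel, idField: '_id', type: 'host' }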
@ -1,10 +1,10 @@
import dotenv from 'dotenv';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getAllModels, getModelByName, getModelByPrefix, PREFIX_MODEL_MAP } from './model.js';
import { getAllModels, getModelByPrefix } from './model.js';
dotenv.config();

const logger = log4js.getLogger('Jobs');
const logger = log4js.getLogger('Spotlight');
logger.level = process.env.LOG_LEVEL;

// Helper function to build search filter from query parameters
@ -48,6 +48,7 @@ const trimSpotlightObject = (object, objectType) => {
    color: object.color || undefined,
    updatedAt: object.updatedAt || undefined,
    objectType: objectType || undefined,
    online: object.online || undefined,
  };
};

@ -63,6 +64,8 @@ export const getSpotlightRouteHandler = async (req, res) => {
    const delimiter = query.substring(3, 4);
    const suffix = query.substring(4);

    logger.trace(`Spotlight query: ${query}`);

    if (delimiter == ':') {
      const prefixEntry = getModelByPrefix(prefix);
      if (!prefixEntry || !prefixEntry.model) {
@ -161,6 +164,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
      res.status(200).send(deduped);
      return;
    }
    return res.status(200).send([]);
  } catch (error) {
    logger.error('Error in spotlight lookup:', error);
    res.status(500).send({ error: error });
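For reference, the substring arithmetic above splits a spotlight query into a three-letter prefix, a delimiter, and an id suffix. Worked through on a made-up query:

// Hypothetical spotlight query: 'HST:QZ7K2M4P9XA1'
const query = 'HST:QZ7K2M4P9XA1';
const prefix = query.substring(0, 3); // 'HST' -> hostModel via PREFIX_MODEL_MAP
const delimiter = query.substring(3, 4); // ':' -> takes the direct-id branch
const suffix = query.substring(4); // 'QZ7K2M4P9XA1' -> matched against the entry's idField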
@ -9,6 +9,7 @@ import {
  listObjectsByProperties,
  newObject,
} from '../../database/database.js';
import { getFileContentRouteHandler } from '../management/files.js';
import mongoose from 'mongoose';

dotenv.config();
@ -77,7 +78,7 @@ export const getGCodeFileRouteHandler = async (req, res) => {
  const result = await getObject({
    model: gcodeFileModel,
    id,
    populate: ['filament'],
    populate: ['filament', 'parts.part'],
  });
  if (result?.error) {
    logger.warn(`GCodeFile not found with supplied id.`);
@ -87,17 +88,36 @@ export const getGCodeFileRouteHandler = async (req, res) => {
  res.send(result);
};

export const getGCodeFileContentRouteHandler = async (req, res) => {
  const id = req.params.id;
  const result = await getObject({
    model: gcodeFileModel,
    id,
  });

  if (result?.error) {
    logger.error('Error getting gcodeFile content.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Retrieved gcodeFile content with ID: ${id}`);
  return getFileContentRouteHandler({ ...req, params: { id: result.file._id } }, res);
};

export const editGCodeFileRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`GCodeFile with ID: ${id}`);
  console.log('REQ.BODY', req.body);

  const updateData = {
    updatedAt: new Date(),
    name: req.body.name,
    file: req.body.file,
    filament: req.body.filament,
    parts: req.body.parts,
  };
  // Create audit log before updating
  const result = await editObject({
@ -124,6 +144,7 @@ export const newGCodeFileRouteHandler = async (req, res) => {
    name: req.body.name,
    file: req.body.file,
    filament: req.body.filament,
    parts: req.body.parts,
  };
  const result = await newObject({
    model: gcodeFileModel,
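getGCodeFileContentRouteHandler above reuses the files handler by forwarding a synthesized request: spread the original req, swap params, and let the downstream handler own the response. The same pattern in isolation, with a deliberately hypothetical handler and model:

// Hedged sketch of the delegation pattern; thumbnailModel and this route are
// illustrative, not part of this codebase.
const getThumbnailContentRouteHandler = async (req, res) => {
  const doc = await thumbnailModel.findById(req.params.id);
  // Delegate: the downstream handler sees the file id as if it were the route param.
  return getFileContentRouteHandler({ ...req, params: { id: doc.file._id } }, res);
};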
@ -36,6 +36,7 @@ export const listPrintersRouteHandler = async (
    search,
    sort,
    order,
    populate: ['host'],
  });

  if (result?.error) {
@ -58,7 +59,7 @@ export const listPrintersByPropertiesRouteHandler = async (
    model: printerModel,
    properties,
    filter,
    populate: 'vendor',
    populate: ['vendor', 'host'],
  });

  if (result?.error) {
@ -99,6 +100,7 @@ export const editPrinterRouteHandler = async (req, res) => {
    tags: req.body.tags,
    vendor: req.body.vendor,
    host: req.body.host,
    active: req.body.active,
  };
  // Create audit log before updating
  const result = await editObject({
@ -128,6 +130,7 @@ export const newPrinterRouteHandler = async (req, res) => {
    tags: req.body.tags,
    vendor: req.body.vendor,
    host: req.body.host,
    active: req.body.active,
  };
  const result = await newObject({
    model: printerModel,
137
src/utils.js
@ -3,6 +3,31 @@ import { auditLogModel } from './schemas/management/auditlog.schema.js';
import exifr from 'exifr';
import { etcdServer } from './database/etcd.js';
import { natsServer } from './database/nats.js';
import log4js from 'log4js';
import dotenv from 'dotenv';
import crypto from 'crypto';
import canonicalize from 'canonical-json';
dotenv.config();

const logger = log4js.getLogger('Utils');
logger.level = process.env.LOG_LEVEL;

import { customAlphabet } from 'nanoid';

const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
const generateId = () => {
  // 12 characters
  return customAlphabet(ALPHABET, 12);
};

function buildWildcardRegexPattern(input) {
  // Escape all regex special chars except * (which we treat as a wildcard)
  const escaped = input.replace(/[.+?^${}()|[\]\\]/g, '\\$&');
  // Convert * to "match anything"
  const withWildcards = escaped.replace(/\*/g, '.*');
  // Anchor so that, without *, this is an exact match
  return `^${withWildcards}$`;
}

function parseFilter(property, value) {
  if (typeof value === 'string') {
@ -26,10 +51,12 @@ function parseFilter(property, value) {
    return { [property]: parseFloat(trimmed) };
  }

  // Default to case-insensitive regex for non-numeric strings
  // Default to case-insensitive regex for non-numeric strings.
  // Supports * as a wildcard (e.g. "filament*" matches "filament stock").
  const pattern = buildWildcardRegexPattern(trimmed);
  return {
    [property]: {
      $regex: trimmed,
      $regex: pattern,
      $options: 'i',
    },
  };
@ -396,6 +423,60 @@ async function distributeDelete(value, type) {
  await natsServer.publish(`${type}s.delete`, value);
}

async function distributeChildUpdate(oldValue, newValue, id, model) {
  const oldPopulatedObjects = populateObjects(oldValue, model) || [];
  const oldPopulatedObjectIds = oldPopulatedObjects.map((populate) => populate._id.toString());
  const newPopulatedObjects = populateObjects(newValue, model) || [];
  const newPopulatedObjectIds = newPopulatedObjects.map((populate) => populate._id.toString());

  for (const populated of oldPopulatedObjects) {
    if (!newPopulatedObjectIds.includes(populated._id.toString())) {
      logger.debug(
        `Distributing child update for ${populated.ref}s.${populated._id}.events.childUpdate`
      );
      await natsServer.publish(`${populated.ref}s.${populated._id}.events.childUpdate`, {
        type: 'childUpdate',
        data: { parentId: id, parentType: model.modelName },
      });
    }
  }
  for (const populated of newPopulatedObjects) {
    if (!oldPopulatedObjectIds.includes(populated._id.toString())) {
      logger.debug(
        `Distributing child update for ${populated.ref}s.${populated._id}.events.childUpdate`
      );
      await natsServer.publish(`${populated.ref}s.${populated._id}.events.childUpdate`, {
        type: 'childUpdate',
        data: { parentId: id, parentType: model.modelName },
      });
    }
  }
}

async function distributeChildDelete(value, id, model) {
  const populatedObjects = populateObjects(value, model) || [];
  for (const populated of populatedObjects) {
    logger.debug(
      `Distributing child delete for ${populated.ref}s.${populated._id}.events.childDelete`
    );
    await natsServer.publish(`${populated.ref}s.${populated._id}.events.childDelete`, {
      type: 'childDelete',
      data: { parentId: id, parentType: model.modelName },
    });
  }
}

async function distributeChildNew(value, id, model) {
  const populatedObjects = populateObjects(value, model) || [];
  for (const populated of populatedObjects) {
    logger.debug(`Distributing child new for ${populated.ref}s.${populated._id}.events.childNew`);
    await natsServer.publish(`${populated.ref}s.${populated._id}.events.childNew`, {
      type: 'childNew',
      data: { parentId: id, parentType: model.modelName },
    });
  }
}

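The distributeChild* helpers publish to per-document NATS subjects (`<ref>s.<id>.events.childUpdate` and friends) so that anything watching a referenced document hears when an object referencing it is created, updated, or deleted. A hedged consumer sketch using the standard NATS subscribe pattern (the subject is taken from the publish calls above; `nc` is an already-connected client):

// Watch child events for any filament document.
const sub = nc.subscribe('filaments.*.events.childUpdate');
for await (const msg of sub) {
  const { data } = JSON.parse(new TextDecoder().decode(msg.data));
  console.log(`Child event: ${data.parentType} ${data.parentId} changed`);
}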
function flatternObjectIds(object) {
  if (!object || typeof object !== 'object') {
    return object;
@ -464,6 +545,8 @@ function getFilter(query, allowedFilters, parse = true) {
  let filter = {};
  for (const [key, value] of Object.entries(query)) {
    if (allowedFilters.includes(key)) {
      console.log('key', key);
      console.log('value', value);
      const parsedFilter = parse ? parseFilter(key, value) : { [key]: value };
      filter = { ...filter, ...parsedFilter };
    }
@ -546,6 +629,51 @@ function getFieldsByRef(model, refName) {
  return fields;
}

// Build a nested populate specification by walking the schema graph,
// instead of recursing over already-populated documents.
function buildDeepPopulateSpec(object, model, populated = new Set()) {
  // Prevent infinite recursion across cyclic model relationships
  if (populated.has(model.modelName)) return [];
  populated.add(model.modelName);

  const schema = model.schema;
  const populateSpec = [];

  schema.eachPath((pathname, schemaType) => {
    const directRef = schemaType.options?.ref;
    const arrayRef = schemaType.caster?.options?.ref;
    const ref = directRef || arrayRef;
    if (!ref) return;

    const refModel = model.db.model(ref);
    const childPopulate = buildDeepPopulateSpec(object, refModel, populated);

    const id = object[pathname]?._id || object[pathname];

    if (!id) return;

    if (childPopulate.length > 0) {
      populateSpec.push({ path: pathname, populate: childPopulate, ref: ref, _id: id });
    } else {
      populateSpec.push({ path: pathname, ref: ref, _id: id });
    }
  });

  return populateSpec;
}

function populateObjects(object, model, populated = new Set()) {
  const populateSpec = buildDeepPopulateSpec(object, model, populated);

  return populateSpec;
}

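The spec returned here mirrors the { path, populate } shape Mongoose accepts, extended with the ref/_id bookkeeping the distributeChild* helpers read. A worked example under an assumed document shape:

// Assuming a gcodeFile document { file: <ObjectId A>, filament: <ObjectId B> }
// whose schema declares refs on both paths, populateObjects would yield roughly:
// [
//   { path: 'file', ref: 'file', _id: A },
//   { path: 'filament', ref: 'filament', _id: B },
// ]
// with a nested `populate` array on any entry whose referenced schema
// has refs of its own.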
function jsonToCacheKey(obj) {
  const normalized = canonicalize(obj);
  const hash = crypto.createHash('sha256').update(normalized).digest('hex');
  return hash;
}

export {
  parseFilter,
  convertToCamelCase,
@ -558,9 +686,14 @@ export {
  distributeUpdate,
  distributeNew,
  distributeDelete,
  distributeChildUpdate,
  distributeChildDelete,
  distributeChildNew,
  getFilter,
  convertPropertiesString,
  getFileMeta,
  modelHasRef,
  getFieldsByRef,
  jsonToCacheKey,
  generateId,
};
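Because jsonToCacheKey canonicalizes before hashing, property order cannot change the resulting cache key; only the values can. A quick check:

// canonical-json sorts keys, so these two hash identically:
jsonToCacheKey({ page: 1, limit: 25 }) === jsonToCacheKey({ limit: 25, page: 1 }); // true
// ...while any change in value produces a different key:
jsonToCacheKey({ page: 2, limit: 25 }) === jsonToCacheKey({ page: 1, limit: 25 }); // false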