Compare commits

...

10 Commits

79 changed files with 1774 additions and 475 deletions

99
config.json Normal file
View File

@ -0,0 +1,99 @@
{
"development": {
"server": {
"port": 8787,
"logLevel": "debug"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client"
},
"requiredRoles": []
},
"app": {
"urlClient": "http://localhost:3000",
"urlElectronClient": "http://localhost:3000",
"urlApi": "http://localhost:8787",
"devAuthClient": "http://localhost:3500"
},
"database": {
"mongo": {
"url": "mongodb://127.0.0.1:27017/farmcontrol",
"link": "127.0.0.1:27017"
},
"redis": {
"url": "",
"host": "localhost",
"port": 6379,
"password": "",
"cacheTtl": 30
},
"nats": {
"host": "localhost",
"port": 4222
}
},
"storage": {
"fileStorage": "./uploads",
"ceph": {
"accessKeyId": "minioadmin",
"secretAccessKey": "minioadmin123",
"endpoint": "http://127.0.0.1:9000",
"region": "us-east-1",
"filesBucket": "farmcontrol"
}
},
"otpExpiryMins": 0.5
},
"production": {
"server": {
"port": 8080,
"logLevel": "info"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client"
},
"requiredRoles": []
},
"app": {
"urlClient": "http://localhost:3000",
"urlElectronClient": "http://localhost:3000",
"urlApi": "http://localhost:8080",
"devAuthClient": "http://localhost:3500"
},
"database": {
"mongo": {
"url": "mongodb://localhost:27017/farmcontrol",
"link": "localhost:27017"
},
"redis": {
"url": "",
"host": "localhost",
"port": 6379,
"password": "",
"cacheTtl": 30
},
"nats": {
"host": "localhost",
"port": 4222
}
},
"storage": {
"fileStorage": "./uploads",
"ceph": {
"accessKeyId": "minioadmin",
"secretAccessKey": "minioadmin123",
"endpoint": "http://127.0.0.1:9000",
"region": "us-east-1",
"filesBucket": "farmcontrol"
}
}
}
}

View File

@ -6,7 +6,7 @@ const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
async function syncModelsWithWS() {
const sourceDir = path.resolve(__dirname, 'src/schemas');
const sourceDir = path.resolve(__dirname, 'src/database/schemas');
const targetDir = path.resolve(__dirname, '../farmcontrol-ws/src/database/schemas');
console.log(`Syncing schemas from ${sourceDir} to ${targetDir}...`);

View File

@ -13,7 +13,6 @@
"canonical-json": "^0.2.0",
"cors": "^2.8.5",
"dotenv": "^17.2.3",
"etcd3": "^1.1.2",
"exifr": "^7.1.3",
"express": "^5.1.0",
"express-session": "^1.18.2",

42
src/config.js Normal file
View File

@ -0,0 +1,42 @@
// config.js - Configuration handling: loads the environment-specific section
// of config.json and exposes it to the rest of the app.
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// Configure paths relative to this file (ES modules have no built-in __dirname)
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// config.json is expected one directory above this file (the project root)
const CONFIG_PATH = path.resolve(__dirname, '../config.json');
// Determine environment; falls back to 'development' when NODE_ENV is unset
const NODE_ENV = process.env.NODE_ENV || 'development';
// Load config file: read config.json and return the section for the active
// environment. Throws when the file is missing, malformed, or has no entry
// for NODE_ENV; the error is logged before being rethrown so startup
// failures are visible on the console.
function loadConfig() {
  try {
    if (!fs.existsSync(CONFIG_PATH)) {
      throw new Error(`Configuration file not found at ${CONFIG_PATH}`);
    }
    const parsed = JSON.parse(fs.readFileSync(CONFIG_PATH, 'utf8'));
    const envConfig = parsed[NODE_ENV];
    if (!envConfig) {
      throw new Error(`Configuration for environment '${NODE_ENV}' not found in config.json`);
    }
    return envConfig;
  } catch (err) {
    console.error('Error loading config:', err);
    throw err;
  }
}
// Get current environment
/** @returns {string} Name of the active configuration environment. */
export function getEnvironment() {
  return NODE_ENV;
}
// Export singleton config instance: loaded once at import time so every
// consumer shares the same object.
export default loadConfig();

View File

@ -1,36 +0,0 @@
import mongoose from 'mongoose';
import bcrypt from 'bcrypt';
import { userModel } from '../schemas/management/user.schema.js';
import { dbConnect } from './mongo.js';
const ReseedAction = () => {
async function clear() {
dbConnect();
await userModel.deleteMany({});
console.log('DB cleared');
}
async function seedDB() {
await clear();
const salt = await bcrypt.genSalt(10);
const hashPassword = await bcrypt.hash('secret', salt);
const user = {
_id: mongoose.Types.ObjectId(1),
name: 'Admin',
email: 'admin@jsonapi.com',
password: hashPassword,
createdAt: new Date(),
profile_image: '../../images/admin.jpg',
};
const admin = new userModel(user);
await admin.save();
console.log('DB seeded');
}
seedDB();
};
export default ReseedAction;

View File

@ -11,29 +11,27 @@ import {
} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
import config from '../config.js';
const logger = log4js.getLogger('CephStorage');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Configure AWS SDK v3 for Ceph (S3-compatible)
const s3Config = {
credentials: {
accessKeyId: process.env.CEPH_ACCESS_KEY_ID,
secretAccessKey: process.env.CEPH_SECRET_ACCESS_KEY,
accessKeyId: config.storage.ceph.accessKeyId,
secretAccessKey: config.storage.ceph.secretAccessKey,
},
endpoint: process.env.CEPH_ENDPOINT, // e.g., 'http://ceph-gateway:7480'
endpoint: config.storage.ceph.endpoint, // e.g., 'http://ceph-gateway:7480'
forcePathStyle: true, // Required for Ceph (renamed from s3ForcePathStyle)
region: process.env.CEPH_REGION || 'us-east-1',
region: config.storage.ceph.region,
};
const s3Client = new S3Client(s3Config);
// Default bucket names for different file types
const BUCKETS = {
FILES: process.env.CEPH_FILES_BUCKET || 'farmcontrol',
FILES: config.storage.ceph.filesBucket,
};
/**
@ -41,6 +39,7 @@ const BUCKETS = {
*/
export const initializeBuckets = async () => {
try {
logger.info('Initializing Ceph buckets...');
for (const [type, bucketName] of Object.entries(BUCKETS)) {
try {
await s3Client.send(new HeadBucketCommand({ Bucket: bucketName }));
@ -54,6 +53,7 @@ export const initializeBuckets = async () => {
}
}
}
logger.info('Ceph buckets initialized successfully.');
} catch (error) {
logger.error('Error initializing buckets:', error);
throw error;
@ -80,7 +80,7 @@ export const uploadFile = async (bucket, key, body, contentType, metadata = {})
};
await s3Client.send(new PutObjectCommand(params));
const result = { Location: `${process.env.CEPH_ENDPOINT}/${bucket}/${key}` };
const result = { Location: `${config.storage.ceph.endpoint}/${bucket}/${key}` };
logger.debug(`File uploaded successfully: ${key} to bucket ${bucket}`);
return result;
} catch (error) {

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { fileModel } from '../schemas/management/file.schema.js';
import config from '../config.js';
import { fileModel } from './schemas/management/file.schema.js';
import _ from 'lodash';
import {
deleteAuditLog,
@ -7,7 +7,7 @@ import {
expandObjectIds,
modelHasRef,
getFieldsByRef,
jsonToCacheKey,
getQueryToCacheKey,
} from '../utils.js';
import log4js from 'log4js';
import {
@ -18,36 +18,31 @@ import {
distributeChildUpdate,
distributeChildDelete,
distributeChildNew,
distributeStats,
} from '../utils.js';
import { getAllModels } from '../services/misc/model.js';
import { redisServer } from './redis.js';
dotenv.config();
import { auditLogModel } from './schemas/management/auditlog.schema.js';
const logger = log4js.getLogger('Database');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
const cacheLogger = log4js.getLogger('DatabaseCache');
cacheLogger.level = process.env.LOG_LEVEL;
cacheLogger.level = config.server.logLevel;
const CACHE_TTL_SECONDS = parseInt(process.env.REDIS_CACHE_TTL || '30', 10);
const CACHE_TTL_SECONDS = parseInt(config.database.redis.cacheTtl || '30', 10);
export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
if (!model || !id) return undefined;
const cacheKeyObject = {
model: model.modelName,
id: id.toString(),
};
const cacheKey = getQueryToCacheKey({ model: model.modelName, id, populate });
const cacheKey = jsonToCacheKey(cacheKeyObject);
cacheLogger.trace('Retrieving object from cache:', cacheKeyObject);
cacheLogger.trace('Retrieving object from cache:', { model: model.modelName, id, populate });
try {
const cachedObject = await redisServer.getKey(cacheKey);
if (cachedObject == null) {
cacheLogger.trace('Cache miss:', cacheKeyObject);
cacheLogger.trace('Cache miss:', { model: model.modelName, id });
return undefined;
}
@ -66,48 +61,285 @@ export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
export const updateObjectCache = async ({ model, id, object, populate = [] }) => {
if (!model || !id || !object) return object;
const cacheKeyObject = {
model: model.modelName,
id: id.toString(),
};
const cacheKeyFilter = `${model.modelName}:${id?.toString()}*`;
const cacheKey = getQueryToCacheKey({ model: model.modelName, id, populate });
const cacheKey = jsonToCacheKey(cacheKeyObject);
cacheLogger.trace('Updating object cache:', cacheKeyObject);
cacheLogger.trace('Updating object cache:', cacheKeyFilter);
try {
const cachedObject = (await redisServer.getKey(cacheKey)) || {};
const mergedObject = _.merge(cachedObject, object);
// Get all keys matching the filter pattern
const matchingKeys = await redisServer.getKeysByPattern(cacheKeyFilter);
// Merge the object with each cached object and update
const mergedObjects = [];
for (const key of matchingKeys) {
logger.trace('Updating object cache:', key);
const cachedObject = (await redisServer.getKey(key)) || {};
const mergedObject = _.merge(cachedObject, object);
await redisServer.setKey(key, mergedObject, CACHE_TTL_SECONDS);
mergedObjects.push(mergedObject);
}
const cacheObject = (await redisServer.getKey(cacheKey)) || {};
const mergedObject = _.merge(cacheObject, object);
await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);
cacheLogger.trace('Updated object cache:', cacheKeyObject);
cacheLogger.trace('Updated object cache:', {
filter: cacheKeyFilter,
keysUpdated: matchingKeys.length,
});
// Return the merged object
return mergedObject;
} catch (err) {
cacheLogger.error('Error updating object in Redis cache:', err);
// Fallback to returning the provided object if cache fails
return object;
}
return object;
};
export const deleteObjectCache = async ({ model, id }) => {
if (!model || !id) return;
const cacheKeyObject = {
model: model.modelName,
id: id.toString(),
populate: [],
};
const cacheKeyFilter = `${model.modelName}:${id?.toString()}*`;
const cacheKey = jsonToCacheKey(cacheKeyObject);
cacheLogger.trace('Deleting object cache:', cacheKeyObject);
cacheLogger.trace('Deleting object cache:', cacheKeyFilter);
try {
await redisServer.deleteKey(cacheKey);
cacheLogger.trace('Deleted object cache:', cacheKeyObject);
// Get all keys matching the filter pattern and delete them
const matchingKeys = await redisServer.getKeysByPattern(cacheKeyFilter);
for (const cacheKey of matchingKeys) {
await redisServer.deleteKey(cacheKey);
}
cacheLogger.trace('Deleted object cache:', {
filter: cacheKeyFilter,
keysDeleted: matchingKeys.length,
});
} catch (err) {
cacheLogger.error('Error deleting object from Redis cache:', err);
}
};
// Utility to run one or many rollup aggregations in a single query via $facet.
/**
 * Run several rollup aggregations against a model in one round trip.
 *
 * Each entry of `rollupConfigs` describes a named facet:
 *   { name, filter, rollups: [{ name, operation: 'sum'|'count'|'avg', property }] }
 *
 * @param {object} params
 * @param {object} params.model - Mongoose model exposing `aggregate(pipeline)`.
 * @param {object} [params.baseFilter] - $match conditions applied to every facet.
 * @param {Array}  [params.rollupConfigs] - Facet definitions (see above).
 * @returns {Promise<object>} Map of facet name -> rollup values. A facet with a
 *   single rollup whose name equals the facet name is flattened to
 *   { <operation>: value }; otherwise values nest as { <rollupName>: { <operation>: value } }.
 * @throws {Error} When a rollup requests an unsupported operation.
 */
export const aggregateRollups = async ({ model, baseFilter = {}, rollupConfigs = [] }) => {
  if (!rollupConfigs.length) {
    return {};
  }
  // Build one $facet branch per config: $match (base + per-config filter)
  // followed by a single $group accumulating every requested metric.
  const facetStage = rollupConfigs.reduce((facets, definition, index) => {
    const key = definition.name || `rollup${index}`;
    const matchStage = { $match: { ...baseFilter, ...(definition.filter || {}) } };
    const groupStage = { $group: { _id: null } };
    (definition.rollups || []).forEach((rollup) => {
      switch (rollup.operation) {
        case 'sum':
          groupStage.$group[rollup.name] = { $sum: `$${rollup.property}` };
          break;
        case 'count':
          groupStage.$group[rollup.name] = { $sum: 1 };
          break;
        case 'avg':
          groupStage.$group[rollup.name] = { $avg: `$${rollup.property}` };
          break;
        default:
          throw new Error(`Unsupported rollup operation: ${rollup.operation}`);
      }
    });
    facets[key] = [matchStage, groupStage];
    return facets;
  }, {});
  const [results] = await model.aggregate([{ $facet: facetStage }]);
  return rollupConfigs.reduce((acc, definition, index) => {
    const key = definition.name || `rollup${index}`;
    // A facet that matched no documents produces an empty array; treat as {}.
    const rawResult = results?.[key]?.[0] ?? {};
    // Transform the result to nest rollup values under operation type.
    const transformedResult = {};
    (definition.rollups || []).forEach((rollup) => {
      // `??` (not `||`) so only missing/null results (e.g. $avg over zero
      // documents) fall back to 0, while a legitimate 0 passes through untouched.
      const value = rawResult[rollup.name] ?? 0;
      // If there's only one rollup and its name matches the key, flatten the structure.
      if (definition.rollups.length === 1 && rollup.name === key) {
        transformedResult[rollup.operation] = value;
      } else {
        transformedResult[rollup.name] = { [rollup.operation]: value };
      }
    });
    acc[key] = transformedResult;
    return acc;
  }, {});
};
// Reusable function to aggregate rollups over history using state reconstruction
/**
 * Build per-minute rollup snapshots over [startDate, endDate] by replaying
 * audit-log changes backwards ("rewinding") from the documents' current state.
 *
 * @param {object} params
 * @param {object} params.model - Mongoose model whose documents are rolled up.
 * @param {object} [params.baseFilter] - Filter a document must match to count.
 * @param {Array}  [params.rollupConfigs] - Same shape as aggregateRollups:
 *   [{ name, filter, rollups: [{ name, operation, property }] }].
 * @param {Date|string} [params.startDate] - Window start (default: end - 24h).
 * @param {Date|string} [params.endDate] - Window end (default: now).
 * @returns {Promise<Array>} Chronological list of
 *   { date, <rollupName>: { <operation>: value } } buckets.
 */
export const aggregateRollupsHistory = async ({
  model,
  baseFilter = {},
  rollupConfigs = [],
  startDate,
  endDate,
}) => {
  // Nothing to compute without rollup definitions.
  if (!rollupConfigs.length) {
    return [];
  }
  // Set default dates if not provided (default window: the last 24 hours).
  const end = endDate ? new Date(endDate) : new Date();
  const start = startDate ? new Date(startDate) : new Date(end.getTime() - 24 * 60 * 60 * 1000);
  // Get model name for filtering audit logs by parentType.
  const parentType = model.modelName ? model.modelName : 'unknown';
  // 1. Fetch all audit logs for this model type from start date to now.
  // Filter by parentType instead of fetching object IDs first.
  const auditLogs = await auditLogModel
    .find({
      parentType,
      createdAt: { $gte: start },
    })
    .sort({ createdAt: -1 }) // Newest first — consumed in this order while rewinding
    .lean();
  // 2. Extract unique parent IDs from audit logs.
  const parentIds = [...new Set(auditLogs.map((log) => log.parent.toString()))];
  if (parentIds.length === 0) {
    return [];
  }
  // 3. Fetch current state of relevant objects that match baseFilter.
  // Note: This only includes objects that CURRENTLY match the baseFilter.
  // Objects that matched in the past but don't match now are excluded.
  const currentObjects = await model
    .find({
      _id: { $in: parentIds },
      ...baseFilter,
    })
    .lean();
  const objectMap = new Map();
  currentObjects.forEach((obj) => {
    // Ensure _id is a string for map keys.
    objectMap.set(obj._id.toString(), expandObjectIds(obj));
  });
  if (objectMap.size === 0) {
    return [];
  }
  // Helper to check if an object matches a flat path->value filter.
  // NOTE(review): uses loose `!=` — presumably so ObjectId-vs-string and
  // number-vs-string pairs compare equal; confirm this is intentional.
  const matchesFilter = (obj, filter) => {
    if (!filter || Object.keys(filter).length === 0) return true;
    for (const [key, expectedValue] of Object.entries(filter)) {
      const actualValue = _.get(obj, key);
      // Handle simple equality
      if (actualValue != expectedValue) {
        return false;
      }
    }
    return true;
  };
  // 4. Generate time buckets (1 minute intervals), newest first.
  const buckets = [];
  let currentTime = new Date(end);
  // Round down to nearest minute
  currentTime.setSeconds(0, 0);
  while (currentTime >= start) {
    buckets.push(new Date(currentTime));
    currentTime = new Date(currentTime.getTime() - 60000); // -1 minute
  }
  // 5. Rewind state and snapshot at each bucket.
  const results = [];
  let logIndex = 0;
  // Create a working copy of objects to mutate during rewind
  // (deep clone to avoid issues if we need original later, though expandObjectIds creates new objs)
  const workingObjects = new Map();
  objectMap.forEach((val, key) => workingObjects.set(key, _.cloneDeep(val)));
  // Iterate backwards through time
  for (const bucketDate of buckets) {
    // Apply all logs that happened AFTER this bucket time (between last bucket and this one).
    // Since we iterate backwards, these are logs with createdAt > bucketDate.
    // `logIndex` persists across buckets, so each log is reverted exactly once.
    while (logIndex < auditLogs.length) {
      const log = auditLogs[logIndex];
      const logDate = new Date(log.createdAt);
      if (logDate <= bucketDate) {
        // This log happened at or before the current bucket time,
        // so its effects are already present (or rather, will be handled in a future/earlier bucket).
        // Stop processing logs for this step.
        break;
      }
      // Revert this change
      const objectId = log.parent.toString();
      const object = workingObjects.get(objectId);
      if (object) {
        if (log.operation === 'new') {
          // Object didn't exist before this creation event
          workingObjects.delete(objectId);
        } else if (log.changes && log.changes.old) {
          // Apply old values to revert state.
          // NOTE(review): _.merge cannot delete keys added later, so fields
          // absent from changes.old keep their newer values — confirm the
          // audit log records old values for every changed field.
          _.merge(object, log.changes.old);
        }
      }
      logIndex++;
    }
    // Snapshot: Calculate rollups for current state of all objects
    const bucketResult = {
      date: bucketDate.toISOString(),
    };
    const activeObjects = Array.from(workingObjects.values());
    // NOTE(review): the callback parameter `config` shadows the imported
    // config module within this closure; harmless here but easy to trip on.
    rollupConfigs.forEach((config) => {
      const configName = config.name;
      // Filter objects for this config.
      // Note: We also check baseFilter here in case the object state reverted to something
      // that no longer matches baseFilter (e.g. active: false)
      const matchingObjects = activeObjects.filter(
        (obj) => matchesFilter(obj, baseFilter) && matchesFilter(obj, config.filter)
      );
      // Calculate rollups
      (config.rollups || []).forEach((rollup) => {
        const rollupName = rollup.name;
        let value = 0;
        if (rollup.operation === 'count') {
          value = matchingObjects.length;
        } else if (rollup.operation === 'sum') {
          value = _.sumBy(matchingObjects, (obj) => _.get(obj, rollup.property) || 0);
        } else if (rollup.operation === 'avg') {
          const sum = _.sumBy(matchingObjects, (obj) => _.get(obj, rollup.property) || 0);
          value = matchingObjects.length ? sum / matchingObjects.length : 0;
        }
        // Nest the value under the operation type
        bucketResult[rollupName] = { [rollup.operation]: value };
      });
    });
    results.push(bucketResult);
  }
  // Reverse results to be chronological
  return results.reverse();
};
// Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
export const listObjects = async ({
model,
@ -324,14 +556,13 @@ export const listObjectsByProperties = async ({
} else if (typeof pop === 'object' && pop.path) {
pipeline.push({
$lookup: {
from:
pop.options && pop.options.from ? pop.options.from : pop.path.toLowerCase() + 's',
from: pop.from ? pop.from : pop.path.toLowerCase(),
localField: pop.path,
foreignField: '_id',
as: pop.path,
},
});
if (!pop.justOne === false) {
if (pop?.multiple == false || pop?.multiple == undefined) {
// default to unwind unless justOne is explicitly false
pipeline.push({
$unwind: {
@ -363,6 +594,7 @@ export const listObjectsByProperties = async ({
// Run aggregation
const results = await model.aggregate(pipeline);
console.log('results', results);
return nestGroups(results, properties, filter);
} else {
// If no properties specified, just return all objects without grouping
@ -435,6 +667,22 @@ export const getObject = async ({ model, id, populate }) => {
}
};
/**
 * Delegate to a model's static `stats()` implementation.
 *
 * @param {object} params
 * @param {object} params.model - Model expected to expose a static `stats` method.
 * @returns {Promise<object>} The stats payload, or an { error, code } object
 *   when the model defines no stats method.
 */
export const getModelStats = async ({ model }) => {
  if (model.stats) {
    return await model.stats();
  }
  logger.warn(`Model ${model.modelName} does not have a stats method.`);
  return { error: 'Model does not have a stats method.', code: 500 };
};
/**
 * Return a model's historical rollups via its static `history(from, to)` method.
 *
 * @param {object} params
 * @param {object} params.model - Model expected to expose a static `history` method.
 * @param {Date|string} [params.from] - Start of the requested window.
 * @param {Date|string} [params.to] - End of the requested window.
 * @returns {Promise<object>} History data, or an { error, code } payload when
 *   the model has no history method.
 */
export const getModelHistory = async ({ model, from, to }) => {
  // Guard on the method alone: the previous condition
  // (`!model.history && !from && !to`) let a call with from/to through even
  // when the model had no history method, crashing on `model.history(...)`.
  if (!model.history) {
    logger.warn(`Model ${model.modelName} does not have a history method.`);
    return { error: 'Model does not have a history method.', code: 500 };
  }
  return await model.history(from, to);
};
export const listObjectDependencies = async ({ model, id }) => {
try {
const dependencies = [];
@ -562,6 +810,17 @@ export const editObject = async ({ model, id, updateData, user, populate }) => {
populate,
});
if (model.recalculate) {
logger.debug(`Recalculating ${model.modelName}`);
await model.recalculate(updatedObject, user);
}
if (model.stats) {
logger.debug(`Getting stats for ${model.modelName}`);
const statsData = await model.stats();
await distributeStats(statsData, parentType);
}
return updatedObject;
} catch (error) {
logger.error('editObject error:', error);
@ -594,6 +853,17 @@ export const newObject = async ({ model, newData, user = null }, distributeChang
populate: [],
});
if (model.recalculate) {
logger.debug(`Recalculating ${model.modelName}`);
await model.recalculate(created, user);
}
if (model.stats) {
logger.debug(`Getting stats for ${model.modelName}`);
const statsData = await model.stats();
await distributeStats(statsData, parentType);
}
return created;
} catch (error) {
logger.error('newObject error:', error);
@ -625,6 +895,17 @@ export const deleteObject = async ({ model, id, user = null }, distributeChanges
// Invalidate cache for this object
await deleteObjectCache({ model, id });
if (model.recalculate) {
logger.debug(`Recalculating ${model.modelName}`);
await model.recalculate(deleted, user);
}
if (model.stats) {
logger.debug(`Getting stats for ${model.modelName}`);
const statsData = await model.stats();
await distributeStats(statsData, parentType);
}
return { deleted: true, object: deleted };
} catch (error) {
logger.error('deleteObject error:', error);
@ -686,8 +967,8 @@ export const flushFile = async ({ id, user }) => {
// Try to delete from Ceph storage if it exists
if (file.extension) {
try {
const { deleteFile } = await import('../services/storage/ceph.js');
const { BUCKETS } = await import('../services/storage/ceph.js');
const { deleteFile } = await import('./ceph.js');
const { BUCKETS } = await import('./ceph.js');
const cephKey = `files/${file._id}${file.extension}`;
await deleteFile(BUCKETS.FILES, cephKey);

View File

@ -1,19 +1,18 @@
import mongoose from 'mongoose';
import dotenv from 'dotenv';
import config from '../config.js';
import log4js from 'log4js';
const logger = log4js.getLogger('MongoDB');
logger.level = process.env.LOG_LEVEL;
dotenv.config();
logger.level = config.server.logLevel;
// Set strictQuery to false to prepare for Mongoose 7
mongoose.set('strictQuery', false);
function dbConnect() {
mongoose.connection.once('open', () => logger.info('Database connected.'));
logger.info(`Connecting to MongoDB...`);
mongoose.connection.once('open', () => logger.info('Connected to MongoDB.'));
return mongoose.connect(
`mongodb://${process.env.DB_LINK}/farmcontrol?retryWrites=true&w=majority`,
`mongodb://${config.database.mongo.link}/farmcontrol?retryWrites=true&w=majority`,
{}
);
}

View File

@ -1,15 +1,9 @@
import { connect } from '@nats-io/transport-node';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
const NATS_HOST = process.env.NATS_HOST || 'localhost';
const NATS_PORT = process.env.NATS_PORT || 4222;
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
import config from '../config.js';
const logger = log4js.getLogger('Nats');
logger.level = LOG_LEVEL;
logger.level = config.server.logLevel;
class NatsServer {
constructor() {
@ -17,7 +11,7 @@ class NatsServer {
this.subscriptions = new Map(); // subject → { subscription, callbacks }
this.requestHandlers = new Map(); // subject → { handler, callbacks }
this.queuedSubscriptions = new Map(); // subject → { subscription, callbacks, queue }
this.servers = [`nats://${NATS_HOST}:${NATS_PORT}`];
this.servers = [`nats://${config.database.nats.host}:${config.database.nats.port}`];
this.textEncoder = new TextEncoder();
this.textDecoder = new TextDecoder();
@ -43,7 +37,7 @@ class NatsServer {
if (this.client.isClosed()) {
throw new Error('NATS client connection failed');
}
logger.trace('NATS client connected successfully.');
logger.info('Connected to NATS.');
} catch (error) {
throw error;
}

View File

@ -1,25 +1,17 @@
import { createClient } from 'redis';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
const REDIS_URL = process.env.REDIS_URL;
const REDIS_HOST = process.env.REDIS_HOST || '127.0.0.1';
const REDIS_PORT = process.env.REDIS_PORT || 6379;
const REDIS_PASSWORD = process.env.REDIS_PASSWORD || undefined;
import config from '../config.js';
const logger = log4js.getLogger('Redis');
logger.level = LOG_LEVEL;
logger.level = config.server.logLevel;
class RedisServer {
constructor() {
const url = REDIS_URL || `redis://${REDIS_HOST}:${REDIS_PORT}`;
const url = config.database.redis.url || `redis://${config.database.redis.host}:${config.database.redis.port}`;
this.client = createClient({
url,
password: REDIS_PASSWORD,
password: config.database.redis.password || undefined,
});
this.client.on('error', (err) => {
@ -31,6 +23,7 @@ class RedisServer {
async connect() {
if (this.connected) return;
logger.info('Connecting to Redis...');
await this.client.connect();
this.connected = true;
logger.info('Connected to Redis');
@ -61,6 +54,21 @@ class RedisServer {
await this.connect();
await this.client.del(key);
}
// Collect every key matching a glob-style pattern using SCAN (cursor-based
// and non-blocking) instead of KEYS, which would block Redis on large keyspaces.
// Returns an array of key names; may include duplicates if keys change mid-scan.
async getKeysByPattern(pattern) {
  await this.connect();
  const keys = [];
  let cursor = '0';
  do {
    const result = await this.client.scan(cursor, {
      MATCH: pattern,
      COUNT: 100, // batch-size hint; SCAN may return fewer or more per iteration
    });
    cursor = result.cursor;
    keys.push(...result.keys);
    // NOTE(review): some node-redis versions return `cursor` as a number;
    // the strict `!== '0'` comparison would then never terminate — confirm
    // the installed client returns a string cursor.
  } while (cursor !== '0');
  return keys;
}
}
const redisServer = new RedisServer();

View File

@ -1,42 +0,0 @@
import bcrypt from "bcrypt";
import mongoose from "mongoose";
import { userModel } from "../schemas/user.schema.js";
import { jobModel } from "../schemas/job.schema.js";
import { dbConnect } from "../mongo/index.js";
async function seedDB() {
dbConnect();
const salt = await bcrypt.genSalt(10);
const hashPassword = await bcrypt.hash("secret", salt);
const user = {
_id: new mongoose.Types.ObjectId(1),
name: "Admin",
email: "admin@jsonapi.com",
password: hashPassword,
createdAt: new Date(),
profile_image: "../../images/admin.jpg",
};
const admin = new userModel(user);
await admin.save();
const job = {
_id: new mongoose.Types.ObjectId(1),
status : {
type: "Queued"
},
createdAt: new Date(),
updatedAt: new Date(),
started_at: new Date(),
};
const newJob = new jobModel(job);
await newJob.save();
console.log("DB seeded");
}
seedDB().then(() => {
mongoose.connection.close();
});

View File

@ -1,7 +1,7 @@
import express from 'express';
import bodyParser from 'body-parser';
import cors from 'cors';
import dotenv from 'dotenv';
import config from './config.js';
import { expressSession, keycloak } from './keycloak.js';
import { dbConnect } from './database/mongo.js';
import {
@ -40,24 +40,21 @@ import {
} from './routes/index.js';
import path from 'path';
import * as fs from 'fs';
import cron from 'node-cron';
import ReseedAction from './database/ReseedAction.js';
import log4js from 'log4js';
import { populateUserMiddleware } from './services/misc/auth.js';
import { natsServer } from './database/nats.js';
import { initializeBuckets } from './services/storage/ceph.js';
import { initializeBuckets } from './database/ceph.js';
import { getEnvironment } from './config.js';
dotenv.config();
const PORT = process.env.PORT || 8787;
const PORT = config.server.port;
const app = express();
const logger = log4js.getLogger('App');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
app.use(log4js.connectLogger(logger, { level: 'trace' }));
const whitelist = [process.env.APP_URL_CLIENT, process.env.APP_URL_ELECTRON_CLIENT];
const whitelist = [config.app.urlClient, config.app.urlElectronClient];
const corsOptions = {
origin: function (origin, callback) {
if (!origin || whitelist.indexOf(origin) !== -1) {
@ -71,18 +68,20 @@ const corsOptions = {
// Initialize application
async function initializeApp() {
logger.info('Initializing application...');
logger.info(`Environment: ${getEnvironment()}`);
logger.info(`Port: ${PORT}`);
logger.info(`Log Level: ${config.server.logLevel}`);
try {
// Connect to database
dbConnect();
await dbConnect();
// Connect to NATS
natsServer.connect();
logger.info('Connected to NATS');
await natsServer.connect();
// Initialize Ceph buckets
try {
await initializeBuckets();
logger.info('Ceph buckets initialized successfully');
} catch (err) {
logger.error('Failed to initialize Ceph buckets:', err);
// Don't throw error - allow app to start without Ceph for development
@ -142,11 +141,5 @@ app.use('/taxrates', taxRateRoutes);
app.use('/taxrecords', taxRecordRoutes);
app.use('/notes', noteRoutes);
if (process.env.SCHEDULE_HOUR) {
cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {
ReseedAction();
});
}
// Start the application
initializeApp();

View File

@ -1,17 +1,18 @@
import Keycloak from 'keycloak-connect';
import session from 'express-session';
import dotenv from 'dotenv';
import config, { getEnvironment } from './config.js';
import axios from 'axios';
import jwt from 'jsonwebtoken';
import dotenv from 'dotenv';
import log4js from 'log4js';
import NodeCache from 'node-cache';
import { userModel } from './schemas/management/user.schema.js';
import { userModel } from './database/schemas/management/user.schema.js';
import { getObject } from './database/database.js';
import { hostModel } from './schemas/management/host.schema.js';
import { hostModel } from './database/schemas/management/host.schema.js';
const logger = log4js.getLogger('Keycloak');
logger.level = config.server.logLevel || 'info';
dotenv.config();
const logger = log4js.getLogger('Keycloak');
logger.level = process.env.LOG_LEVEL || 'info';
// Initialize NodeCache with 5-minute TTL
const userCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
@ -56,10 +57,10 @@ const lookupUser = async (preferredUsername) => {
// Initialize Keycloak
const keycloakConfig = {
realm: process.env.KEYCLOAK_REALM || 'farm-control',
'auth-server-url': process.env.KEYCLOAK_URL || 'http://localhost:8080/auth',
'ssl-required': process.env.NODE_ENV === 'production' ? 'external' : 'none',
resource: process.env.KEYCLOAK_CLIENT_ID || 'farmcontrol-client',
realm: config.auth.keycloak.realm,
'auth-server-url': config.auth.keycloak.url,
'ssl-required': getEnvironment() === 'production' ? 'external' : 'none',
resource: config.auth.keycloak.clientId,
'confidential-port': 0,
'bearer-only': true,
'public-client': false,
@ -95,10 +96,10 @@ const isAuthenticated = async (req, res, next) => {
try {
// Verify token with Keycloak introspection endpoint
const response = await axios.post(
`${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token/introspect`,
`${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token/introspect`,
new URLSearchParams({
token: token,
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_id: config.auth.keycloak.clientId,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
}),
{

View File

@ -15,6 +15,8 @@ import materialRoutes from './management/materials.js';
import partStockRoutes from './inventory/partstocks.js';
import filamentStockRoutes from './inventory/filamentstocks.js';
import purchaseOrderRoutes from './inventory/purchaseorders.js';
import orderItemRoutes from './inventory/orderitems.js';
import shipmentRoutes from './inventory/shipments.js';
import stockEventRoutes from './inventory/stockevents.js';
import stockAuditRoutes from './inventory/stockaudits.js';
import auditLogRoutes from './management/auditlogs.js';
@ -47,6 +49,8 @@ export {
partStockRoutes,
filamentStockRoutes,
purchaseOrderRoutes,
orderItemRoutes,
shipmentRoutes,
stockEventRoutes,
stockAuditRoutes,
auditLogRoutes,

View File

@ -10,6 +10,8 @@ import {
newFilamentStockRouteHandler,
deleteFilamentStockRouteHandler,
listFilamentStocksByPropertiesRouteHandler,
getFilamentStockStatsRouteHandler,
getFilamentStockHistoryRouteHandler,
} from '../../services/inventory/filamentstocks.js';
// list of filament stocks
@ -35,6 +37,16 @@ router.post('/', isAuthenticated, (req, res) => {
newFilamentStockRouteHandler(req, res);
});
// get filament stock stats
router.get('/stats', isAuthenticated, (req, res) => {
getFilamentStockStatsRouteHandler(req, res);
});
// get filament stock history
router.get('/history', isAuthenticated, (req, res) => {
getFilamentStockHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getFilamentStockRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newOrderItemRouteHandler,
deleteOrderItemRouteHandler,
listOrderItemsByPropertiesRouteHandler,
getOrderItemStatsRouteHandler,
getOrderItemHistoryRouteHandler,
} from '../../services/inventory/orderitems.js';
// list of order items
@ -35,6 +37,16 @@ router.post('/', isAuthenticated, (req, res) => {
newOrderItemRouteHandler(req, res);
});
// get order item stats
router.get('/stats', isAuthenticated, (req, res) => {
getOrderItemStatsRouteHandler(req, res);
});
// get order item history
router.get('/history', isAuthenticated, (req, res) => {
getOrderItemHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getOrderItemRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newPartStockRouteHandler,
deletePartStockRouteHandler,
listPartStocksByPropertiesRouteHandler,
getPartStockStatsRouteHandler,
getPartStockHistoryRouteHandler,
} from '../../services/inventory/partstocks.js';
// list of part stocks
@ -35,6 +37,16 @@ router.post('/', isAuthenticated, (req, res) => {
newPartStockRouteHandler(req, res);
});
// get part stock stats
router.get('/stats', isAuthenticated, (req, res) => {
getPartStockStatsRouteHandler(req, res);
});
// get part stock history
router.get('/history', isAuthenticated, (req, res) => {
getPartStockHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getPartStockRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newPurchaseOrderRouteHandler,
deletePurchaseOrderRouteHandler,
listPurchaseOrdersByPropertiesRouteHandler,
getPurchaseOrderStatsRouteHandler,
getPurchaseOrderHistoryRouteHandler,
} from '../../services/inventory/purchaseorders.js';
// list of purchase orders
@ -35,6 +37,16 @@ router.post('/', isAuthenticated, (req, res) => {
newPurchaseOrderRouteHandler(req, res);
});
// get purchase order stats
router.get('/stats', isAuthenticated, (req, res) => {
getPurchaseOrderStatsRouteHandler(req, res);
});
// get purchase order history
router.get('/history', isAuthenticated, (req, res) => {
getPurchaseOrderHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getPurchaseOrderRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newShipmentRouteHandler,
deleteShipmentRouteHandler,
listShipmentsByPropertiesRouteHandler,
getShipmentStatsRouteHandler,
getShipmentHistoryRouteHandler,
} from '../../services/inventory/shipments.js';
// list of shipments
@ -49,6 +51,16 @@ router.post('/', isAuthenticated, (req, res) => {
newShipmentRouteHandler(req, res);
});
// get shipment stats
router.get('/stats', isAuthenticated, (req, res) => {
getShipmentStatsRouteHandler(req, res);
});
// get shipment history
router.get('/history', isAuthenticated, (req, res) => {
getShipmentHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getShipmentRouteHandler(req, res);
});

View File

@ -9,6 +9,8 @@ import {
newStockAuditRouteHandler,
updateStockAuditRouteHandler,
deleteStockAuditRouteHandler,
getStockAuditStatsRouteHandler,
getStockAuditHistoryRouteHandler,
} from '../../services/inventory/stockaudits.js';
// List stock audits
@ -36,6 +38,16 @@ router.post('/', isAuthenticated, (req, res) => {
newStockAuditRouteHandler(req, res);
});
// get stock audit stats
router.get('/stats', isAuthenticated, (req, res) => {
getStockAuditStatsRouteHandler(req, res);
});
// get stock audit history
router.get('/history', isAuthenticated, (req, res) => {
getStockAuditHistoryRouteHandler(req, res);
});
// Get specific stock audit
router.get('/:id', isAuthenticated, (req, res) => {
getStockAuditRouteHandler(req, res);

View File

@ -10,6 +10,8 @@ import {
editStockEventRouteHandler,
deleteStockEventRouteHandler,
listStockEventsByPropertiesRouteHandler,
getStockEventStatsRouteHandler,
getStockEventHistoryRouteHandler,
} from '../../services/inventory/stockevents.js';
// list of stock events
@ -35,6 +37,16 @@ router.post('/', isAuthenticated, (req, res) => {
newStockEventRouteHandler(req, res);
});
// get stock event stats
router.get('/stats', isAuthenticated, (req, res) => {
getStockEventStatsRouteHandler(req, res);
});
// get stock event history
router.get('/history', isAuthenticated, (req, res) => {
getStockEventHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getStockEventRouteHandler(req, res);
});

View File

@ -3,6 +3,8 @@ import { isAuthenticated } from '../../keycloak.js';
import {
listAuditLogsRouteHandler,
getAuditLogRouteHandler,
getAuditLogStatsRouteHandler,
getAuditLogHistoryRouteHandler,
} from '../../services/management/auditlogs.js';
import { parseFilter } from '../../utils.js';
@ -27,6 +29,16 @@ router.get('/', isAuthenticated, async (req, res) => {
listAuditLogsRouteHandler(req, res, page, limit, filter, sort, order);
});
// get audit log stats
router.get('/stats', isAuthenticated, (req, res) => {
getAuditLogStatsRouteHandler(req, res);
});
// get audit log history
router.get('/history', isAuthenticated, (req, res) => {
getAuditLogHistoryRouteHandler(req, res);
});
/**
* @route GET /api/auditlogs/:id
* @desc Get a single audit log by ID

View File

@ -10,6 +10,8 @@ import {
newCourierRouteHandler,
deleteCourierRouteHandler,
listCouriersByPropertiesRouteHandler,
getCourierStatsRouteHandler,
getCourierHistoryRouteHandler,
} from '../../services/management/courier.js';
// list of couriers
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newCourierRouteHandler(req, res);
});
// get courier stats
router.get('/stats', isAuthenticated, (req, res) => {
getCourierStatsRouteHandler(req, res);
});
// get courier history
router.get('/history', isAuthenticated, (req, res) => {
getCourierHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getCourierRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newCourierServiceRouteHandler,
deleteCourierServiceRouteHandler,
listCourierServicesByPropertiesRouteHandler,
getCourierServiceStatsRouteHandler,
getCourierServiceHistoryRouteHandler,
} from '../../services/management/courierservice.js';
// list of courier services
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newCourierServiceRouteHandler(req, res);
});
// get courier service stats
router.get('/stats', isAuthenticated, (req, res) => {
getCourierServiceStatsRouteHandler(req, res);
});
// get courierservice history
router.get('/history', isAuthenticated, (req, res) => {
getCourierServiceHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getCourierServiceRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newDocumentJobRouteHandler,
deleteDocumentJobRouteHandler,
listDocumentJobsByPropertiesRouteHandler,
getDocumentJobStatsRouteHandler,
getDocumentJobHistoryRouteHandler,
} from '../../services/management/documentjobs.js';
// list of document jobs
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newDocumentJobRouteHandler(req, res);
});
// get document job stats
router.get('/stats', isAuthenticated, (req, res) => {
getDocumentJobStatsRouteHandler(req, res);
});
// get documentjobs history
router.get('/history', isAuthenticated, (req, res) => {
getDocumentJobHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getDocumentJobRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newDocumentPrinterRouteHandler,
deleteDocumentPrinterRouteHandler,
listDocumentPrintersByPropertiesRouteHandler,
getDocumentPrinterStatsRouteHandler,
getDocumentPrinterHistoryRouteHandler,
} from '../../services/management/documentprinters.js';
// list of document printers
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newDocumentPrinterRouteHandler(req, res);
});
// get document printer stats
router.get('/stats', isAuthenticated, (req, res) => {
getDocumentPrinterStatsRouteHandler(req, res);
});
// get documentprinters history
router.get('/history', isAuthenticated, (req, res) => {
getDocumentPrinterHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getDocumentPrinterRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newDocumentSizeRouteHandler,
deleteDocumentSizeRouteHandler,
listDocumentSizesByPropertiesRouteHandler,
getDocumentSizeStatsRouteHandler,
getDocumentSizeHistoryRouteHandler,
} from '../../services/management/documentsizes.js';
// list of document sizes
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newDocumentSizeRouteHandler(req, res);
});
// get document size stats
router.get('/stats', isAuthenticated, (req, res) => {
getDocumentSizeStatsRouteHandler(req, res);
});
// get documentsizes history
router.get('/history', isAuthenticated, (req, res) => {
getDocumentSizeHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getDocumentSizeRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newDocumentTemplateRouteHandler,
deleteDocumentTemplateRouteHandler,
listDocumentTemplatesByPropertiesRouteHandler,
getDocumentTemplateStatsRouteHandler,
getDocumentTemplateHistoryRouteHandler,
} from '../../services/management/documenttemplates.js';
// list of document templates
@ -35,6 +37,16 @@ router.post('/', isAuthenticated, (req, res) => {
newDocumentTemplateRouteHandler(req, res);
});
// get document template stats
router.get('/stats', isAuthenticated, (req, res) => {
getDocumentTemplateStatsRouteHandler(req, res);
});
// get document template history
router.get('/history', isAuthenticated, (req, res) => {
getDocumentTemplateHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getDocumentTemplateRouteHandler(req, res);
});

View File

@ -9,6 +9,8 @@ import {
getFilamentRouteHandler,
editFilamentRouteHandler,
newFilamentRouteHandler,
getFilamentStatsRouteHandler,
getFilamentHistoryRouteHandler,
} from '../../services/management/filaments.js';
// list of filaments
@ -50,6 +52,16 @@ router.post('/', isAuthenticated, (req, res) => {
newFilamentRouteHandler(req, res);
});
// get filament stats
router.get('/stats', isAuthenticated, (req, res) => {
getFilamentStatsRouteHandler(req, res);
});
// get filaments history
router.get('/history', isAuthenticated, (req, res) => {
getFilamentHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getFilamentRouteHandler(req, res);
});

View File

@ -12,6 +12,8 @@ import {
flushFileRouteHandler,
deleteFileRouteHandler,
listFilesByPropertiesRouteHandler,
getFileStatsRouteHandler,
getFileHistoryRouteHandler,
} from '../../services/management/files.js';
// list of files
@ -33,6 +35,16 @@ router.post('/', isAuthenticated, (req, res) => {
newFileRouteHandler(req, res);
});
// get file stats
router.get('/stats', isAuthenticated, (req, res) => {
getFileStatsRouteHandler(req, res);
});
// get file history
router.get('/history', isAuthenticated, (req, res) => {
getFileHistoryRouteHandler(req, res);
});
router.delete('/:id/flush', isAuthenticated, (req, res) => {
flushFileRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newHostRouteHandler,
deleteHostRouteHandler,
listHostsByPropertiesRouteHandler,
getHostStatsRouteHandler,
getHostHistoryRouteHandler,
} from '../../services/management/hosts.js';
// list of hosts
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newHostRouteHandler(req, res);
});
// get host stats
router.get('/stats', isAuthenticated, (req, res) => {
getHostStatsRouteHandler(req, res);
});
// get hosts history
router.get('/history', isAuthenticated, (req, res) => {
getHostHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getHostRouteHandler(req, res);
});

View File

@ -8,6 +8,8 @@ import {
getMaterialRouteHandler,
editMaterialRouteHandler,
newMaterialRouteHandler,
getMaterialStatsRouteHandler,
getMaterialHistoryRouteHandler,
} from '../../services/management/materials.js';
// list of materials
@ -34,6 +36,16 @@ router.post('/', isAuthenticated, (req, res) => {
newMaterialRouteHandler(req, res);
});
// get material stats
router.get('/stats', isAuthenticated, (req, res) => {
getMaterialStatsRouteHandler(req, res);
});
// get materials history
router.get('/history', isAuthenticated, (req, res) => {
getMaterialHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getMaterialRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newNoteTypeRouteHandler,
deleteNoteTypeRouteHandler,
listNoteTypesByPropertiesRouteHandler,
getNoteTypeStatsRouteHandler,
getNoteTypeHistoryRouteHandler,
} from '../../services/management/notetypes.js';
// list of note types
@ -35,6 +37,16 @@ router.post('/', isAuthenticated, (req, res) => {
newNoteTypeRouteHandler(req, res);
});
// get note type stats
router.get('/stats', isAuthenticated, (req, res) => {
getNoteTypeStatsRouteHandler(req, res);
});
// get notetypes history
router.get('/history', isAuthenticated, (req, res) => {
getNoteTypeHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getNoteTypeRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newPartRouteHandler,
deletePartRouteHandler,
listPartsByPropertiesRouteHandler,
getPartStatsRouteHandler,
getPartHistoryRouteHandler,
} from '../../services/management/parts.js';
// list of parts
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newPartRouteHandler(req, res);
});
// get part stats
router.get('/stats', isAuthenticated, (req, res) => {
getPartStatsRouteHandler(req, res);
});
// get parts history
router.get('/history', isAuthenticated, (req, res) => {
getPartHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getPartRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newProductRouteHandler,
deleteProductRouteHandler,
listProductsByPropertiesRouteHandler,
getProductStatsRouteHandler,
getProductHistoryRouteHandler,
} from '../../services/management/products.js';
// list of products
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newProductRouteHandler(req, res);
});
// get product stats
router.get('/stats', isAuthenticated, (req, res) => {
getProductStatsRouteHandler(req, res);
});
// get products history
router.get('/history', isAuthenticated, (req, res) => {
getProductHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getProductRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newTaxRateRouteHandler,
deleteTaxRateRouteHandler,
listTaxRatesByPropertiesRouteHandler,
getTaxRateStatsRouteHandler,
getTaxRateHistoryRouteHandler,
} from '../../services/management/taxrates.js';
// list of tax rates
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newTaxRateRouteHandler(req, res);
});
// get tax rate stats
router.get('/stats', isAuthenticated, (req, res) => {
getTaxRateStatsRouteHandler(req, res);
});
// get tax rate history
router.get('/history', isAuthenticated, (req, res) => {
getTaxRateHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getTaxRateRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newTaxRecordRouteHandler,
deleteTaxRecordRouteHandler,
listTaxRecordsByPropertiesRouteHandler,
getTaxRecordStatsRouteHandler,
getTaxRecordHistoryRouteHandler,
} from '../../services/management/taxrecords.js';
// list of tax records
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newTaxRecordRouteHandler(req, res);
});
// get tax record stats
router.get('/stats', isAuthenticated, (req, res) => {
getTaxRecordStatsRouteHandler(req, res);
});
// get tax record history
router.get('/history', isAuthenticated, (req, res) => {
getTaxRecordHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getTaxRecordRouteHandler(req, res);
});

View File

@ -8,6 +8,8 @@ import {
listUsersByPropertiesRouteHandler,
getUserRouteHandler,
editUserRouteHandler,
getUserStatsRouteHandler,
getUserHistoryRouteHandler,
} from '../../services/management/users.js';
// list of document templates
@ -29,6 +31,16 @@ router.get('/properties', isAuthenticated, (req, res) => {
listUsersByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
});
// get user stats
router.get('/stats', isAuthenticated, (req, res) => {
getUserStatsRouteHandler(req, res);
});
// get user history
router.get('/history', isAuthenticated, (req, res) => {
getUserHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getUserRouteHandler(req, res);
});

View File

@ -10,6 +10,8 @@ import {
newVendorRouteHandler,
deleteVendorRouteHandler,
listVendorsByPropertiesRouteHandler,
getVendorStatsRouteHandler,
getVendorHistoryRouteHandler,
} from '../../services/management/vendors.js';
// list of vendors
@ -31,6 +33,16 @@ router.post('/', isAuthenticated, (req, res) => {
newVendorRouteHandler(req, res);
});
// get vendor stats
router.get('/stats', isAuthenticated, (req, res) => {
getVendorStatsRouteHandler(req, res);
});
// get vendors history
router.get('/history', isAuthenticated, (req, res) => {
getVendorHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getVendorRouteHandler(req, res);
});

View File

@ -6,8 +6,11 @@ import {
editNoteRouteHandler,
newNoteRouteHandler,
deleteNoteRouteHandler,
listNotesByPropertiesRouteHandler,
getNoteStatsRouteHandler,
getNoteHistoryRouteHandler,
} from '../../services/misc/notes.js';
import { getFilter } from '../../utils.js';
import { getFilter, convertPropertiesString } from '../../utils.js';
const router = express.Router();
@ -31,6 +34,16 @@ router.post('/', isAuthenticated, (req, res) => {
newNoteRouteHandler(req, res);
});
// get note stats
router.get('/stats', isAuthenticated, (req, res) => {
getNoteStatsRouteHandler(req, res);
});
// get note history
router.get('/history', isAuthenticated, (req, res) => {
getNoteHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getNoteRouteHandler(req, res);
});

View File

@ -9,6 +9,7 @@ import {
newGCodeFileRouteHandler,
listGCodeFilesByPropertiesRouteHandler,
getGCodeFileContentRouteHandler,
getGCodeFileStatsRouteHandler,
} from '../../services/production/gcodefiles.js';
import { convertPropertiesString, getFilter } from '../../utils.js';
@ -32,6 +33,11 @@ router.post('/', isAuthenticated, (req, res) => {
newGCodeFileRouteHandler(req, res);
});
// get gcodeFile stats
router.get('/stats', isAuthenticated, (req, res) => {
getGCodeFileStatsRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getGCodeFileRouteHandler(req, res);
});

View File

@ -9,6 +9,7 @@ import {
newJobRouteHandler,
deleteJobRouteHandler,
getJobStatsRouteHandler,
getJobHistoryRouteHandler,
} from '../../services/production/jobs.js';
import { convertPropertiesString, getFilter } from '../../utils.js';
@ -31,6 +32,16 @@ router.post('/', isAuthenticated, (req, res) => {
newJobRouteHandler(req, res);
});
// get job stats
router.get('/stats', isAuthenticated, (req, res) => {
getJobStatsRouteHandler(req, res);
});
// get job history
router.get('/history', isAuthenticated, (req, res) => {
getJobHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getJobRouteHandler(req, res);
});
@ -39,9 +50,4 @@ router.delete('/:id', isAuthenticated, async (req, res) => {
deleteJobRouteHandler(req, res);
});
// get printer stats
router.get('/stats', isAuthenticated, (req, res) => {
getJobStatsRouteHandler(req, res);
});
export default router;

View File

@ -9,6 +9,7 @@ import {
newPrinterRouteHandler,
getPrinterStatsRouteHandler,
listPrintersByPropertiesRouteHandler,
getPrinterHistoryRouteHandler,
} from '../../services/production/printers.js';
import { convertPropertiesString, getFilter } from '../../utils.js';
@ -32,6 +33,11 @@ router.post('/', isAuthenticated, (req, res) => {
newPrinterRouteHandler(req, res);
});
// get printer history
router.get('/history', isAuthenticated, (req, res) => {
getPrinterHistoryRouteHandler(req, res);
});
// get printer stats
router.get('/stats', isAuthenticated, (req, res) => {
getPrinterStatsRouteHandler(req, res);

View File

@ -6,6 +6,8 @@ import {
listSubJobsRouteHandler,
listSubJobsByPropertiesRouteHandler,
getSubJobRouteHandler,
getSubJobStatsRouteHandler,
getSubJobHistoryRouteHandler,
} from '../../services/production/subjobs.js';
import { getFilter, convertPropertiesString } from '../../utils.js';
@ -24,6 +26,16 @@ router.get('/properties', isAuthenticated, (req, res) => {
listSubJobsByPropertiesRouteHandler(req, res, properties, filter);
});
// get sub job stats
router.get('/stats', isAuthenticated, (req, res) => {
getSubJobStatsRouteHandler(req, res);
});
// get sub job history
router.get('/history', isAuthenticated, (req, res) => {
getSubJobHistoryRouteHandler(req, res);
});
router.get('/:id', isAuthenticated, (req, res) => {
getSubJobRouteHandler(req, res);
});

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
import config from '../../config.js';
import { filamentStockModel } from '../../database/schemas/inventory/filamentstock.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Filament Stocks');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listFilamentStocksRouteHandler = async (
req,
@ -157,3 +157,25 @@ export const deleteFilamentStockRouteHandler = async (req, res) => {
res.send(result);
};
/**
 * GET /stats — aggregate statistics for filament stocks.
 * Delegates to the shared getModelStats helper and relays its result.
 * @param {import('express').Request} req - Express request (unused).
 * @param {import('express').Response} res - Express response.
 */
export const getFilamentStockStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: filamentStockModel });
  if (result?.error) {
    logger.error('Error fetching filament stock stats:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Filament stock stats:', result);
  res.send(result);
};
/**
 * GET /history — historical data for filament stocks.
 * Optional `from`/`to` query parameters bound the time window
 * (interpretation is delegated to getModelHistory — TODO confirm expected format).
 * @param {import('express').Request} req - Express request; reads req.query.from / req.query.to.
 * @param {import('express').Response} res - Express response.
 */
export const getFilamentStockHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const result = await getModelHistory({ model: filamentStockModel, from, to });
  if (result?.error) {
    logger.error('Error fetching filament stock history:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Filament stock history:', result);
  res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { orderItemModel } from '../../schemas/inventory/orderitem.schema.js';
import config from '../../config.js';
import { orderItemModel } from '../../database/schemas/inventory/orderitem.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Order Items');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listOrderItemsRouteHandler = async (
req,
@ -45,11 +45,11 @@ export const listOrderItemsRouteHandler = async (
},
{
path: 'item',
populate: { path: 'costTaxRate' },
populate: { path: 'costTaxRate', strictPopulate: false },
},
{
path: 'item',
populate: { path: 'priceTaxRate' },
populate: { path: 'priceTaxRate', strictPopulate: false },
},
],
});
@ -104,11 +104,13 @@ export const getOrderItemRouteHandler = async (req, res) => {
},
{
path: 'item',
populate: { path: 'costTaxRate' },
populate: { path: 'costTaxRate', strictPopulate: false },
strictPopulate: false,
},
{
path: 'item',
populate: { path: 'priceTaxRate' },
populate: { path: 'priceTaxRate', strictPopulate: false },
strictPopulate: false,
},
],
});
@ -128,9 +130,10 @@ export const editOrderItemRouteHandler = async (req, res) => {
const updateData = {
updatedAt: new Date(),
purchaseOrder: req.body.purchaseOrder,
itemType: req.body.itemType,
item: req.body.item,
orderType: req.body.orderType,
order: req.body.order,
syncAmount: req.body.syncAmount,
itemAmount: req.body.itemAmount,
quantity: req.body.quantity,
@ -161,6 +164,7 @@ export const newOrderItemRouteHandler = async (req, res) => {
const newData = {
updatedAt: new Date(),
purchaseOrder: req.body.purchaseOrder,
state: { type: 'draft' },
itemType: req.body.itemType,
item: req.body.item,
orderType: req.body.orderType,
@ -207,3 +211,25 @@ export const deleteOrderItemRouteHandler = async (req, res) => {
res.send(result);
};
/**
 * GET /stats — aggregate statistics for order items.
 * Delegates to the shared getModelStats helper and relays its result.
 * @param {import('express').Request} req - Express request (unused).
 * @param {import('express').Response} res - Express response.
 */
export const getOrderItemStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: orderItemModel });
  if (result?.error) {
    logger.error('Error fetching order item stats:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Order item stats:', result);
  res.send(result);
};
/**
 * GET /history — historical data for order items.
 * Optional `from`/`to` query parameters bound the time window
 * (interpretation is delegated to getModelHistory — TODO confirm expected format).
 * @param {import('express').Request} req - Express request; reads req.query.from / req.query.to.
 * @param {import('express').Response} res - Express response.
 */
export const getOrderItemHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const result = await getModelHistory({ model: orderItemModel, from, to });
  if (result?.error) {
    logger.error('Error fetching order item history:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Order item history:', result);
  res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
import config from '../../config.js';
import { partStockModel } from '../../database/schemas/inventory/partstock.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Part Stocks');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPartStocksRouteHandler = async (
req,
@ -157,3 +157,25 @@ export const deletePartStockRouteHandler = async (req, res) => {
res.send(result);
};
/**
 * GET /stats — aggregate statistics for part stocks.
 * Delegates to the shared getModelStats helper and relays its result.
 * @param {import('express').Request} req - Express request (unused).
 * @param {import('express').Response} res - Express response.
 */
export const getPartStockStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: partStockModel });
  if (result?.error) {
    logger.error('Error fetching part stock stats:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Part stock stats:', result);
  res.send(result);
};
/**
 * GET /history — historical data for part stocks.
 * Optional `from`/`to` query parameters bound the time window
 * (interpretation is delegated to getModelHistory — TODO confirm expected format).
 * @param {import('express').Request} req - Express request; reads req.query.from / req.query.to.
 * @param {import('express').Response} res - Express response.
 */
export const getPartStockHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const result = await getModelHistory({ model: partStockModel, from, to });
  if (result?.error) {
    logger.error('Error fetching part stock history:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Part stock history:', result);
  res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { purchaseOrderModel } from '../../schemas/inventory/purchaseorder.schema.js';
import config from '../../config.js';
import { purchaseOrderModel } from '../../database/schemas/inventory/purchaseorder.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,12 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Purchase Orders');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPurchaseOrdersRouteHandler = async (
req,
@ -78,7 +79,7 @@ export const getPurchaseOrderRouteHandler = async (req, res) => {
const result = await getObject({
model: purchaseOrderModel,
id,
populate: ['vendor', 'items.item', 'items.taxRate'],
populate: ['vendor'],
});
if (result?.error) {
logger.warn(`Purchase Order not found with supplied id.`);
@ -97,8 +98,6 @@ export const editPurchaseOrderRouteHandler = async (req, res) => {
const updateData = {
updatedAt: new Date(),
vendor: req.body.vendor,
items: req.body.items,
cost: req.body.cost,
};
// Create audit log before updating
const result = await editObject({
@ -123,8 +122,6 @@ export const newPurchaseOrderRouteHandler = async (req, res) => {
const newData = {
updatedAt: new Date(),
vendor: req.body.vendor,
items: req.body.items,
cost: req.body.cost,
};
const result = await newObject({
model: purchaseOrderModel,
@ -161,3 +158,25 @@ export const deletePurchaseOrderRouteHandler = async (req, res) => {
res.send(result);
};
/**
 * GET /stats — aggregate statistics for purchase orders.
 * Delegates to the shared getModelStats helper and relays its result.
 * @param {import('express').Request} req - Express request (unused).
 * @param {import('express').Response} res - Express response.
 */
export const getPurchaseOrderStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: purchaseOrderModel });
  if (result?.error) {
    logger.error('Error fetching purchase order stats:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Purchase order stats:', result);
  res.send(result);
};
/**
 * GET /history — historical data for purchase orders.
 * Optional `from`/`to` query parameters bound the time window
 * (interpretation is delegated to getModelHistory — TODO confirm expected format).
 * @param {import('express').Request} req - Express request; reads req.query.from / req.query.to.
 * @param {import('express').Response} res - Express response.
 */
export const getPurchaseOrderHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const result = await getModelHistory({ model: purchaseOrderModel, from, to });
  if (result?.error) {
    logger.error('Error fetching purchase order history:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Purchase order history:', result);
  res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { shipmentModel } from '../../schemas/inventory/shipment.schema.js';
import config from '../../config.js';
import { shipmentModel } from '../../database/schemas/inventory/shipment.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Shipments');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listShipmentsRouteHandler = async (
req,
@ -175,3 +175,25 @@ export const deleteShipmentRouteHandler = async (req, res) => {
res.send(result);
};
/**
 * GET /stats — aggregate statistics for shipments.
 * Delegates to the shared getModelStats helper and relays its result.
 * @param {import('express').Request} req - Express request (unused).
 * @param {import('express').Response} res - Express response.
 */
export const getShipmentStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: shipmentModel });
  if (result?.error) {
    logger.error('Error fetching shipment stats:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Shipment stats:', result);
  res.send(result);
};
/**
 * GET /history — historical data for shipments.
 * Optional `from`/`to` query parameters bound the time window
 * (interpretation is delegated to getModelHistory — TODO confirm expected format).
 * @param {import('express').Request} req - Express request; reads req.query.from / req.query.to.
 * @param {import('express').Response} res - Express response.
 */
export const getShipmentHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const result = await getModelHistory({ model: shipmentModel, from, to });
  if (result?.error) {
    logger.error('Error fetching shipment history:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Shipment history:', result);
  res.send(result);
};

View File

@ -1,13 +1,12 @@
import dotenv from 'dotenv';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import config from '../../config.js';
import { stockAuditModel } from '../../database/schemas/inventory/stockaudit.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getAuditLogs } from '../../utils.js';
dotenv.config();
import { getModelStats, getModelHistory } from '../../database/database.js';
const logger = log4js.getLogger('Stock Audits');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listStockAuditsRouteHandler = async (
req,
@ -168,3 +167,25 @@ export const deleteStockAuditRouteHandler = async (req, res) => {
res.status(500).send({ error: error.message });
}
};
/**
 * GET /stats — aggregate statistics for stock audits.
 * Delegates to the shared getModelStats helper and relays its result.
 * @param {import('express').Request} req - Express request (unused).
 * @param {import('express').Response} res - Express response.
 */
export const getStockAuditStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: stockAuditModel });
  if (result?.error) {
    logger.error('Error fetching stock audit stats:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Stock audit stats:', result);
  res.send(result);
};
/**
 * GET /history — historical data for stock audits.
 * Optional `from`/`to` query parameters bound the time window
 * (interpretation is delegated to getModelHistory — TODO confirm expected format).
 * @param {import('express').Request} req - Express request; reads req.query.from / req.query.to.
 * @param {import('express').Response} res - Express response.
 */
export const getStockAuditHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const result = await getModelHistory({ model: stockAuditModel, from, to });
  if (result?.error) {
    logger.error('Error fetching stock audit history:', result.error);
    // Fall back to 500 if the helper omitted a status code — res.status(undefined) is invalid.
    return res.status(result.code ?? 500).send(result);
  }
  logger.trace('Stock audit history:', result);
  res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import config from '../../config.js';
import { stockEventModel } from '../../database/schemas/inventory/stockevent.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Stock Events');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listStockEventsRouteHandler = async (
req,
@ -165,3 +165,25 @@ export const deleteStockEventRouteHandler = async (req, res) => {
res.send(result);
};
export const getStockEventStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: stockEventModel });
if (result?.error) {
logger.error('Error fetching stock event stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Stock event stats:', result);
res.send(result);
};
export const getStockEventHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: stockEventModel, from, to });
if (result?.error) {
logger.error('Error fetching stock event history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Stock event history:', result);
res.send(result);
};

View File

@ -1,11 +1,11 @@
import dotenv from 'dotenv';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import config from '../../config.js';
import { auditLogModel } from '../../database/schemas/management/auditlog.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getModelStats, getModelHistory } from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('AuditLogs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listAuditLogsRouteHandler = async (
req,
@ -88,3 +88,25 @@ export const getAuditLogRouteHandler = async (req, res) => {
res.status(500).send({ error: error.message });
}
};
export const getAuditLogStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: auditLogModel });
if (result?.error) {
logger.error('Error fetching audit log stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Audit log stats:', result);
res.send(result);
};
export const getAuditLogHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: auditLogModel, from, to });
if (result?.error) {
logger.error('Error fetching audit log history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Audit log history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { courierModel } from '../../schemas/management/courier.schema.js';
import config from '../../config.js';
import { courierModel } from '../../database/schemas/management/courier.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Couriers');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listCouriersRouteHandler = async (
req,
@ -162,3 +162,25 @@ export const deleteCourierRouteHandler = async (req, res) => {
res.send(result);
};
export const getCourierStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: courierModel });
if (result?.error) {
logger.error('Error fetching courier stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Courier stats:', result);
res.send(result);
};
export const getCourierHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: courierModel, from, to });
if (result?.error) {
logger.error('Error fetching courier history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Courier history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { courierServiceModel } from '../../schemas/management/courierservice.schema.js';
import config from '../../config.js';
import { courierServiceModel } from '../../database/schemas/management/courierservice.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('CourierServices');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listCourierServicesRouteHandler = async (
req,
@ -165,3 +165,25 @@ export const deleteCourierServiceRouteHandler = async (req, res) => {
res.send(result);
};
export const getCourierServiceStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: courierServiceModel });
if (result?.error) {
logger.error('Error fetching courier service stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Courier service stats:', result);
res.send(result);
};
export const getCourierServiceHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: courierServiceModel, from, to });
if (result?.error) {
logger.error('Error fetching courier service history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Courier service history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { documentJobModel } from '../../schemas/management/documentjob.schema.js';
import config from '../../config.js';
import { documentJobModel } from '../../database/schemas/management/documentjob.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentJobsRouteHandler = async (
req,
@ -158,3 +158,25 @@ export const deleteDocumentJobRouteHandler = async (req, res) => {
res.send(result);
};
export const getDocumentJobStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: documentJobModel });
if (result?.error) {
logger.error('Error fetching document job stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document job stats:', result);
res.send(result);
};
export const getDocumentJobHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: documentJobModel, from, to });
if (result?.error) {
logger.error('Error fetching document job history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document job history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { documentPrinterModel } from '../../schemas/management/documentprinter.schema.js';
import config from '../../config.js';
import { documentPrinterModel } from '../../database/schemas/management/documentprinter.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Templates');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentPrintersRouteHandler = async (
req,
@ -167,3 +167,25 @@ export const deleteDocumentPrinterRouteHandler = async (req, res) => {
res.send(result);
};
export const getDocumentPrinterStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: documentPrinterModel });
if (result?.error) {
logger.error('Error fetching document printer stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document printer stats:', result);
res.send(result);
};
export const getDocumentPrinterHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: documentPrinterModel, from, to });
if (result?.error) {
logger.error('Error fetching document printer history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document printer history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { documentSizeModel } from '../../schemas/management/documentsize.schema.js';
import config from '../../config.js';
import { documentSizeModel } from '../../database/schemas/management/documentsize.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Sizes');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentSizesRouteHandler = async (
req,
@ -158,3 +158,25 @@ export const deleteDocumentSizeRouteHandler = async (req, res) => {
res.send(result);
};
export const getDocumentSizeStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: documentSizeModel });
if (result?.error) {
logger.error('Error fetching document size stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document size stats:', result);
res.send(result);
};
export const getDocumentSizeHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: documentSizeModel, from, to });
if (result?.error) {
logger.error('Error fetching document size history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document size history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { documentTemplateModel } from '../../schemas/management/documenttemplate.schema.js';
import config from '../../config.js';
import { documentTemplateModel } from '../../database/schemas/management/documenttemplate.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Templates');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentTemplatesRouteHandler = async (
req,
@ -184,3 +184,25 @@ export const deleteDocumentTemplateRouteHandler = async (req, res) => {
res.send(result);
};
export const getDocumentTemplateStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: documentTemplateModel });
if (result?.error) {
logger.error('Error fetching document template stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document template stats:', result);
res.send(result);
};
export const getDocumentTemplateHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: documentTemplateModel, from, to });
if (result?.error) {
logger.error('Error fetching document template history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Document template history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv, { populate } from 'dotenv';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import config from '../../config.js';
import { filamentModel } from '../../database/schemas/management/filament.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -8,11 +8,12 @@ import {
listObjectsByProperties,
editObject,
newObject,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Filaments');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listFilamentsRouteHandler = async (
req,
@ -34,7 +35,7 @@ export const listFilamentsRouteHandler = async (
search,
sort,
order,
populate: ['vendor'],
populate: ['vendor', 'costTaxRate'],
});
if (result?.error) {
@ -75,7 +76,7 @@ export const getFilamentRouteHandler = async (req, res) => {
const result = await getObject({
model: filamentModel,
id,
populate: 'vendor',
populate: ['vendor', 'costTaxRate'],
});
if (result?.error) {
logger.warn(`Filament not found with supplied id.`);
@ -101,6 +102,8 @@ export const editFilamentRouteHandler = async (req, res) => {
vendor: req.body.vendor,
type: req.body.type,
cost: req.body.cost,
costTaxRate: req.body.costTaxRate,
costWithTax: req.body.costWithTax,
diameter: req.body.diameter,
density: req.body.density,
emptySpoolWeight: req.body.emptySpoolWeight,
@ -135,6 +138,8 @@ export const newFilamentRouteHandler = async (req, res) => {
vendor: req.body.vendor,
type: req.body.type,
cost: req.body.cost,
costTaxRate: req.body.costTaxRate,
costWithTax: req.body.costWithTax,
diameter: req.body.diameter,
density: req.body.density,
emptySpoolWeight: req.body.emptySpoolWeight,
@ -154,3 +159,25 @@ export const newFilamentRouteHandler = async (req, res) => {
res.send(result);
};
export const getFilamentStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: filamentModel });
if (result?.error) {
logger.error('Error fetching filament stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Filament stats:', result);
res.send(result);
};
export const getFilamentHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: filamentModel, from, to });
if (result?.error) {
logger.error('Error fetching filament history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Filament history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { fileModel } from '../../schemas/management/file.schema.js';
import config from '../../config.js';
import { fileModel } from '../../database/schemas/management/file.schema.js';
import log4js from 'log4js';
import multer from 'multer';
import path from 'path';
@ -13,18 +13,19 @@ import {
newObject,
listObjectsByProperties,
flushFile,
getModelStats,
getModelHistory,
} from '../../database/database.js';
import {
uploadFile,
downloadFile,
deleteFile as deleteCephFile,
BUCKETS,
} from '../storage/ceph.js';
} from '../../database/ceph.js';
import { getFileMeta } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('Files');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Set storage engine to memory for Ceph upload
const fileStorage = multer.memoryStorage();
@ -347,10 +348,7 @@ export const getFileContentRouteHandler = async (req, res) => {
}
// Fallback to local file system for backward compatibility
const filePath = path.join(
process.env.FILE_STORAGE || './uploads',
file.fileName || file.name
);
const filePath = path.join(config.storage.fileStorage, file.fileName || file.name);
// Read the file
fs.readFile(filePath, (err, data) => {
@ -421,3 +419,25 @@ export const parseFileHandler = async (req, res) => {
res.status(500).send({ error: error.message });
}
};
export const getFileStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: fileModel });
if (result?.error) {
logger.error('Error fetching file stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('File stats:', result);
res.send(result);
};
export const getFileHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: fileModel, from, to });
if (result?.error) {
logger.error('Error fetching file history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('File history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { hostModel } from '../../schemas/management/host.schema.js';
import config from '../../config.js';
import { hostModel } from '../../database/schemas/management/host.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Hosts');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listHostsRouteHandler = async (
req,
@ -159,3 +159,25 @@ export const deleteHostRouteHandler = async (req, res) => {
res.send(result);
};
export const getHostStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: hostModel });
if (result?.error) {
logger.error('Error fetching host stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Host stats:', result);
res.send(result);
};
export const getHostHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: hostModel, from, to });
if (result?.error) {
logger.error('Error fetching host history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Host history:', result);
res.send(result);
};

View File

@ -1,11 +1,10 @@
import dotenv from 'dotenv';
import { materialModel } from '../../schemas/management/material.schema.js';
import config from '../../config.js';
import { materialModel } from '../../database/schemas/management/material.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
dotenv.config();
import { getModelStats, getModelHistory } from '../../database/database.js';
const logger = log4js.getLogger('Materials');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listMaterialsRouteHandler = async (
req,
@ -128,3 +127,25 @@ export const newMaterialRouteHandler = async (req, res) => {
res.status(500).send({ error: updateError.message });
}
};
export const getMaterialStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: materialModel });
if (result?.error) {
logger.error('Error fetching material stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Material stats:', result);
res.send(result);
};
export const getMaterialHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: materialModel, from, to });
if (result?.error) {
logger.error('Error fetching material history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Material history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import config from '../../config.js';
import { noteTypeModel } from '../../database/schemas/management/notetype.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Note Types');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listNoteTypesRouteHandler = async (
req,
@ -158,3 +158,25 @@ export const deleteNoteTypeRouteHandler = async (req, res) => {
res.send(result);
};
export const getNoteTypeStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: noteTypeModel });
if (result?.error) {
logger.error('Error fetching note type stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Note type stats:', result);
res.send(result);
};
export const getNoteTypeHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: noteTypeModel, from, to });
if (result?.error) {
logger.error('Error fetching note type history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Note type history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { partModel } from '../../schemas/management/part.schema.js';
import config from '../../config.js';
import { partModel } from '../../database/schemas/management/part.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Parts');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPartsRouteHandler = async (
req,
@ -53,7 +53,20 @@ export const listPartsByPropertiesRouteHandler = async (req, res, properties = '
model: partModel,
properties,
filter,
populate: ['vendor', 'priceTaxRate', 'costTaxRate'],
populate: [
{
path: 'vendor',
from: 'vendors',
},
{
path: 'priceTaxRate',
from: 'taxrates',
},
{
path: 'costTaxRate',
from: 'taxrates',
},
],
});
if (result?.error) {
@ -171,3 +184,25 @@ export const deletePartRouteHandler = async (req, res) => {
res.send(result);
};
export const getPartStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: partModel });
if (result?.error) {
logger.error('Error fetching part stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Part stats:', result);
res.send(result);
};
export const getPartHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: partModel, from, to });
if (result?.error) {
logger.error('Error fetching part history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Part history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { productModel } from '../../schemas/management/product.schema.js';
import config from '../../config.js';
import { productModel } from '../../database/schemas/management/product.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Products');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listProductsRouteHandler = async (
req,
@ -170,3 +170,25 @@ export const deleteProductRouteHandler = async (req, res) => {
res.send(result);
};
export const getProductStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: productModel });
if (result?.error) {
logger.error('Error fetching product stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Product stats:', result);
res.send(result);
};
export const getProductHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: productModel, from, to });
if (result?.error) {
logger.error('Error fetching product history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Product history:', result);
res.send(result);
};

View File

@ -1,26 +1,25 @@
import dotenv from 'dotenv';
import { jobModel } from '../../schemas/production/job.schema.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import config from '../../config.js';
import { jobModel } from '../../database/schemas/production/job.schema.js';
import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
import log4js from 'log4js';
import { printerModel } from '../../schemas/production/printer.schema.js';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import { partModel } from '../../schemas/management/part.schema.js';
import { productModel } from '../../schemas/management/product.schema.js';
import { vendorModel } from '../../schemas/management/vendor.schema.js';
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { userModel } from '../../schemas/management/user.schema.js';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import { noteModel } from '../../schemas/misc/note.schema.js';
import { printerModel } from '../../database/schemas/production/printer.schema.js';
import { filamentModel } from '../../database/schemas/management/filament.schema.js';
import { gcodeFileModel } from '../../database/schemas/production/gcodefile.schema.js';
import { partModel } from '../../database/schemas/management/part.schema.js';
import { productModel } from '../../database/schemas/management/product.schema.js';
import { vendorModel } from '../../database/schemas/management/vendor.schema.js';
import { filamentStockModel } from '../../database/schemas/inventory/filamentstock.schema.js';
import { stockEventModel } from '../../database/schemas/inventory/stockevent.schema.js';
import { stockAuditModel } from '../../database/schemas/inventory/stockaudit.schema.js';
import { partStockModel } from '../../database/schemas/inventory/partstock.schema.js';
import { auditLogModel } from '../../database/schemas/management/auditlog.schema.js';
import { userModel } from '../../database/schemas/management/user.schema.js';
import { noteTypeModel } from '../../database/schemas/management/notetype.schema.js';
import { noteModel } from '../../database/schemas/misc/note.schema.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Map prefixes to models and id fields
const PREFIX_MODEL_MAP = {

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { taxRateModel } from '../../schemas/management/taxrates.schema.js';
import config from '../../config.js';
import { taxRateModel } from '../../database/schemas/management/taxrates.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('TaxRates');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listTaxRatesRouteHandler = async (
req,
@ -166,3 +166,25 @@ export const deleteTaxRateRouteHandler = async (req, res) => {
res.send(result);
};
export const getTaxRateStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: taxRateModel });
if (result?.error) {
logger.error('Error fetching tax rate stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Tax rate stats:', result);
res.send(result);
};
export const getTaxRateHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: taxRateModel, from, to });
if (result?.error) {
logger.error('Error fetching tax rate history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Tax rate history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { taxRecordModel } from '../../schemas/management/taxrecord.schema.js';
import config from '../../config.js';
import { taxRecordModel } from '../../database/schemas/management/taxrecord.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('TaxRecords');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listTaxRecordsRouteHandler = async (
req,
@ -162,3 +162,25 @@ export const deleteTaxRecordRouteHandler = async (req, res) => {
res.send(result);
};
export const getTaxRecordStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: taxRecordModel });
if (result?.error) {
logger.error('Error fetching tax record stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Tax record stats:', result);
res.send(result);
};
export const getTaxRecordHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: taxRecordModel, from, to });
if (result?.error) {
logger.error('Error fetching tax record history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('Tax record history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { userModel } from '../../schemas/management/user.schema.js';
import config from '../../config.js';
import { userModel } from '../../database/schemas/management/user.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -7,12 +7,12 @@ import {
listObjectsByProperties,
getObject,
editObject,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Users');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listUsersRouteHandler = async (
req,
@ -117,3 +117,25 @@ export const editUserRouteHandler = async (req, res) => {
res.send(result);
};
export const getUserStatsRouteHandler = async (req, res) => {
const result = await getModelStats({ model: userModel });
if (result?.error) {
logger.error('Error fetching user stats:', result.error);
return res.status(result.code).send(result);
}
logger.trace('User stats:', result);
res.send(result);
};
export const getUserHistoryRouteHandler = async (req, res) => {
const from = req.query.from;
const to = req.query.to;
const result = await getModelHistory({ model: userModel, from, to });
if (result?.error) {
logger.error('Error fetching user history:', result.error);
return res.status(result.code).send(result);
}
logger.trace('User history:', result);
res.send(result);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { vendorModel } from '../../schemas/management/vendor.schema.js';
import config from '../../config.js';
import { vendorModel } from '../../database/schemas/management/vendor.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import {
@ -9,11 +9,11 @@ import {
editObject,
newObject,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Vendors');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listVendorsRouteHandler = async (
req,
@ -166,3 +166,25 @@ export const deleteVendorRouteHandler = async (req, res) => {
res.send(result);
};
/**
 * GET handler returning aggregate stats for the vendor collection.
 * Forwards getModelStats errors with the status code it provides.
 */
export const getVendorStatsRouteHandler = async (req, res) => {
  const stats = await getModelStats({ model: vendorModel });
  if (stats?.error) {
    logger.error('Error fetching vendor stats:', stats.error);
    res.status(stats.code).send(stats);
    return;
  }
  logger.trace('Vendor stats:', stats);
  res.send(stats);
};
/**
 * GET handler returning history data for the vendor collection,
 * bounded by the optional `from`/`to` query parameters.
 */
export const getVendorHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const history = await getModelHistory({ model: vendorModel, from, to });
  if (history?.error) {
    logger.error('Error fetching vendor history:', history.error);
    res.status(history.code).send(history);
    return;
  }
  logger.trace('Vendor history:', history);
  res.send(history);
};

View File

@ -1,16 +1,15 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { keycloak } from '../../keycloak.js';
import log4js from 'log4js';
import axios from 'axios';
import { userModel } from '../../schemas/management/user.schema.js';
import { userModel } from '../../database/schemas/management/user.schema.js';
import { readFileSync } from 'fs';
import { resolve } from 'path';
import NodeCache from 'node-cache';
import jwt from 'jsonwebtoken';
dotenv.config();
const logger = log4js.getLogger('Auth');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Initialize NodeCache with 5-minute TTL for token-based user lookup
const tokenUserCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
@ -32,16 +31,16 @@ const lookupUserByToken = async (token) => {
// Check cache first
const cachedUser = tokenUserCache.get(token);
if (cachedUser) {
logger.debug(`User found in token cache for token: ${token.substring(0, 20)}...`);
logger.trace(`User found in token cache for token: ${token.substring(0, 20)}...`);
return cachedUser;
}
// If not in cache, decode token and lookup user
logger.debug(`User not in token cache, decoding token: ${token.substring(0, 20)}...`);
logger.trace(`User not in token cache, decoding token: ${token.substring(0, 20)}...`);
const decodedToken = jwt.decode(token);
if (!decodedToken || !decodedToken.preferred_username) {
logger.warn('Invalid token or missing preferred_username');
logger.trace('Invalid token or missing preferred_username');
return null;
}
@ -51,7 +50,7 @@ const lookupUserByToken = async (token) => {
if (user) {
// Store in cache using token as key
tokenUserCache.set(token, user);
logger.debug(`User stored in token cache for token: ${token.substring(0, 20)}...`);
logger.trace(`User stored in token cache for token: ${token.substring(0, 20)}...`);
return user;
}
@ -84,27 +83,27 @@ export const loginRouteHandler = (req, res, redirectType = 'web') => {
const redirectUrl = req.query.redirect_uri || '/production/overview';
// Store the original URL to redirect after login
const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
const authUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/auth`;
const callBackState = `/auth/${redirectType}/callback`;
const callbackUrl = `${process.env.APP_URL_API}${callBackState}`;
const callbackUrl = `${config.app.urlApi}${callBackState}`;
const state = encodeURIComponent(redirectUrl);
logger.warn(req.query.redirect_uri);
res.redirect(
`${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
`${authUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
);
};
// Function to fetch user from Keycloak and store in database and session
const fetchAndStoreUser = async (req, token) => {
const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;
const userInfoUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/userinfo`;
try {
const response = await axios.post(
userInfoUrl,
new URLSearchParams({
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_id: config.auth.keycloak.clientId,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
}),
{
@ -154,14 +153,14 @@ export const loginTokenRouteHandler = async (req, res, redirectType = 'web') =>
// Otherwise, start the request and store the promise
const tokenPromise = (async () => {
const callBackState = `/auth/${redirectType}/callback`;
const callbackUrl = `${process.env.APP_URL_API}${callBackState}`;
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
const callbackUrl = `${config.app.urlApi}${callBackState}`;
const tokenUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token`;
const response = await axios.post(
tokenUrl,
new URLSearchParams({
grant_type: 'authorization_code',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_id: config.auth.keycloak.clientId,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
code: code,
redirect_uri: callbackUrl,
@ -211,16 +210,16 @@ export const loginCallbackRouteHandler = async (req, res, redirectType = 'web')
var appUrl;
switch (redirectType) {
case 'web':
appUrl = process.env.APP_URL_CLIENT || 'http://localhost:3000';
appUrl = config.app.urlClient;
break;
case 'app-scheme':
appUrl = 'farmcontrol://app';
break;
case 'app-localhost':
appUrl = process.env.APP_DEV_AUTH_CLIENT || 'http://localhost:3500';
appUrl = config.app.devAuthClient;
break;
default:
appUrl = process.env.APP_URL_CLIENT || 'http://localhost:3000';
appUrl = config.app.urlClient;
break;
}
const redirectUriRaw = `${appUrl}${state}`;
@ -325,12 +324,12 @@ export const logoutRouteHandler = (req, res) => {
}
// Construct the Keycloak logout URL with the redirect URI
const logoutUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/logout`;
const encodedRedirectUri = encodeURIComponent(`${process.env.APP_URL_CLIENT}${redirectUrl}`);
const logoutUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/logout`;
const encodedRedirectUri = encodeURIComponent(`${config.app.urlClient}${redirectUrl}`);
// Redirect to Keycloak logout with the redirect URI
res.redirect(
`${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`
`${logoutUrl}?client_id=${config.auth.keycloak.clientId}&post_logout_redirect_uri=${encodedRedirectUri}`
);
});
};
@ -365,21 +364,21 @@ export const getUserInfoHandler = (req, res) => {
// Register route - Since we're using Keycloak, registration should be handled there
// This endpoint will redirect to Keycloak's registration page
export const registerRouteHandler = (req, res) => {
const registrationUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/registrations`;
const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
const registrationUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/registrations`;
const redirectUri = encodeURIComponent(config.app.urlClient + '/auth/login');
res.redirect(
`${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
`${registrationUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${redirectUri}`
);
};
// Forgot password handler - redirect to Keycloak's reset password page
export const forgotPasswordRouteHandler = (req, res) => {
const resetUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/login-actions/reset-credentials`;
const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
const resetUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/login-actions/reset-credentials`;
const redirectUri = encodeURIComponent(config.app.urlClient + '/auth/login');
res.redirect(
`${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
`${resetUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${redirectUri}`
);
};
@ -394,14 +393,14 @@ export const refreshTokenRouteHandler = (req, res) => {
}
const refreshToken = req.session['keycloak-token'].refresh_token;
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
const tokenUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token`;
axios
.post(
tokenUrl,
new URLSearchParams({
grant_type: 'refresh_token',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_id: config.auth.keycloak.clientId,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
refresh_token: refreshToken,
}).toString(),

View File

@ -1,4 +1,4 @@
import { models } from '../../schemas/models.js';
import { models } from '../../database/schemas/models.js';
/**
* Get all models from the PREFIX_MODEL_MAP

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { noteModel } from '../../schemas/misc/note.schema.js';
import config from '../../config.js';
import { noteModel } from '../../database/schemas/misc/note.schema.js';
import log4js from 'log4js';
import {
deleteObject,
@ -9,13 +9,13 @@ import {
listObjectsByProperties,
newObject,
recursivelyDeleteChildObjects,
getModelStats,
getModelHistory,
} from '../../database/database.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('Notes');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listNotesRouteHandler = async (
req,
@ -73,7 +73,7 @@ export const getNoteRouteHandler = async (req, res) => {
const result = await getObject({
model: noteModel,
id,
populate: ['noteType', 'user'],
populate: ['noteType', 'user', 'parent'],
});
if (result?.error) {
logger.warn(`Note not found with supplied id.`);
@ -161,3 +161,25 @@ export const deleteNoteRouteHandler = async (req, res) => {
status: 'ok',
});
};
/**
 * GET handler returning aggregate stats for the note collection.
 * Forwards getModelStats errors with the status code it provides.
 */
export const getNoteStatsRouteHandler = async (req, res) => {
  const stats = await getModelStats({ model: noteModel });
  if (stats?.error) {
    logger.error('Error fetching note stats:', stats.error);
    res.status(stats.code).send(stats);
    return;
  }
  logger.trace('Note stats:', stats);
  res.send(stats);
};
/**
 * GET handler returning history data for the note collection,
 * bounded by the optional `from`/`to` query parameters.
 */
export const getNoteHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const history = await getModelHistory({ model: noteModel, from, to });
  if (history?.error) {
    logger.error('Error fetching note history:', history.error);
    res.status(history.code).send(history);
    return;
  }
  logger.trace('Note history:', history);
  res.send(history);
};

View File

@ -1,11 +1,10 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getAllModels, getModelByPrefix } from './model.js';
dotenv.config();
const logger = log4js.getLogger('Spotlight');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Helper function to build search filter from query parameters
const buildSearchFilter = (params) => {

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import config from '../../config.js';
import { gcodeFileModel } from '../../database/schemas/production/gcodefile.schema.js';
import log4js from 'log4js';
import {
deleteObject,
@ -12,10 +12,8 @@ import {
import { getFileContentRouteHandler } from '../management/files.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('GCodeFiles');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listGCodeFilesRouteHandler = async (
req,

View File

@ -1,6 +1,6 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import mongoose from 'mongoose';
import { jobModel } from '../../schemas/production/job.schema.js';
import { jobModel } from '../../database/schemas/production/job.schema.js';
import log4js from 'log4js';
import {
deleteObject,
@ -8,12 +8,12 @@ import {
listObjects,
listObjectsByProperties,
newObject,
getModelStats,
getModelHistory,
} from '../../database/database.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
dotenv.config();
import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listJobsRouteHandler = async (
req,
@ -161,26 +161,24 @@ export const deleteJobRouteHandler = async (req, res) => {
};
/**
 * GET handler returning aggregate stats for the job collection.
 * Delegates to getModelStats (which superseded the old inline
 * aggregation pipeline) and forwards its error payload/status on
 * failure. Removed the leftover `console.log('Getting job stats')`
 * debug statement — the logger is the project's logging channel.
 */
export const getJobStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: jobModel });
  if (result?.error) {
    logger.error('Error fetching job stats:', result.error);
    return res.status(result.code).send(result);
  }
  logger.trace('Job stats:', result);
  res.send(result);
};
/**
 * GET handler returning history data for the job collection,
 * bounded by the optional `from`/`to` query parameters.
 */
export const getJobHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const history = await getModelHistory({ model: jobModel, from, to });
  if (history?.error) {
    logger.error('Error fetching job history:', history.error);
    res.status(history.code).send(history);
    return;
  }
  logger.trace('Job history:', history);
  res.send(history);
};

View File

@ -1,5 +1,5 @@
import dotenv from 'dotenv';
import { printerModel } from '../../schemas/production/printer.schema.js';
import config from '../../config.js';
import { printerModel } from '../../database/schemas/production/printer.schema.js';
import log4js from 'log4js';
import {
deleteObject,
@ -8,13 +8,13 @@ import {
listObjects,
listObjectsByProperties,
newObject,
getModelStats,
getModelHistory,
} from '../../database/database.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('Printers');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPrintersRouteHandler = async (
req,
@ -169,26 +169,24 @@ export const deletePrinterRouteHandler = async (req, res) => {
};
/**
 * GET handler returning aggregate stats for the printer collection.
 *
 * Fixes: the error branch tested `if (!result)` and then dereferenced
 * `result.code` — a guaranteed TypeError whenever the guard actually
 * fired, and error results (truthy objects with an `error` field) were
 * never caught at all. Now checks `result?.error` like every sibling
 * stats handler. Also drops the stray `console.log(result)` debug line.
 */
export const getPrinterStatsRouteHandler = async (req, res) => {
  const result = await getModelStats({ model: printerModel });
  if (result?.error) {
    logger.error('Error fetching printer stats:', result.error);
    return res.status(result.code).send(result);
  }
  logger.trace('Printer stats:', result);
  res.send(result);
};
/**
 * GET handler returning history data for the printer collection,
 * bounded by the optional `from`/`to` query parameters.
 */
export const getPrinterHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const history = await getModelHistory({ model: printerModel, from, to });
  if (history?.error) {
    logger.error('Error fetching printer history:', history.error);
    res.status(history.code).send(history);
    return;
  }
  logger.trace('Printer history:', history);
  res.send(history);
};

View File

@ -1,11 +1,15 @@
import dotenv from 'dotenv';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import config from '../../config.js';
import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
import log4js from 'log4js';
import { getObject, listObjects, listObjectsByProperties } from '../../database/database.js';
dotenv.config();
import {
getObject,
listObjects,
listObjectsByProperties,
getModelStats,
getModelHistory,
} from '../../database/database.js';
const logger = log4js.getLogger('Sub Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listSubJobsRouteHandler = async (
req,
@ -27,7 +31,7 @@ export const listSubJobsRouteHandler = async (
search,
sort,
order,
populate: ['printer'],
populate: ['printer', 'job'],
});
if (result?.error) {
@ -69,6 +73,7 @@ export const getSubJobRouteHandler = async (req, res) => {
const result = await getObject({
model: subJobModel,
id,
populate: ['printer'],
});
if (result?.error) {
logger.warn(`Sub job not found with supplied id.`);
@ -77,3 +82,25 @@ export const getSubJobRouteHandler = async (req, res) => {
logger.debug(`Retreived sub job with ID: ${id}`);
res.send(result);
};
/**
 * GET handler returning aggregate stats for the sub-job collection.
 * Forwards getModelStats errors with the status code it provides.
 */
export const getSubJobStatsRouteHandler = async (req, res) => {
  const stats = await getModelStats({ model: subJobModel });
  if (stats?.error) {
    logger.error('Error fetching sub job stats:', stats.error);
    res.status(stats.code).send(stats);
    return;
  }
  logger.trace('Sub job stats:', stats);
  res.send(stats);
};
/**
 * GET handler returning history data for the sub-job collection,
 * bounded by the optional `from`/`to` query parameters.
 */
export const getSubJobHistoryRouteHandler = async (req, res) => {
  const { from, to } = req.query;
  const history = await getModelHistory({ model: subJobModel, from, to });
  if (history?.error) {
    logger.error('Error fetching sub job history:', history.error);
    res.status(history.code).send(history);
    return;
  }
  logger.trace('Sub job history:', history);
  res.send(history);
};

View File

@ -1,24 +1,14 @@
import { ObjectId } from 'mongodb';
import { auditLogModel } from './schemas/management/auditlog.schema.js';
import { auditLogModel } from './database/schemas/management/auditlog.schema.js';
import exifr from 'exifr';
import { etcdServer } from './database/etcd.js';
import { natsServer } from './database/nats.js';
import log4js from 'log4js';
import dotenv from 'dotenv';
import config from './config.js';
import crypto from 'crypto';
import canonicalize from 'canonical-json';
dotenv.config();
const logger = log4js.getLogger('Utils');
logger.level = process.env.LOG_LEVEL;
import { customAlphabet } from 'nanoid';
const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
const generateId = () => {
// 10 characters
return customAlphabet(ALPHABET, 12);
};
logger.level = config.server.logLevel;
function buildWildcardRegexPattern(input) {
// Escape all regex special chars except * (which we treat as a wildcard)
@ -434,6 +424,10 @@ async function distributeUpdate(value, id, type) {
await natsServer.publish(`${type}s.${id}.object`, value);
}
/**
 * Broadcast a stats payload for the given model type on NATS
 * (subject pattern: `<type>s.stats`, e.g. "jobs.stats").
 */
async function distributeStats(value, type) {
  const subject = `${type}s.stats`;
  await natsServer.publish(subject, value);
}
/**
 * Announce a newly created object of the given type on NATS
 * (subject pattern: `<type>s.new`).
 */
async function distributeNew(value, type) {
  const subject = `${type}s.new`;
  await natsServer.publish(subject, value);
}
@ -693,6 +687,21 @@ function jsonToCacheKey(obj) {
return hash;
}
/**
 * Build a deterministic cache key for a single-object query.
 *
 * Fix: the original tested `typeof pop === 'object' && pop.path`, but
 * `typeof null === 'object'`, so a null/undefined entry in a populate
 * array threw a TypeError; the access is now guarded with `?.`.
 *
 * @param {Object} args
 * @param {string} args.model - Model name used as the key prefix.
 * @param {*} args.id - Object id; stringified via toString(), rendered
 *   as "undefined" when absent.
 * @param {string|Object|Array} [args.populate] - Mongoose-style populate
 *   spec: a path string, a { path } object, or an array of either.
 *   Unrecognized entries are ignored.
 * @returns {string} Key of the form "<model>:<id>-<path1,path2,...>".
 */
export function getQueryToCacheKey({ model, id, populate }) {
  const paths = [];
  if (populate) {
    const specs = Array.isArray(populate) ? populate : [populate];
    for (const spec of specs) {
      if (typeof spec === 'string') {
        paths.push(spec);
      } else if (typeof spec === 'object' && spec?.path) {
        paths.push(spec.path);
      }
    }
  }
  return `${model}:${id?.toString()}-${paths.join(',')}`;
}
export {
parseFilter,
convertToCamelCase,
@ -703,6 +712,7 @@ export {
flatternObjectIds,
expandObjectIds,
distributeUpdate,
distributeStats,
distributeNew,
distributeDelete,
distributeChildUpdate,
@ -714,5 +724,4 @@ export {
modelHasRef,
getFieldsByRef,
jsonToCacheKey,
generateId,
};