Compare commits
No commits in common. "b24c9e1b3e324e463998cec2281f529a9ec55538" and "dc87278ca634fe394004959b60989d8df6016d7e" have entirely different histories.
b24c9e1b3e...dc87278ca6
config.json (99 lines deleted)
@@ -1,99 +0,0 @@
-{
-  "development": {
-    "server": {
-      "port": 8787,
-      "logLevel": "debug"
-    },
-    "auth": {
-      "enabled": true,
-      "keycloak": {
-        "url": "https://auth.tombutcher.work",
-        "realm": "master",
-        "clientId": "farmcontrol-client"
-      },
-      "requiredRoles": []
-    },
-    "app": {
-      "urlClient": "http://localhost:3000",
-      "urlElectronClient": "http://localhost:3000",
-      "urlApi": "http://localhost:8787",
-      "devAuthClient": "http://localhost:3500"
-    },
-    "database": {
-      "mongo": {
-        "url": "mongodb://127.0.0.1:27017/farmcontrol",
-        "link": "127.0.0.1:27017"
-      },
-      "redis": {
-        "url": "",
-        "host": "localhost",
-        "port": 6379,
-        "password": "",
-        "cacheTtl": 30
-      },
-      "nats": {
-        "host": "localhost",
-        "port": 4222
-      }
-    },
-    "storage": {
-      "fileStorage": "./uploads",
-      "ceph": {
-        "accessKeyId": "minioadmin",
-        "secretAccessKey": "minioadmin123",
-        "endpoint": "http://127.0.0.1:9000",
-        "region": "us-east-1",
-        "filesBucket": "farmcontrol"
-      }
-    },
-    "otpExpiryMins": 0.5
-  },
-  "production": {
-    "server": {
-      "port": 8080,
-      "logLevel": "info"
-    },
-    "auth": {
-      "enabled": true,
-      "keycloak": {
-        "url": "https://auth.tombutcher.work",
-        "realm": "master",
-        "clientId": "farmcontrol-client"
-      },
-      "requiredRoles": []
-    },
-    "app": {
-      "urlClient": "http://localhost:3000",
-      "urlElectronClient": "http://localhost:3000",
-      "urlApi": "http://localhost:8080",
-      "devAuthClient": "http://localhost:3500"
-    },
-    "database": {
-      "mongo": {
-        "url": "mongodb://localhost:27017/farmcontrol",
-        "link": "localhost:27017"
-      },
-      "redis": {
-        "url": "",
-        "host": "localhost",
-        "port": 6379,
-        "password": "",
-        "cacheTtl": 30
-      },
-      "nats": {
-        "host": "localhost",
-        "port": 4222
-      }
-    },
-    "storage": {
-      "fileStorage": "./uploads",
-      "ceph": {
-        "accessKeyId": "minioadmin",
-        "secretAccessKey": "minioadmin123",
-        "endpoint": "http://127.0.0.1:9000",
-        "region": "us-east-1",
-        "filesBucket": "farmcontrol"
-      }
-    }
-  }
-}
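Note: everything below this point replaces the deleted config.json with dotenv-loaded environment variables. The variable names (PORT, LOG_LEVEL, DB_LINK, REDIS_URL/REDIS_HOST/REDIS_PORT/REDIS_PASSWORD/REDIS_CACHE_TTL, NATS_HOST, NATS_PORT, KEYCLOAK_URL/KEYCLOAK_REALM/KEYCLOAK_CLIENT_ID/KEYCLOAK_CLIENT_SECRET, APP_URL_CLIENT, APP_URL_ELECTRON_CLIENT, SCHEDULE_HOUR) are all taken from the hunks that follow; the .env file itself is not part of this compare, so the sketch below is hypothetical, with values mirroring the old development block:

    PORT=8787
    LOG_LEVEL=debug
    DB_LINK=127.0.0.1:27017
    REDIS_HOST=localhost
    REDIS_PORT=6379
    REDIS_CACHE_TTL=30
    NATS_HOST=localhost
    NATS_PORT=4222
    KEYCLOAK_URL=https://auth.tombutcher.work
    KEYCLOAK_REALM=master
    KEYCLOAK_CLIENT_ID=farmcontrol-client
    KEYCLOAK_CLIENT_SECRET=change-me   # not present in the old config.json
    APP_URL_CLIENT=http://localhost:3000
    APP_URL_ELECTRON_CLIENT=http://localhost:3000
    SCHEDULE_HOUR=6                    # optional; enables the reseed cron in src/index.js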
fcdev.js (2 lines changed)
@@ -6,7 +6,7 @@ const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);

 async function syncModelsWithWS() {
-  const sourceDir = path.resolve(__dirname, 'src/database/schemas');
+  const sourceDir = path.resolve(__dirname, 'src/schemas');
   const targetDir = path.resolve(__dirname, '../farmcontrol-ws/src/database/schemas');

   console.log(`Syncing schemas from ${sourceDir} to ${targetDir}...`);
package.json (1 line added)
@@ -13,6 +13,7 @@
     "canonical-json": "^0.2.0",
     "cors": "^2.8.5",
     "dotenv": "^17.2.3",
+    "etcd3": "^1.1.2",
     "exifr": "^7.1.3",
     "express": "^5.1.0",
     "express-session": "^1.18.2",
src/config.js (deleted, 42 lines)
@@ -1,42 +0,0 @@
-// config.js - Configuration handling
-import fs from 'fs';
-import path from 'path';
-import { fileURLToPath } from 'url';
-
-// Configure paths relative to this file
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-const CONFIG_PATH = path.resolve(__dirname, '../config.json');
-
-// Determine environment
-const NODE_ENV = process.env.NODE_ENV || 'development';
-
-// Load config file
-function loadConfig() {
-  try {
-    if (!fs.existsSync(CONFIG_PATH)) {
-      throw new Error(`Configuration file not found at ${CONFIG_PATH}`);
-    }
-
-    const configData = fs.readFileSync(CONFIG_PATH, 'utf8');
-    const config = JSON.parse(configData);
-
-    if (!config[NODE_ENV]) {
-      throw new Error(`Configuration for environment '${NODE_ENV}' not found in config.json`);
-    }
-
-    return config[NODE_ENV];
-  } catch (err) {
-    console.error('Error loading config:', err);
-    throw err;
-  }
-}
-
-// Get current environment
-export function getEnvironment() {
-  return NODE_ENV;
-}
-
-// Export singleton config instance
-const config = loadConfig();
-export default config;
src/database/ReseedAction.js (new file, 36 lines)
@@ -0,0 +1,36 @@
+import mongoose from 'mongoose';
+import bcrypt from 'bcrypt';
+import { userModel } from '../schemas/management/user.schema.js';
+import { dbConnect } from './mongo.js';
+
+const ReseedAction = () => {
+  async function clear() {
+    dbConnect();
+    await userModel.deleteMany({});
+    console.log('DB cleared');
+  }
+
+  async function seedDB() {
+    await clear();
+    const salt = await bcrypt.genSalt(10);
+    const hashPassword = await bcrypt.hash('secret', salt);
+
+    const user = {
+      _id: mongoose.Types.ObjectId(1),
+      name: 'Admin',
+      email: 'admin@jsonapi.com',
+      password: hashPassword,
+      createdAt: new Date(),
+      profile_image: '../../images/admin.jpg',
+    };
+
+    const admin = new userModel(user);
+    await admin.save();
+
+    console.log('DB seeded');
+  }
+
+  seedDB();
+};
+
+export default ReseedAction;
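Note: `_id: mongoose.Types.ObjectId(1)` in the new file above is called without `new`; recent Mongoose releases (v7 and later) throw "Class constructor ObjectId cannot be invoked without 'new'" for that form, and the sibling seedData.js added later in this compare already uses `new`. A minimal corrected sketch, assuming the intent is unchanged:

    // Inside seedDB() in src/database/ReseedAction.js; `new` is required by
    // recent Mongoose/BSON versions, and the integer argument is interpreted
    // as a creation timestamp when building the ObjectId.
    const user = {
      _id: new mongoose.Types.ObjectId(1),
      // ...remaining fields as in the diff above
    };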
src/database/database.js
@@ -1,5 +1,5 @@
-import config from '../config.js';
-import { fileModel } from './schemas/management/file.schema.js';
+import dotenv from 'dotenv';
+import { fileModel } from '../schemas/management/file.schema.js';
 import _ from 'lodash';
 import {
   deleteAuditLog,
@@ -7,7 +7,7 @@ import {
   expandObjectIds,
   modelHasRef,
   getFieldsByRef,
-  getQueryToCacheKey,
+  jsonToCacheKey,
 } from '../utils.js';
 import log4js from 'log4js';
 import {
@@ -18,31 +18,36 @@ import {
   distributeChildUpdate,
   distributeChildDelete,
   distributeChildNew,
-  distributeStats,
 } from '../utils.js';
 import { getAllModels } from '../services/misc/model.js';
 import { redisServer } from './redis.js';
-import { auditLogModel } from './schemas/management/auditlog.schema.js';
+
+dotenv.config();

 const logger = log4js.getLogger('Database');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 const cacheLogger = log4js.getLogger('DatabaseCache');
-cacheLogger.level = config.server.logLevel;
+cacheLogger.level = process.env.LOG_LEVEL;

-const CACHE_TTL_SECONDS = parseInt(config.database.redis.cacheTtl || '30', 10);
+const CACHE_TTL_SECONDS = parseInt(process.env.REDIS_CACHE_TTL || '30', 10);

 export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
   if (!model || !id) return undefined;

-  const cacheKey = getQueryToCacheKey({ model: model.modelName, id, populate });
-
-  cacheLogger.trace('Retrieving object from cache:', { model: model.modelName, id, populate });
+  const cacheKeyObject = {
+    model: model.modelName,
+    id: id.toString(),
+  };
+
+  const cacheKey = jsonToCacheKey(cacheKeyObject);
+
+  cacheLogger.trace('Retrieving object from cache:', cacheKeyObject);

   try {
     const cachedObject = await redisServer.getKey(cacheKey);
     if (cachedObject == null) {
-      cacheLogger.trace('Cache miss:', { model: model.modelName, id });
+      cacheLogger.trace('Cache miss:', cacheKeyObject);
       return undefined;
     }
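Note: this compare switches cache keys from `Model:id*` string patterns (built by getQueryToCacheKey) to a canonical serialization of a small descriptor object. src/utils.js is not part of the compare, so the following jsonToCacheKey is only a plausible sketch; it assumes the function leans on the canonical-json dependency already listed in package.json, which sorts object keys during serialization so that property order cannot produce two different keys for the same descriptor:

    // Hypothetical sketch only - the real jsonToCacheKey lives in src/utils.js,
    // which this compare does not show.
    import stringify from 'canonical-json';

    export const jsonToCacheKey = (json) => stringify(json);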
@@ -61,285 +66,48 @@ export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
 export const updateObjectCache = async ({ model, id, object, populate = [] }) => {
   if (!model || !id || !object) return object;

-  const cacheKeyFilter = `${model.modelName}:${id?.toString()}*`;
-  const cacheKey = getQueryToCacheKey({ model: model.modelName, id, populate });
-
-  cacheLogger.trace('Updating object cache:', cacheKeyFilter);
+  const cacheKeyObject = {
+    model: model.modelName,
+    id: id.toString(),
+  };
+
+  const cacheKey = jsonToCacheKey(cacheKeyObject);
+
+  cacheLogger.trace('Updating object cache:', cacheKeyObject);

   try {
-    // Get all keys matching the filter pattern
-    const matchingKeys = await redisServer.getKeysByPattern(cacheKeyFilter);
-    // Merge the object with each cached object and update
-    const mergedObjects = [];
-    for (const key of matchingKeys) {
-      logger.trace('Updating object cache:', key);
-      const cachedObject = (await redisServer.getKey(key)) || {};
-      const mergedObject = _.merge(cachedObject, object);
-      await redisServer.setKey(key, mergedObject, CACHE_TTL_SECONDS);
-      mergedObjects.push(mergedObject);
-    }
-
-    const cacheObject = (await redisServer.getKey(cacheKey)) || {};
-    const mergedObject = _.merge(cacheObject, object);
+    const cachedObject = (await redisServer.getKey(cacheKey)) || {};
+    const mergedObject = _.merge(cachedObject, object);
     await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);
-
-    cacheLogger.trace('Updated object cache:', {
-      filter: cacheKeyFilter,
-      keysUpdated: matchingKeys.length,
-    });
-
-    // Return the merged object
-    return mergedObject;
+    cacheLogger.trace('Updated object cache:', cacheKeyObject);
   } catch (err) {
     cacheLogger.error('Error updating object in Redis cache:', err);
-    // Fallback to returning the provided object if cache fails
-    return object;
   }
+
+  return object;
 };

 export const deleteObjectCache = async ({ model, id }) => {
   if (!model || !id) return;

-  const cacheKeyFilter = `${model.modelName}:${id?.toString()}*`;
-
-  cacheLogger.trace('Deleting object cache:', cacheKeyFilter);
+  const cacheKeyObject = {
+    model: model.modelName,
+    id: id.toString(),
+    populate: [],
+  };
+
+  const cacheKey = jsonToCacheKey(cacheKeyObject);
+
+  cacheLogger.trace('Deleting object cache:', cacheKeyObject);

   try {
-    // Get all keys matching the filter pattern and delete them
-    const matchingKeys = await redisServer.getKeysByPattern(cacheKeyFilter);
-
-    for (const cacheKey of matchingKeys) {
-      await redisServer.deleteKey(cacheKey);
-    }
-
-    cacheLogger.trace('Deleted object cache:', {
-      filter: cacheKeyFilter,
-      keysDeleted: matchingKeys.length,
-    });
+    await redisServer.deleteKey(cacheKey);
+    cacheLogger.trace('Deleted object cache:', cacheKeyObject);
   } catch (err) {
     cacheLogger.error('Error deleting object from Redis cache:', err);
   }
 };

-// Utility to run one or many rollup aggregations in a single query via $facet.
-export const aggregateRollups = async ({ model, baseFilter = {}, rollupConfigs = [] }) => {
-  if (!rollupConfigs.length) {
-    return {};
-  }
-
-  const facetStage = rollupConfigs.reduce((facets, definition, index) => {
-    const key = definition.name || `rollup${index}`;
-    const matchStage = { $match: { ...baseFilter, ...(definition.filter || {}) } };
-    const groupStage = { $group: { _id: null } };
-
-    (definition.rollups || []).forEach((rollup) => {
-      switch (rollup.operation) {
-        case 'sum':
-          groupStage.$group[rollup.name] = { $sum: `$${rollup.property}` };
-          break;
-        case 'count':
-          groupStage.$group[rollup.name] = { $sum: 1 };
-          break;
-        case 'avg':
-          groupStage.$group[rollup.name] = { $avg: `$${rollup.property}` };
-          break;
-        default:
-          throw new Error(`Unsupported rollup operation: ${rollup.operation}`);
-      }
-    });
-
-    facets[key] = [matchStage, groupStage];
-    return facets;
-  }, {});
-
-  const [results] = await model.aggregate([{ $facet: facetStage }]);
-
-  return rollupConfigs.reduce((acc, definition, index) => {
-    const key = definition.name || `rollup${index}`;
-    const rawResult = results?.[key]?.[0] || {};
-
-    // Transform the result to nest rollup values under operation type
-    const transformedResult = {};
-    (definition.rollups || []).forEach((rollup) => {
-      const value = rawResult[rollup.name] || 0;
-      // If there's only one rollup and its name matches the key, flatten the structure
-      if (definition.rollups.length === 1 && rollup.name === key) {
-        transformedResult[rollup.operation] = value;
-      } else {
-        transformedResult[rollup.name] = { [rollup.operation]: value };
-      }
-    });
-
-    acc[key] = transformedResult;
-    return acc;
-  }, {});
-};
-
-// Reusable function to aggregate rollups over history using state reconstruction
-export const aggregateRollupsHistory = async ({
-  model,
-  baseFilter = {},
-  rollupConfigs = [],
-  startDate,
-  endDate,
-}) => {
-  if (!rollupConfigs.length) {
-    return [];
-  }
-
-  // Set default dates if not provided
-  const end = endDate ? new Date(endDate) : new Date();
-  const start = startDate ? new Date(startDate) : new Date(end.getTime() - 24 * 60 * 60 * 1000);
-
-  // Get model name for filtering audit logs
-  const parentType = model.modelName ? model.modelName : 'unknown';
-
-  // 1. Fetch all audit logs for this model type from start date to now
-  // Filter by parentType instead of fetching object IDs first
-  const auditLogs = await auditLogModel
-    .find({
-      parentType,
-      createdAt: { $gte: start },
-    })
-    .sort({ createdAt: -1 }) // Newest first
-    .lean();
-
-  // 2. Extract unique parent IDs from audit logs
-  const parentIds = [...new Set(auditLogs.map((log) => log.parent.toString()))];
-
-  if (parentIds.length === 0) {
-    return [];
-  }
-
-  // 3. Fetch current state of relevant objects that match baseFilter
-  // Note: This only includes objects that CURRENTLY match the baseFilter.
-  // Objects that matched in the past but don't match now are excluded.
-  const currentObjects = await model
-    .find({
-      _id: { $in: parentIds },
-      ...baseFilter,
-    })
-    .lean();
-  const objectMap = new Map();
-  currentObjects.forEach((obj) => {
-    // Ensure _id is a string for map keys
-    objectMap.set(obj._id.toString(), expandObjectIds(obj));
-  });
-
-  if (objectMap.size === 0) {
-    return [];
-  }
-
-  // Helper to check if object matches filter
-  const matchesFilter = (obj, filter) => {
-    if (!filter || Object.keys(filter).length === 0) return true;
-
-    for (const [key, expectedValue] of Object.entries(filter)) {
-      const actualValue = _.get(obj, key);
-
-      // Handle simple equality
-      if (actualValue != expectedValue) {
-        return false;
-      }
-    }
-    return true;
-  };
-
-  // 3. Generate time buckets (1 minute intervals)
-  const buckets = [];
-  let currentTime = new Date(end);
-  // Round down to nearest minute
-  currentTime.setSeconds(0, 0);
-
-  while (currentTime >= start) {
-    buckets.push(new Date(currentTime));
-    currentTime = new Date(currentTime.getTime() - 60000); // -1 minute
-  }
-
-  // 4. Rewind state and snapshot
-  const results = [];
-  let logIndex = 0;
-
-  // Create a working copy of objects to mutate during rewind
-  // (deep clone to avoid issues if we need original later, though expandObjectIds creates new objs)
-  const workingObjects = new Map();
-  objectMap.forEach((val, key) => workingObjects.set(key, _.cloneDeep(val)));
-
-  // Iterate backwards through time
-  for (const bucketDate of buckets) {
-    // Apply all logs that happened AFTER this bucket time (between last bucket and this one)
-    // Since we iterate backwards, these are logs with createdAt > bucketDate
-    while (logIndex < auditLogs.length) {
-      const log = auditLogs[logIndex];
-      const logDate = new Date(log.createdAt);
-
-      if (logDate <= bucketDate) {
-        // This log happened at or before the current bucket time,
-        // so its effects are already present (or rather, will be handled in a future/earlier bucket).
-        // Stop processing logs for this step.
-        break;
-      }
-
-      // Revert this change
-      const objectId = log.parent.toString();
-      const object = workingObjects.get(objectId);
-
-      if (object) {
-        if (log.operation === 'new') {
-          // Object didn't exist before this creation event
-          workingObjects.delete(objectId);
-        } else if (log.changes && log.changes.old) {
-          // Apply old values to revert state
-          _.merge(object, log.changes.old);
-        }
-      }
-
-      logIndex++;
-    }
-
-    // Snapshot: Calculate rollups for current state of all objects
-    const bucketResult = {
-      date: bucketDate.toISOString(),
-    };
-
-    const activeObjects = Array.from(workingObjects.values());
-
-    rollupConfigs.forEach((config) => {
-      const configName = config.name;
-
-      // Filter objects for this config
-      // Note: We also check baseFilter here in case the object state reverted to something
-      // that no longer matches baseFilter (e.g. active: false)
-      const matchingObjects = activeObjects.filter(
-        (obj) => matchesFilter(obj, baseFilter) && matchesFilter(obj, config.filter)
-      );
-
-      // Calculate rollups
-      (config.rollups || []).forEach((rollup) => {
-        const rollupName = rollup.name;
-
-        let value = 0;
-        if (rollup.operation === 'count') {
-          value = matchingObjects.length;
-        } else if (rollup.operation === 'sum') {
-          value = _.sumBy(matchingObjects, (obj) => _.get(obj, rollup.property) || 0);
-        } else if (rollup.operation === 'avg') {
-          const sum = _.sumBy(matchingObjects, (obj) => _.get(obj, rollup.property) || 0);
-          value = matchingObjects.length ? sum / matchingObjects.length : 0;
-        }
-
-        // Nest the value under the operation type
-        bucketResult[rollupName] = { [rollup.operation]: value };
-      });
-    });
-
-    results.push(bucketResult);
-  }
-
-  // Reverse results to be chronological
-  return results.reverse();
-};
-
 // Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
 export const listObjects = async ({
   model,
@@ -556,13 +324,14 @@ export const listObjectsByProperties = async ({
       } else if (typeof pop === 'object' && pop.path) {
         pipeline.push({
           $lookup: {
-            from: pop.from ? pop.from : pop.path.toLowerCase(),
+            from:
+              pop.options && pop.options.from ? pop.options.from : pop.path.toLowerCase() + 's',
             localField: pop.path,
             foreignField: '_id',
             as: pop.path,
           },
         });
-        if (pop?.multiple == false || pop?.multiple == undefined) {
+        if (!pop.justOne === false) {
           // default to unwind unless justOne is explicitly false
           pipeline.push({
             $unwind: {
@@ -594,7 +363,6 @@ export const listObjectsByProperties = async ({

       // Run aggregation
       const results = await model.aggregate(pipeline);
-      console.log('results', results);
       return nestGroups(results, properties, filter);
     } else {
       // If no properties specified, just return all objects without grouping
@@ -667,22 +435,6 @@ export const getObject = async ({ model, id, populate }) => {
   }
 };

-export const getModelStats = async ({ model }) => {
-  if (!model.stats) {
-    logger.warn(`Model ${model.modelName} does not have a stats method.`);
-    return { error: 'Model does not have a stats method.', code: 500 };
-  }
-  return await model.stats();
-};
-
-export const getModelHistory = async ({ model, from, to }) => {
-  if (!model.history && !from && !to) {
-    logger.warn(`Model ${model.modelName} does not have a history method.`);
-    return { error: 'Model does not have a history method.', code: 500 };
-  }
-  return await model.history(from, to);
-};
-
 export const listObjectDependencies = async ({ model, id }) => {
   try {
     const dependencies = [];
@@ -810,17 +562,6 @@ export const editObject = async ({ model, id, updateData, user, populate }) => {
       populate,
     });

-    if (model.recalculate) {
-      logger.debug(`Recalculating ${model.modelName}`);
-      await model.recalculate(updatedObject, user);
-    }
-
-    if (model.stats) {
-      logger.debug(`Getting stats for ${model.modelName}`);
-      const statsData = await model.stats();
-      await distributeStats(statsData, parentType);
-    }
-
     return updatedObject;
   } catch (error) {
     logger.error('editObject error:', error);
@@ -853,17 +594,6 @@ export const newObject = async ({ model, newData, user = null }, distributeChang
       populate: [],
     });

-    if (model.recalculate) {
-      logger.debug(`Recalculating ${model.modelName}`);
-      await model.recalculate(created, user);
-    }
-
-    if (model.stats) {
-      logger.debug(`Getting stats for ${model.modelName}`);
-      const statsData = await model.stats();
-      await distributeStats(statsData, parentType);
-    }
-
     return created;
   } catch (error) {
     logger.error('newObject error:', error);
@@ -895,17 +625,6 @@ export const deleteObject = async ({ model, id, user = null }, distributeChanges
     // Invalidate cache for this object
     await deleteObjectCache({ model, id });

-    if (model.recalculate) {
-      logger.debug(`Recalculating ${model.modelName}`);
-      await model.recalculate(deleted, user);
-    }
-
-    if (model.stats) {
-      logger.debug(`Getting stats for ${model.modelName}`);
-      const statsData = await model.stats();
-      await distributeStats(statsData, parentType);
-    }
-
     return { deleted: true, object: deleted };
   } catch (error) {
     logger.error('deleteObject error:', error);
@@ -967,8 +686,8 @@ export const flushFile = async ({ id, user }) => {
     // Try to delete from Ceph storage if it exists
     if (file.extension) {
       try {
-        const { deleteFile } = await import('./ceph.js');
-        const { BUCKETS } = await import('./ceph.js');
+        const { deleteFile } = await import('../services/storage/ceph.js');
+        const { BUCKETS } = await import('../services/storage/ceph.js');
         const cephKey = `files/${file._id}${file.extension}`;

         await deleteFile(BUCKETS.FILES, cephKey);
src/database/mongo.js
@@ -1,18 +1,19 @@
 import mongoose from 'mongoose';
-import config from '../config.js';
+import dotenv from 'dotenv';
 import log4js from 'log4js';

 const logger = log4js.getLogger('MongoDB');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

+dotenv.config();
+
 // Set strictQuery to false to prepare for Mongoose 7
 mongoose.set('strictQuery', false);

 function dbConnect() {
-  logger.info(`Connecting to MongoDB...`);
-  mongoose.connection.once('open', () => logger.info('Connected to MongoDB.'));
+  mongoose.connection.once('open', () => logger.info('Database connected.'));
   return mongoose.connect(
-    `mongodb://${config.database.mongo.link}/farmcontrol?retryWrites=true&w=majority`,
+    `mongodb://${process.env.DB_LINK}/farmcontrol?retryWrites=true&w=majority`,
     {}
   );
 }
src/database/nats.js
@@ -1,9 +1,15 @@
 import { connect } from '@nats-io/transport-node';
 import log4js from 'log4js';
-import config from '../config.js';
+import dotenv from 'dotenv';
+
+dotenv.config();
+
+const NATS_HOST = process.env.NATS_HOST || 'localhost';
+const NATS_PORT = process.env.NATS_PORT || 4222;
+const LOG_LEVEL = process.env.LOG_LEVEL || 'info';

 const logger = log4js.getLogger('Nats');
-logger.level = config.server.logLevel;
+logger.level = LOG_LEVEL;

 class NatsServer {
   constructor() {
@@ -11,7 +17,7 @@ class NatsServer {
     this.subscriptions = new Map(); // subject → { subscription, callbacks }
     this.requestHandlers = new Map(); // subject → { handler, callbacks }
     this.queuedSubscriptions = new Map(); // subject → { subscription, callbacks, queue }
-    this.servers = [`nats://${config.database.nats.host}:${config.database.nats.port}`];
+    this.servers = [`nats://${NATS_HOST}:${NATS_PORT}`];
     this.textEncoder = new TextEncoder();
     this.textDecoder = new TextDecoder();

@@ -37,7 +43,7 @@ class NatsServer {
       if (this.client.isClosed()) {
         throw new Error('NATS client connection failed');
       }
-      logger.info('Connected to NATS.');
+      logger.trace('NATS client connected successfully.');
     } catch (error) {
       throw error;
     }
src/database/redis.js
@@ -1,17 +1,25 @@
 import { createClient } from 'redis';
 import log4js from 'log4js';
-import config from '../config.js';
+import dotenv from 'dotenv';
+
+dotenv.config();
+
+const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
+const REDIS_URL = process.env.REDIS_URL;
+const REDIS_HOST = process.env.REDIS_HOST || '127.0.0.1';
+const REDIS_PORT = process.env.REDIS_PORT || 6379;
+const REDIS_PASSWORD = process.env.REDIS_PASSWORD || undefined;

 const logger = log4js.getLogger('Redis');
-logger.level = config.server.logLevel;
+logger.level = LOG_LEVEL;

 class RedisServer {
   constructor() {
-    const url = config.database.redis.url || `redis://${config.database.redis.host}:${config.database.redis.port}`;
+    const url = REDIS_URL || `redis://${REDIS_HOST}:${REDIS_PORT}`;

     this.client = createClient({
       url,
-      password: config.database.redis.password || undefined,
+      password: REDIS_PASSWORD,
     });

     this.client.on('error', (err) => {
@@ -23,7 +31,6 @@ class RedisServer {

   async connect() {
     if (this.connected) return;
-    logger.info('Connecting to Redis...');
     await this.client.connect();
     this.connected = true;
     logger.info('Connected to Redis');
@@ -54,21 +61,6 @@ class RedisServer {
     await this.connect();
     await this.client.del(key);
   }
-
-  async getKeysByPattern(pattern) {
-    await this.connect();
-    const keys = [];
-    let cursor = '0';
-    do {
-      const result = await this.client.scan(cursor, {
-        MATCH: pattern,
-        COUNT: 100,
-      });
-      cursor = result.cursor;
-      keys.push(...result.keys);
-    } while (cursor !== '0');
-    return keys;
-  }
 }

 const redisServer = new RedisServer();
src/database/seedData.js (new file, 42 lines)
@@ -0,0 +1,42 @@
+import bcrypt from "bcrypt";
+import mongoose from "mongoose";
+import { userModel } from "../schemas/user.schema.js";
+import { jobModel } from "../schemas/job.schema.js";
+import { dbConnect } from "../mongo/index.js";
+
+async function seedDB() {
+  dbConnect();
+  const salt = await bcrypt.genSalt(10);
+  const hashPassword = await bcrypt.hash("secret", salt);
+
+  const user = {
+    _id: new mongoose.Types.ObjectId(1),
+    name: "Admin",
+    email: "admin@jsonapi.com",
+    password: hashPassword,
+    createdAt: new Date(),
+    profile_image: "../../images/admin.jpg",
+  };
+
+  const admin = new userModel(user);
+  await admin.save();
+
+  const job = {
+    _id: new mongoose.Types.ObjectId(1),
+    status : {
+      type: "Queued"
+    },
+    createdAt: new Date(),
+    updatedAt: new Date(),
+    started_at: new Date(),
+  };
+
+  const newJob = new jobModel(job);
+  await newJob.save();
+
+  console.log("DB seeded");
+}
+
+seedDB().then(() => {
+  mongoose.connection.close();
+});
src/index.js (31 lines changed)
@@ -1,7 +1,7 @@
 import express from 'express';
 import bodyParser from 'body-parser';
 import cors from 'cors';
-import config from './config.js';
+import dotenv from 'dotenv';
 import { expressSession, keycloak } from './keycloak.js';
 import { dbConnect } from './database/mongo.js';
 import {
@@ -40,21 +40,24 @@ import {
 } from './routes/index.js';
 import path from 'path';
 import * as fs from 'fs';
+import cron from 'node-cron';
+import ReseedAction from './database/ReseedAction.js';
 import log4js from 'log4js';
 import { populateUserMiddleware } from './services/misc/auth.js';
 import { natsServer } from './database/nats.js';
-import { initializeBuckets } from './database/ceph.js';
-import { getEnvironment } from './config.js';
+import { initializeBuckets } from './services/storage/ceph.js';

-const PORT = config.server.port;
+dotenv.config();
+
+const PORT = process.env.PORT || 8787;
 const app = express();

 const logger = log4js.getLogger('App');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 app.use(log4js.connectLogger(logger, { level: 'trace' }));

-const whitelist = [config.app.urlClient, config.app.urlElectronClient];
+const whitelist = [process.env.APP_URL_CLIENT, process.env.APP_URL_ELECTRON_CLIENT];
 const corsOptions = {
   origin: function (origin, callback) {
     if (!origin || whitelist.indexOf(origin) !== -1) {
@@ -68,20 +71,18 @@ const corsOptions = {

 // Initialize application
 async function initializeApp() {
-  logger.info('Initializing application...');
-  logger.info(`Environment: ${getEnvironment()}`);
-  logger.info(`Port: ${PORT}`);
-  logger.info(`Log Level: ${config.server.logLevel}`);
   try {
     // Connect to database
-    await dbConnect();
+    dbConnect();

     // Connect to NATS
-    await natsServer.connect();
+    natsServer.connect();
+    logger.info('Connected to NATS');

     // Initialize Ceph buckets
     try {
       await initializeBuckets();
+      logger.info('Ceph buckets initialized successfully');
     } catch (err) {
       logger.error('Failed to initialize Ceph buckets:', err);
       // Don't throw error - allow app to start without Ceph for development
@@ -141,5 +142,11 @@ app.use('/taxrates', taxRateRoutes);
 app.use('/taxrecords', taxRecordRoutes);
 app.use('/notes', noteRoutes);

+if (process.env.SCHEDULE_HOUR) {
+  cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {
+    ReseedAction();
+  });
+}
+
 // Start the application
 initializeApp();
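Note: the cron pattern added to src/index.js above carries a stray single quote before the closing backtick (`0 */${...} * * *'`), which node-cron will reject as an invalid expression. A corrected sketch, assuming the intent is to run the reseed at minute 0 of every SCHEDULE_HOUR-th hour:

    if (process.env.SCHEDULE_HOUR) {
      // e.g. SCHEDULE_HOUR=6 runs ReseedAction at 00:00, 06:00, 12:00, 18:00
      cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *`, () => {
        ReseedAction();
      });
    }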
src/keycloak.js
@@ -1,18 +1,17 @@
 import Keycloak from 'keycloak-connect';
 import session from 'express-session';
-import config, { getEnvironment } from './config.js';
-import axios from 'axios';
 import dotenv from 'dotenv';
+import axios from 'axios';
+import jwt from 'jsonwebtoken';
 import log4js from 'log4js';
 import NodeCache from 'node-cache';
-import { userModel } from './database/schemas/management/user.schema.js';
+import { userModel } from './schemas/management/user.schema.js';
 import { getObject } from './database/database.js';
-import { hostModel } from './database/schemas/management/host.schema.js';
+import { hostModel } from './schemas/management/host.schema.js';

-const logger = log4js.getLogger('Keycloak');
-logger.level = config.server.logLevel || 'info';
-
 dotenv.config();
+const logger = log4js.getLogger('Keycloak');
+logger.level = process.env.LOG_LEVEL || 'info';

 // Initialize NodeCache with 5-minute TTL
 const userCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
@@ -57,10 +56,10 @@ const lookupUser = async (preferredUsername) => {

 // Initialize Keycloak
 const keycloakConfig = {
-  realm: config.auth.keycloak.realm,
-  'auth-server-url': config.auth.keycloak.url,
-  'ssl-required': getEnvironment() === 'production' ? 'external' : 'none',
-  resource: config.auth.keycloak.clientId,
+  realm: process.env.KEYCLOAK_REALM || 'farm-control',
+  'auth-server-url': process.env.KEYCLOAK_URL || 'http://localhost:8080/auth',
+  'ssl-required': process.env.NODE_ENV === 'production' ? 'external' : 'none',
+  resource: process.env.KEYCLOAK_CLIENT_ID || 'farmcontrol-client',
   'confidential-port': 0,
   'bearer-only': true,
   'public-client': false,
@@ -96,10 +95,10 @@ const isAuthenticated = async (req, res, next) => {
   try {
     // Verify token with Keycloak introspection endpoint
     const response = await axios.post(
-      `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token/introspect`,
+      `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token/introspect`,
       new URLSearchParams({
         token: token,
-        client_id: config.auth.keycloak.clientId,
+        client_id: process.env.KEYCLOAK_CLIENT_ID,
         client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
       }),
       {
src/routes/index.js
@@ -15,8 +15,6 @@ import materialRoutes from './management/materials.js';
 import partStockRoutes from './inventory/partstocks.js';
 import filamentStockRoutes from './inventory/filamentstocks.js';
 import purchaseOrderRoutes from './inventory/purchaseorders.js';
-import orderItemRoutes from './inventory/orderitems.js';
-import shipmentRoutes from './inventory/shipments.js';
 import stockEventRoutes from './inventory/stockevents.js';
 import stockAuditRoutes from './inventory/stockaudits.js';
 import auditLogRoutes from './management/auditlogs.js';
@@ -49,8 +47,6 @@ export {
   partStockRoutes,
   filamentStockRoutes,
   purchaseOrderRoutes,
-  orderItemRoutes,
-  shipmentRoutes,
   stockEventRoutes,
   stockAuditRoutes,
   auditLogRoutes,
src/routes/inventory/filamentstocks.js
@@ -10,8 +10,6 @@ import {
   newFilamentStockRouteHandler,
   deleteFilamentStockRouteHandler,
   listFilamentStocksByPropertiesRouteHandler,
-  getFilamentStockStatsRouteHandler,
-  getFilamentStockHistoryRouteHandler,
 } from '../../services/inventory/filamentstocks.js';

 // list of filament stocks
@@ -37,16 +35,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newFilamentStockRouteHandler(req, res);
 });

-// get filament stock stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getFilamentStockStatsRouteHandler(req, res);
-});
-
-// get filament stock history
-router.get('/history', isAuthenticated, (req, res) => {
-  getFilamentStockHistoryRouteHandler(req, res);
-});
-
 router.get('/:id', isAuthenticated, (req, res) => {
   getFilamentStockRouteHandler(req, res);
 });
src/routes/inventory/orderitems.js
@@ -10,8 +10,6 @@ import {
   newOrderItemRouteHandler,
   deleteOrderItemRouteHandler,
   listOrderItemsByPropertiesRouteHandler,
-  getOrderItemStatsRouteHandler,
-  getOrderItemHistoryRouteHandler,
 } from '../../services/inventory/orderitems.js';

 // list of order items
@@ -37,16 +35,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newOrderItemRouteHandler(req, res);
 });

-// get order item stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getOrderItemStatsRouteHandler(req, res);
-});
-
-// get order item history
-router.get('/history', isAuthenticated, (req, res) => {
-  getOrderItemHistoryRouteHandler(req, res);
-});
-
 router.get('/:id', isAuthenticated, (req, res) => {
   getOrderItemRouteHandler(req, res);
 });
src/routes/inventory/partstocks.js
@@ -10,8 +10,6 @@ import {
   newPartStockRouteHandler,
   deletePartStockRouteHandler,
   listPartStocksByPropertiesRouteHandler,
-  getPartStockStatsRouteHandler,
-  getPartStockHistoryRouteHandler,
 } from '../../services/inventory/partstocks.js';

 // list of part stocks
@@ -37,16 +35,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newPartStockRouteHandler(req, res);
 });

-// get part stock stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getPartStockStatsRouteHandler(req, res);
-});
-
-// get part stock history
-router.get('/history', isAuthenticated, (req, res) => {
-  getPartStockHistoryRouteHandler(req, res);
-});
-
 router.get('/:id', isAuthenticated, (req, res) => {
   getPartStockRouteHandler(req, res);
 });
src/routes/inventory/purchaseorders.js
@@ -10,8 +10,6 @@ import {
   newPurchaseOrderRouteHandler,
   deletePurchaseOrderRouteHandler,
   listPurchaseOrdersByPropertiesRouteHandler,
-  getPurchaseOrderStatsRouteHandler,
-  getPurchaseOrderHistoryRouteHandler,
 } from '../../services/inventory/purchaseorders.js';

 // list of purchase orders
@@ -37,16 +35,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newPurchaseOrderRouteHandler(req, res);
 });

-// get purchase order stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getPurchaseOrderStatsRouteHandler(req, res);
-});
-
-// get purchase order history
-router.get('/history', isAuthenticated, (req, res) => {
-  getPurchaseOrderHistoryRouteHandler(req, res);
-});
-
 router.get('/:id', isAuthenticated, (req, res) => {
   getPurchaseOrderRouteHandler(req, res);
 });
src/routes/inventory/shipments.js
@@ -10,8 +10,6 @@ import {
   newShipmentRouteHandler,
   deleteShipmentRouteHandler,
   listShipmentsByPropertiesRouteHandler,
-  getShipmentStatsRouteHandler,
-  getShipmentHistoryRouteHandler,
 } from '../../services/inventory/shipments.js';

 // list of shipments
@@ -51,16 +49,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newShipmentRouteHandler(req, res);
 });

-// get shipment stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getShipmentStatsRouteHandler(req, res);
-});
-
-// get shipment history
-router.get('/history', isAuthenticated, (req, res) => {
-  getShipmentHistoryRouteHandler(req, res);
-});
-
 router.get('/:id', isAuthenticated, (req, res) => {
   getShipmentRouteHandler(req, res);
 });
src/routes/inventory/stockaudits.js
@@ -9,8 +9,6 @@ import {
   newStockAuditRouteHandler,
   updateStockAuditRouteHandler,
   deleteStockAuditRouteHandler,
-  getStockAuditStatsRouteHandler,
-  getStockAuditHistoryRouteHandler,
 } from '../../services/inventory/stockaudits.js';

 // List stock audits
@@ -38,16 +36,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newStockAuditRouteHandler(req, res);
 });

-// get stock audit stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getStockAuditStatsRouteHandler(req, res);
-});
-
-// get stock audit history
-router.get('/history', isAuthenticated, (req, res) => {
-  getStockAuditHistoryRouteHandler(req, res);
-});
-
 // Get specific stock audit
 router.get('/:id', isAuthenticated, (req, res) => {
   getStockAuditRouteHandler(req, res);
src/routes/inventory/stockevents.js
@@ -10,8 +10,6 @@ import {
   editStockEventRouteHandler,
   deleteStockEventRouteHandler,
   listStockEventsByPropertiesRouteHandler,
-  getStockEventStatsRouteHandler,
-  getStockEventHistoryRouteHandler,
 } from '../../services/inventory/stockevents.js';

 // list of stock events
@@ -37,16 +35,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newStockEventRouteHandler(req, res);
 });

-// get stock event stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getStockEventStatsRouteHandler(req, res);
-});
-
-// get stock event history
-router.get('/history', isAuthenticated, (req, res) => {
-  getStockEventHistoryRouteHandler(req, res);
-});
-
 router.get('/:id', isAuthenticated, (req, res) => {
   getStockEventRouteHandler(req, res);
 });
src/routes/management/auditlogs.js
@@ -3,8 +3,6 @@ import { isAuthenticated } from '../../keycloak.js';
 import {
   listAuditLogsRouteHandler,
   getAuditLogRouteHandler,
-  getAuditLogStatsRouteHandler,
-  getAuditLogHistoryRouteHandler,
 } from '../../services/management/auditlogs.js';
 import { parseFilter } from '../../utils.js';

@@ -29,16 +27,6 @@ router.get('/', isAuthenticated, async (req, res) => {
   listAuditLogsRouteHandler(req, res, page, limit, filter, sort, order);
 });

-// get audit log stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getAuditLogStatsRouteHandler(req, res);
-});
-
-// get audit log history
-router.get('/history', isAuthenticated, (req, res) => {
-  getAuditLogHistoryRouteHandler(req, res);
-});
-
 /**
  * @route GET /api/auditlogs/:id
  * @desc Get a single audit log by ID
src/routes/management/couriers.js
@@ -10,8 +10,6 @@ import {
   newCourierRouteHandler,
   deleteCourierRouteHandler,
   listCouriersByPropertiesRouteHandler,
-  getCourierStatsRouteHandler,
-  getCourierHistoryRouteHandler,
 } from '../../services/management/courier.js';

 // list of couriers
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newCourierRouteHandler(req, res);
 });

-// get courier stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getCourierStatsRouteHandler(req, res);
-});
-
-// get courier history
-router.get('/history', isAuthenticated, (req, res) => {
-  getCourierHistoryRouteHandler(req, res);
-});
-
 router.get('/:id', isAuthenticated, (req, res) => {
   getCourierRouteHandler(req, res);
 });
@@ -10,8 +10,6 @@ import {
   newCourierServiceRouteHandler,
   deleteCourierServiceRouteHandler,
   listCourierServicesByPropertiesRouteHandler,
-  getCourierServiceStatsRouteHandler,
-  getCourierServiceHistoryRouteHandler,
 } from '../../services/management/courierservice.js';

 // list of courier services
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newCourierServiceRouteHandler(req, res);
 });
-
-// get courier service stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getCourierServiceStatsRouteHandler(req, res);
-});
-
-// get courierservice history
-router.get('/history', isAuthenticated, (req, res) => {
-  getCourierServiceHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getCourierServiceRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newDocumentJobRouteHandler,
   deleteDocumentJobRouteHandler,
   listDocumentJobsByPropertiesRouteHandler,
-  getDocumentJobStatsRouteHandler,
-  getDocumentJobHistoryRouteHandler,
 } from '../../services/management/documentjobs.js';

 // list of document jobs
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newDocumentJobRouteHandler(req, res);
 });
-
-// get document job stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getDocumentJobStatsRouteHandler(req, res);
-});
-
-// get documentjobs history
-router.get('/history', isAuthenticated, (req, res) => {
-  getDocumentJobHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getDocumentJobRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newDocumentPrinterRouteHandler,
   deleteDocumentPrinterRouteHandler,
   listDocumentPrintersByPropertiesRouteHandler,
-  getDocumentPrinterStatsRouteHandler,
-  getDocumentPrinterHistoryRouteHandler,
 } from '../../services/management/documentprinters.js';

 // list of document printers
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newDocumentPrinterRouteHandler(req, res);
 });
-
-// get document printer stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getDocumentPrinterStatsRouteHandler(req, res);
-});
-
-// get documentprinters history
-router.get('/history', isAuthenticated, (req, res) => {
-  getDocumentPrinterHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getDocumentPrinterRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newDocumentSizeRouteHandler,
   deleteDocumentSizeRouteHandler,
   listDocumentSizesByPropertiesRouteHandler,
-  getDocumentSizeStatsRouteHandler,
-  getDocumentSizeHistoryRouteHandler,
 } from '../../services/management/documentsizes.js';

 // list of document sizes
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newDocumentSizeRouteHandler(req, res);
 });
-
-// get document size stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getDocumentSizeStatsRouteHandler(req, res);
-});
-
-// get documentsizes history
-router.get('/history', isAuthenticated, (req, res) => {
-  getDocumentSizeHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getDocumentSizeRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newDocumentTemplateRouteHandler,
   deleteDocumentTemplateRouteHandler,
   listDocumentTemplatesByPropertiesRouteHandler,
-  getDocumentTemplateStatsRouteHandler,
-  getDocumentTemplateHistoryRouteHandler,
 } from '../../services/management/documenttemplates.js';

 // list of document templates
@@ -37,16 +35,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newDocumentTemplateRouteHandler(req, res);
 });
-
-// get document template stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getDocumentTemplateStatsRouteHandler(req, res);
-});
-
-// get document template history
-router.get('/history', isAuthenticated, (req, res) => {
-  getDocumentTemplateHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getDocumentTemplateRouteHandler(req, res);
 });

@@ -9,8 +9,6 @@ import {
   getFilamentRouteHandler,
   editFilamentRouteHandler,
   newFilamentRouteHandler,
-  getFilamentStatsRouteHandler,
-  getFilamentHistoryRouteHandler,
 } from '../../services/management/filaments.js';

 // list of filaments
@@ -52,16 +50,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newFilamentRouteHandler(req, res);
 });
-
-// get filament stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getFilamentStatsRouteHandler(req, res);
-});
-
-// get filaments history
-router.get('/history', isAuthenticated, (req, res) => {
-  getFilamentHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getFilamentRouteHandler(req, res);
 });

@@ -12,8 +12,6 @@ import {
   flushFileRouteHandler,
   deleteFileRouteHandler,
   listFilesByPropertiesRouteHandler,
-  getFileStatsRouteHandler,
-  getFileHistoryRouteHandler,
 } from '../../services/management/files.js';

 // list of files
@@ -35,16 +33,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newFileRouteHandler(req, res);
 });
-
-// get file stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getFileStatsRouteHandler(req, res);
-});
-
-// get file history
-router.get('/history', isAuthenticated, (req, res) => {
-  getFileHistoryRouteHandler(req, res);
-});

 router.delete('/:id/flush', isAuthenticated, (req, res) => {
   flushFileRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newHostRouteHandler,
   deleteHostRouteHandler,
   listHostsByPropertiesRouteHandler,
-  getHostStatsRouteHandler,
-  getHostHistoryRouteHandler,
 } from '../../services/management/hosts.js';

 // list of hosts
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newHostRouteHandler(req, res);
 });
-
-// get host stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getHostStatsRouteHandler(req, res);
-});
-
-// get hosts history
-router.get('/history', isAuthenticated, (req, res) => {
-  getHostHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getHostRouteHandler(req, res);
 });

@@ -8,8 +8,6 @@ import {
   getMaterialRouteHandler,
   editMaterialRouteHandler,
   newMaterialRouteHandler,
-  getMaterialStatsRouteHandler,
-  getMaterialHistoryRouteHandler,
 } from '../../services/management/materials.js';

 // list of materials
@@ -36,16 +34,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newMaterialRouteHandler(req, res);
 });
-
-// get material stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getMaterialStatsRouteHandler(req, res);
-});
-
-// get materials history
-router.get('/history', isAuthenticated, (req, res) => {
-  getMaterialHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getMaterialRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newNoteTypeRouteHandler,
   deleteNoteTypeRouteHandler,
   listNoteTypesByPropertiesRouteHandler,
-  getNoteTypeStatsRouteHandler,
-  getNoteTypeHistoryRouteHandler,
 } from '../../services/management/notetypes.js';

 // list of note types
@@ -37,16 +35,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newNoteTypeRouteHandler(req, res);
 });
-
-// get note type stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getNoteTypeStatsRouteHandler(req, res);
-});
-
-// get notetypes history
-router.get('/history', isAuthenticated, (req, res) => {
-  getNoteTypeHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getNoteTypeRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newPartRouteHandler,
   deletePartRouteHandler,
   listPartsByPropertiesRouteHandler,
-  getPartStatsRouteHandler,
-  getPartHistoryRouteHandler,
 } from '../../services/management/parts.js';

 // list of parts
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newPartRouteHandler(req, res);
 });
-
-// get part stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getPartStatsRouteHandler(req, res);
-});
-
-// get parts history
-router.get('/history', isAuthenticated, (req, res) => {
-  getPartHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getPartRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newProductRouteHandler,
   deleteProductRouteHandler,
   listProductsByPropertiesRouteHandler,
-  getProductStatsRouteHandler,
-  getProductHistoryRouteHandler,
 } from '../../services/management/products.js';

 // list of products
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newProductRouteHandler(req, res);
 });
-
-// get product stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getProductStatsRouteHandler(req, res);
-});
-
-// get products history
-router.get('/history', isAuthenticated, (req, res) => {
-  getProductHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getProductRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newTaxRateRouteHandler,
   deleteTaxRateRouteHandler,
   listTaxRatesByPropertiesRouteHandler,
-  getTaxRateStatsRouteHandler,
-  getTaxRateHistoryRouteHandler,
 } from '../../services/management/taxrates.js';

 // list of tax rates
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newTaxRateRouteHandler(req, res);
 });
-
-// get tax rate stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getTaxRateStatsRouteHandler(req, res);
-});
-
-// get tax rate history
-router.get('/history', isAuthenticated, (req, res) => {
-  getTaxRateHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getTaxRateRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newTaxRecordRouteHandler,
   deleteTaxRecordRouteHandler,
   listTaxRecordsByPropertiesRouteHandler,
-  getTaxRecordStatsRouteHandler,
-  getTaxRecordHistoryRouteHandler,
 } from '../../services/management/taxrecords.js';

 // list of tax records
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newTaxRecordRouteHandler(req, res);
 });
-
-// get tax record stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getTaxRecordStatsRouteHandler(req, res);
-});
-
-// get tax record history
-router.get('/history', isAuthenticated, (req, res) => {
-  getTaxRecordHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getTaxRecordRouteHandler(req, res);
 });

@@ -8,8 +8,6 @@ import {
   listUsersByPropertiesRouteHandler,
   getUserRouteHandler,
   editUserRouteHandler,
-  getUserStatsRouteHandler,
-  getUserHistoryRouteHandler,
 } from '../../services/management/users.js';

 // list of document templates
@@ -31,16 +29,6 @@ router.get('/properties', isAuthenticated, (req, res) => {
   listUsersByPropertiesRouteHandler(req, res, properties, filter, masterFilter);
 });
-
-// get user stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getUserStatsRouteHandler(req, res);
-});
-
-// get user history
-router.get('/history', isAuthenticated, (req, res) => {
-  getUserHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getUserRouteHandler(req, res);
 });

@@ -10,8 +10,6 @@ import {
   newVendorRouteHandler,
   deleteVendorRouteHandler,
   listVendorsByPropertiesRouteHandler,
-  getVendorStatsRouteHandler,
-  getVendorHistoryRouteHandler,
 } from '../../services/management/vendors.js';

 // list of vendors
@@ -33,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newVendorRouteHandler(req, res);
 });
-
-// get vendor stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getVendorStatsRouteHandler(req, res);
-});
-
-// get vendors history
-router.get('/history', isAuthenticated, (req, res) => {
-  getVendorHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getVendorRouteHandler(req, res);
 });

@@ -6,11 +6,8 @@ import {
   editNoteRouteHandler,
   newNoteRouteHandler,
   deleteNoteRouteHandler,
-  listNotesByPropertiesRouteHandler,
-  getNoteStatsRouteHandler,
-  getNoteHistoryRouteHandler,
 } from '../../services/misc/notes.js';
-import { getFilter, convertPropertiesString } from '../../utils.js';
+import { getFilter } from '../../utils.js';

 const router = express.Router();

@@ -34,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newNoteRouteHandler(req, res);
 });
-
-// get note stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getNoteStatsRouteHandler(req, res);
-});
-
-// get note history
-router.get('/history', isAuthenticated, (req, res) => {
-  getNoteHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getNoteRouteHandler(req, res);
 });

@@ -9,7 +9,6 @@ import {
   newGCodeFileRouteHandler,
   listGCodeFilesByPropertiesRouteHandler,
   getGCodeFileContentRouteHandler,
-  getGCodeFileStatsRouteHandler,
 } from '../../services/production/gcodefiles.js';
 import { convertPropertiesString, getFilter } from '../../utils.js';

@@ -33,11 +32,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newGCodeFileRouteHandler(req, res);
 });
-
-// get gcodeFile stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getGCodeFileStatsRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getGCodeFileRouteHandler(req, res);
 });

@@ -9,7 +9,6 @@ import {
   newJobRouteHandler,
   deleteJobRouteHandler,
   getJobStatsRouteHandler,
-  getJobHistoryRouteHandler,
 } from '../../services/production/jobs.js';
 import { convertPropertiesString, getFilter } from '../../utils.js';

@@ -32,16 +31,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newJobRouteHandler(req, res);
 });
-
-// get job stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getJobStatsRouteHandler(req, res);
-});
-
-// get job history
-router.get('/history', isAuthenticated, (req, res) => {
-  getJobHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getJobRouteHandler(req, res);
 });
@@ -50,4 +39,9 @@ router.delete('/:id', isAuthenticated, async (req, res) => {
   deleteJobRouteHandler(req, res);
 });

+// get printer stats
+router.get('/stats', isAuthenticated, (req, res) => {
+  getJobStatsRouteHandler(req, res);
+});
+
 export default router;

@@ -9,7 +9,6 @@ import {
   newPrinterRouteHandler,
   getPrinterStatsRouteHandler,
   listPrintersByPropertiesRouteHandler,
-  getPrinterHistoryRouteHandler,
 } from '../../services/production/printers.js';
 import { convertPropertiesString, getFilter } from '../../utils.js';

@@ -33,11 +32,6 @@ router.post('/', isAuthenticated, (req, res) => {
   newPrinterRouteHandler(req, res);
 });
-
-// get printer history
-router.get('/history', isAuthenticated, (req, res) => {
-  getPrinterHistoryRouteHandler(req, res);
-});

 // get printer stats
 router.get('/stats', isAuthenticated, (req, res) => {
   getPrinterStatsRouteHandler(req, res);

@@ -6,8 +6,6 @@ import {
   listSubJobsRouteHandler,
   listSubJobsByPropertiesRouteHandler,
   getSubJobRouteHandler,
-  getSubJobStatsRouteHandler,
-  getSubJobHistoryRouteHandler,
 } from '../../services/production/subjobs.js';
 import { getFilter, convertPropertiesString } from '../../utils.js';

@@ -26,16 +24,6 @@ router.get('/properties', isAuthenticated, (req, res) => {
   listSubJobsByPropertiesRouteHandler(req, res, properties, filter);
 });
-
-// get sub job stats
-router.get('/stats', isAuthenticated, (req, res) => {
-  getSubJobStatsRouteHandler(req, res);
-});
-
-// get sub job history
-router.get('/history', isAuthenticated, (req, res) => {
-  getSubJobHistoryRouteHandler(req, res);
-});

 router.get('/:id', isAuthenticated, (req, res) => {
   getSubJobRouteHandler(req, res);
 });

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { filamentStockModel } from '../../database/schemas/inventory/filamentstock.schema.js';
+import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Filament Stocks');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listFilamentStocksRouteHandler = async (
   req,
@@ -157,25 +157,3 @@ export const deleteFilamentStockRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getFilamentStockStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: filamentStockModel });
-  if (result?.error) {
-    logger.error('Error fetching filament stock stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Filament stock stats:', result);
-  res.send(result);
-};
-
-export const getFilamentStockHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: filamentStockModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching filament stock history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Filament stock history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { orderItemModel } from '../../database/schemas/inventory/orderitem.schema.js';
+import { orderItemModel } from '../../schemas/inventory/orderitem.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Order Items');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listOrderItemsRouteHandler = async (
   req,
@@ -45,11 +45,11 @@ export const listOrderItemsRouteHandler = async (
       },
       {
         path: 'item',
-        populate: { path: 'costTaxRate', strictPopulate: false },
+        populate: { path: 'costTaxRate' },
       },
       {
         path: 'item',
-        populate: { path: 'priceTaxRate', strictPopulate: false },
+        populate: { path: 'priceTaxRate' },
       },
     ],
   });
@@ -104,13 +104,11 @@ export const getOrderItemRouteHandler = async (req, res) => {
       },
       {
         path: 'item',
-        populate: { path: 'costTaxRate', strictPopulate: false },
+        populate: { path: 'costTaxRate' },
-        strictPopulate: false,
       },
       {
         path: 'item',
-        populate: { path: 'priceTaxRate', strictPopulate: false },
+        populate: { path: 'priceTaxRate' },
-        strictPopulate: false,
       },
     ],
   });
@@ -130,10 +128,9 @@ export const editOrderItemRouteHandler = async (req, res) => {

   const updateData = {
     updatedAt: new Date(),
+    purchaseOrder: req.body.purchaseOrder,
     itemType: req.body.itemType,
     item: req.body.item,
-    orderType: req.body.orderType,
-    order: req.body.order,
     syncAmount: req.body.syncAmount,
     itemAmount: req.body.itemAmount,
     quantity: req.body.quantity,
@@ -164,7 +161,6 @@ export const newOrderItemRouteHandler = async (req, res) => {
   const newData = {
     updatedAt: new Date(),
     purchaseOrder: req.body.purchaseOrder,
-    state: { type: 'draft' },
     itemType: req.body.itemType,
     item: req.body.item,
     orderType: req.body.orderType,
@@ -211,25 +207,3 @@ export const deleteOrderItemRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getOrderItemStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: orderItemModel });
-  if (result?.error) {
-    logger.error('Error fetching order item stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Order item stats:', result);
-  res.send(result);
-};
-
-export const getOrderItemHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: orderItemModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching order item history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Order item history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { partStockModel } from '../../database/schemas/inventory/partstock.schema.js';
+import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Part Stocks');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listPartStocksRouteHandler = async (
   req,
@@ -157,25 +157,3 @@ export const deletePartStockRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getPartStockStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: partStockModel });
-  if (result?.error) {
-    logger.error('Error fetching part stock stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Part stock stats:', result);
-  res.send(result);
-};
-
-export const getPartStockHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: partStockModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching part stock history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Part stock history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { purchaseOrderModel } from '../../database/schemas/inventory/purchaseorder.schema.js';
+import { purchaseOrderModel } from '../../schemas/inventory/purchaseorder.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,12 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Purchase Orders');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listPurchaseOrdersRouteHandler = async (
   req,
@@ -79,7 +78,7 @@ export const getPurchaseOrderRouteHandler = async (req, res) => {
   const result = await getObject({
     model: purchaseOrderModel,
     id,
-    populate: ['vendor'],
+    populate: ['vendor', 'items.item', 'items.taxRate'],
   });
   if (result?.error) {
     logger.warn(`Purchase Order not found with supplied id.`);
@@ -98,6 +97,8 @@ export const editPurchaseOrderRouteHandler = async (req, res) => {
   const updateData = {
     updatedAt: new Date(),
     vendor: req.body.vendor,
+    items: req.body.items,
+    cost: req.body.cost,
   };
   // Create audit log before updating
   const result = await editObject({
@@ -122,6 +123,8 @@ export const newPurchaseOrderRouteHandler = async (req, res) => {
   const newData = {
     updatedAt: new Date(),
     vendor: req.body.vendor,
+    items: req.body.items,
+    cost: req.body.cost,
   };
   const result = await newObject({
     model: purchaseOrderModel,
@@ -158,25 +161,3 @@ export const deletePurchaseOrderRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getPurchaseOrderStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: purchaseOrderModel });
-  if (result?.error) {
-    logger.error('Error fetching purchase order stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Purchase order stats:', result);
-  res.send(result);
-};
-
-export const getPurchaseOrderHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: purchaseOrderModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching purchase order history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Purchase order history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { shipmentModel } from '../../database/schemas/inventory/shipment.schema.js';
+import { shipmentModel } from '../../schemas/inventory/shipment.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Shipments');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listShipmentsRouteHandler = async (
   req,
@@ -175,25 +175,3 @@ export const deleteShipmentRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getShipmentStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: shipmentModel });
-  if (result?.error) {
-    logger.error('Error fetching shipment stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Shipment stats:', result);
-  res.send(result);
-};
-
-export const getShipmentHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: shipmentModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching shipment history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Shipment history:', result);
-  res.send(result);
-};

@@ -1,12 +1,13 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { stockAuditModel } from '../../database/schemas/inventory/stockaudit.schema.js';
+import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import { getAuditLogs } from '../../utils.js';
-import { getModelStats, getModelHistory } from '../../database/database.js';
+dotenv.config();
+

 const logger = log4js.getLogger('Stock Audits');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listStockAuditsRouteHandler = async (
   req,
@@ -167,25 +168,3 @@ export const deleteStockAuditRouteHandler = async (req, res) => {
     res.status(500).send({ error: error.message });
   }
 };
-
-export const getStockAuditStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: stockAuditModel });
-  if (result?.error) {
-    logger.error('Error fetching stock audit stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Stock audit stats:', result);
-  res.send(result);
-};
-
-export const getStockAuditHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: stockAuditModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching stock audit history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Stock audit history:', result);
-  res.send(result);
-};

|||||||
@ -1,5 +1,5 @@
|
|||||||
import config from '../../config.js';
|
import dotenv from 'dotenv';
|
||||||
import { stockEventModel } from '../../database/schemas/inventory/stockevent.schema.js';
|
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
|
||||||
import log4js from 'log4js';
|
import log4js from 'log4js';
|
||||||
import mongoose from 'mongoose';
|
import mongoose from 'mongoose';
|
||||||
import {
|
import {
|
||||||
@ -9,11 +9,11 @@ import {
|
|||||||
editObject,
|
editObject,
|
||||||
newObject,
|
newObject,
|
||||||
listObjectsByProperties,
|
listObjectsByProperties,
|
||||||
getModelStats,
|
|
||||||
getModelHistory,
|
|
||||||
} from '../../database/database.js';
|
} from '../../database/database.js';
|
||||||
|
dotenv.config();
|
||||||
|
|
||||||
const logger = log4js.getLogger('Stock Events');
|
const logger = log4js.getLogger('Stock Events');
|
||||||
logger.level = config.server.logLevel;
|
logger.level = process.env.LOG_LEVEL;
|
||||||
|
|
||||||
export const listStockEventsRouteHandler = async (
|
export const listStockEventsRouteHandler = async (
|
||||||
req,
|
req,
|
||||||
@ -165,25 +165,3 @@ export const deleteStockEventRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
res.send(result);
|
res.send(result);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getStockEventStatsRouteHandler = async (req, res) => {
|
|
||||||
const result = await getModelStats({ model: stockEventModel });
|
|
||||||
if (result?.error) {
|
|
||||||
logger.error('Error fetching stock event stats:', result.error);
|
|
||||||
return res.status(result.code).send(result);
|
|
||||||
}
|
|
||||||
logger.trace('Stock event stats:', result);
|
|
||||||
res.send(result);
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getStockEventHistoryRouteHandler = async (req, res) => {
|
|
||||||
const from = req.query.from;
|
|
||||||
const to = req.query.to;
|
|
||||||
const result = await getModelHistory({ model: stockEventModel, from, to });
|
|
||||||
if (result?.error) {
|
|
||||||
logger.error('Error fetching stock event history:', result.error);
|
|
||||||
return res.status(result.code).send(result);
|
|
||||||
}
|
|
||||||
logger.trace('Stock event history:', result);
|
|
||||||
res.send(result);
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { auditLogModel } from '../../database/schemas/management/auditlog.schema.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
-import { getModelStats, getModelHistory } from '../../database/database.js';

+dotenv.config();
 const logger = log4js.getLogger('AuditLogs');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listAuditLogsRouteHandler = async (
   req,
@@ -88,25 +88,3 @@ export const getAuditLogRouteHandler = async (req, res) => {
     res.status(500).send({ error: error.message });
   }
 };
-
-export const getAuditLogStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: auditLogModel });
-  if (result?.error) {
-    logger.error('Error fetching audit log stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Audit log stats:', result);
-  res.send(result);
-};
-
-export const getAuditLogHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: auditLogModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching audit log history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Audit log history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { courierModel } from '../../database/schemas/management/courier.schema.js';
+import { courierModel } from '../../schemas/management/courier.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Couriers');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listCouriersRouteHandler = async (
   req,
@@ -162,25 +162,3 @@ export const deleteCourierRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getCourierStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: courierModel });
-  if (result?.error) {
-    logger.error('Error fetching courier stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Courier stats:', result);
-  res.send(result);
-};
-
-export const getCourierHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: courierModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching courier history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Courier history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { courierServiceModel } from '../../database/schemas/management/courierservice.schema.js';
+import { courierServiceModel } from '../../schemas/management/courierservice.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('CourierServices');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listCourierServicesRouteHandler = async (
   req,
@@ -165,25 +165,3 @@ export const deleteCourierServiceRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getCourierServiceStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: courierServiceModel });
-  if (result?.error) {
-    logger.error('Error fetching courier service stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Courier service stats:', result);
-  res.send(result);
-};
-
-export const getCourierServiceHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: courierServiceModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching courier service history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Courier service history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { documentJobModel } from '../../database/schemas/management/documentjob.schema.js';
+import { documentJobModel } from '../../schemas/management/documentjob.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Document Jobs');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listDocumentJobsRouteHandler = async (
   req,
@@ -158,25 +158,3 @@ export const deleteDocumentJobRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getDocumentJobStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: documentJobModel });
-  if (result?.error) {
-    logger.error('Error fetching document job stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document job stats:', result);
-  res.send(result);
-};
-
-export const getDocumentJobHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: documentJobModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching document job history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document job history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
-import { documentPrinterModel } from '../../database/schemas/management/documentprinter.schema.js';
+import { documentPrinterModel } from '../../schemas/management/documentprinter.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();

 const logger = log4js.getLogger('Document Templates');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listDocumentPrintersRouteHandler = async (
   req,
@@ -167,25 +167,3 @@ export const deleteDocumentPrinterRouteHandler = async (req, res) => {

   res.send(result);
 };
-
-export const getDocumentPrinterStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: documentPrinterModel });
-  if (result?.error) {
-    logger.error('Error fetching document printer stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document printer stats:', result);
-  res.send(result);
-};
-
-export const getDocumentPrinterHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: documentPrinterModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching document printer history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document printer history:', result);
-  res.send(result);
-};

@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { documentSizeModel } from '../../database/schemas/management/documentsize.schema.js';
+import dotenv from 'dotenv';
+import { documentSizeModel } from '../../schemas/management/documentsize.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Document Sizes');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listDocumentSizesRouteHandler = async (
   req,
@@ -158,25 +158,3 @@ export const deleteDocumentSizeRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getDocumentSizeStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: documentSizeModel });
-  if (result?.error) {
-    logger.error('Error fetching document size stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document size stats:', result);
-  res.send(result);
-};
-
-export const getDocumentSizeHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: documentSizeModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching document size history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document size history:', result);
-  res.send(result);
-};
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { documentTemplateModel } from '../../database/schemas/management/documenttemplate.schema.js';
+import dotenv from 'dotenv';
+import { documentTemplateModel } from '../../schemas/management/documenttemplate.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Document Templates');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listDocumentTemplatesRouteHandler = async (
   req,
@@ -184,25 +184,3 @@ export const deleteDocumentTemplateRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getDocumentTemplateStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: documentTemplateModel });
-  if (result?.error) {
-    logger.error('Error fetching document template stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document template stats:', result);
-  res.send(result);
-};
-
-export const getDocumentTemplateHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: documentTemplateModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching document template history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Document template history:', result);
-  res.send(result);
-};
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { filamentModel } from '../../database/schemas/management/filament.schema.js';
+import dotenv, { populate } from 'dotenv';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -8,12 +8,11 @@ import {
   listObjectsByProperties,
   editObject,
   newObject,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
 
+dotenv.config();
 const logger = log4js.getLogger('Filaments');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listFilamentsRouteHandler = async (
   req,
@@ -35,7 +34,7 @@ export const listFilamentsRouteHandler = async (
     search,
     sort,
    order,
-    populate: ['vendor', 'costTaxRate'],
+    populate: ['vendor'],
   });
 
   if (result?.error) {
@@ -76,7 +75,7 @@ export const getFilamentRouteHandler = async (req, res) => {
   const result = await getObject({
     model: filamentModel,
     id,
-    populate: ['vendor', 'costTaxRate'],
+    populate: 'vendor',
   });
   if (result?.error) {
     logger.warn(`Filament not found with supplied id.`);
@@ -102,8 +101,6 @@ export const editFilamentRouteHandler = async (req, res) => {
     vendor: req.body.vendor,
     type: req.body.type,
     cost: req.body.cost,
-    costTaxRate: req.body.costTaxRate,
-    costWithTax: req.body.costWithTax,
     diameter: req.body.diameter,
     density: req.body.density,
     emptySpoolWeight: req.body.emptySpoolWeight,
@@ -138,8 +135,6 @@ export const newFilamentRouteHandler = async (req, res) => {
     vendor: req.body.vendor,
     type: req.body.type,
     cost: req.body.cost,
-    costTaxRate: req.body.costTaxRate,
-    costWithTax: req.body.costWithTax,
     diameter: req.body.diameter,
     density: req.body.density,
     emptySpoolWeight: req.body.emptySpoolWeight,
@@ -159,25 +154,3 @@ export const newFilamentRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getFilamentStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: filamentModel });
-  if (result?.error) {
-    logger.error('Error fetching filament stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Filament stats:', result);
-  res.send(result);
-};
-
-export const getFilamentHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: filamentModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching filament history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Filament history:', result);
-  res.send(result);
-};
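
(On the populate changes in this file: Mongoose's populate option accepts a single path as a string, multiple paths as an array, or objects carrying per-path options, so both the array and string forms above are valid. A minimal sketch, reusing names from the diff and assuming standard Mongoose:)

// All three forms populate referenced documents:
filamentModel.findById(id).populate('vendor');
filamentModel.findById(id).populate(['vendor', 'costTaxRate']);
filamentModel.findById(id).populate({ path: 'vendor', select: 'name' });
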
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { fileModel } from '../../database/schemas/management/file.schema.js';
+import dotenv from 'dotenv';
+import { fileModel } from '../../schemas/management/file.schema.js';
 import log4js from 'log4js';
 import multer from 'multer';
 import path from 'path';
@@ -13,19 +13,18 @@ import {
   newObject,
   listObjectsByProperties,
   flushFile,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
 import {
   uploadFile,
   downloadFile,
   deleteFile as deleteCephFile,
   BUCKETS,
-} from '../../database/ceph.js';
+} from '../storage/ceph.js';
 import { getFileMeta } from '../../utils.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Files');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 // Set storage engine to memory for Ceph upload
 const fileStorage = multer.memoryStorage();
@@ -348,7 +347,10 @@ export const getFileContentRouteHandler = async (req, res) => {
   }
 
   // Fallback to local file system for backward compatibility
-  const filePath = path.join(config.storage.fileStorage, file.fileName || file.name);
+  const filePath = path.join(
+    process.env.FILE_STORAGE || './uploads',
+    file.fileName || file.name
+  );
 
   // Read the file
   fs.readFile(filePath, (err, data) => {
@@ -419,25 +421,3 @@ export const parseFileHandler = async (req, res) => {
     res.status(500).send({ error: error.message });
   }
 };
-
-export const getFileStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: fileModel });
-  if (result?.error) {
-    logger.error('Error fetching file stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('File stats:', result);
-  res.send(result);
-};
-
-export const getFileHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: fileModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching file history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('File history:', result);
-  res.send(result);
-};
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { hostModel } from '../../database/schemas/management/host.schema.js';
+import dotenv from 'dotenv';
+import { hostModel } from '../../schemas/management/host.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Hosts');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listHostsRouteHandler = async (
   req,
@@ -159,25 +159,3 @@ export const deleteHostRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getHostStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: hostModel });
-  if (result?.error) {
-    logger.error('Error fetching host stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Host stats:', result);
-  res.send(result);
-};
-
-export const getHostHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: hostModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching host history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Host history:', result);
-  res.send(result);
-};
@@ -1,10 +1,11 @@
-import config from '../../config.js';
-import { materialModel } from '../../database/schemas/management/material.schema.js';
+import dotenv from 'dotenv';
+import { materialModel } from '../../schemas/management/material.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
-import { getModelStats, getModelHistory } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Materials');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listMaterialsRouteHandler = async (
   req,
@@ -127,25 +128,3 @@ export const newMaterialRouteHandler = async (req, res) => {
     res.status(500).send({ error: updateError.message });
   }
 };
-
-export const getMaterialStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: materialModel });
-  if (result?.error) {
-    logger.error('Error fetching material stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Material stats:', result);
-  res.send(result);
-};
-
-export const getMaterialHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: materialModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching material history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Material history:', result);
-  res.send(result);
-};
|||||||
@ -1,5 +1,5 @@
|
|||||||
import config from '../../config.js';
|
import dotenv from 'dotenv';
|
||||||
import { noteTypeModel } from '../../database/schemas/management/notetype.schema.js';
|
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
|
||||||
import log4js from 'log4js';
|
import log4js from 'log4js';
|
||||||
import mongoose from 'mongoose';
|
import mongoose from 'mongoose';
|
||||||
import {
|
import {
|
||||||
@ -9,11 +9,11 @@ import {
|
|||||||
editObject,
|
editObject,
|
||||||
newObject,
|
newObject,
|
||||||
listObjectsByProperties,
|
listObjectsByProperties,
|
||||||
getModelStats,
|
|
||||||
getModelHistory,
|
|
||||||
} from '../../database/database.js';
|
} from '../../database/database.js';
|
||||||
|
dotenv.config();
|
||||||
|
|
||||||
const logger = log4js.getLogger('Note Types');
|
const logger = log4js.getLogger('Note Types');
|
||||||
logger.level = config.server.logLevel;
|
logger.level = process.env.LOG_LEVEL;
|
||||||
|
|
||||||
export const listNoteTypesRouteHandler = async (
|
export const listNoteTypesRouteHandler = async (
|
||||||
req,
|
req,
|
||||||
@ -158,25 +158,3 @@ export const deleteNoteTypeRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
res.send(result);
|
res.send(result);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getNoteTypeStatsRouteHandler = async (req, res) => {
|
|
||||||
const result = await getModelStats({ model: noteTypeModel });
|
|
||||||
if (result?.error) {
|
|
||||||
logger.error('Error fetching note type stats:', result.error);
|
|
||||||
return res.status(result.code).send(result);
|
|
||||||
}
|
|
||||||
logger.trace('Note type stats:', result);
|
|
||||||
res.send(result);
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getNoteTypeHistoryRouteHandler = async (req, res) => {
|
|
||||||
const from = req.query.from;
|
|
||||||
const to = req.query.to;
|
|
||||||
const result = await getModelHistory({ model: noteTypeModel, from, to });
|
|
||||||
if (result?.error) {
|
|
||||||
logger.error('Error fetching note type history:', result.error);
|
|
||||||
return res.status(result.code).send(result);
|
|
||||||
}
|
|
||||||
logger.trace('Note type history:', result);
|
|
||||||
res.send(result);
|
|
||||||
};
|
|
||||||
|
|||||||
@ -1,5 +1,5 @@
|
|||||||
import config from '../../config.js';
|
import dotenv from 'dotenv';
|
||||||
import { partModel } from '../../database/schemas/management/part.schema.js';
|
import { partModel } from '../../schemas/management/part.schema.js';
|
||||||
import log4js from 'log4js';
|
import log4js from 'log4js';
|
||||||
import mongoose from 'mongoose';
|
import mongoose from 'mongoose';
|
||||||
import {
|
import {
|
||||||
@ -9,11 +9,11 @@ import {
|
|||||||
editObject,
|
editObject,
|
||||||
newObject,
|
newObject,
|
||||||
listObjectsByProperties,
|
listObjectsByProperties,
|
||||||
getModelStats,
|
|
||||||
getModelHistory,
|
|
||||||
} from '../../database/database.js';
|
} from '../../database/database.js';
|
||||||
|
dotenv.config();
|
||||||
|
|
||||||
const logger = log4js.getLogger('Parts');
|
const logger = log4js.getLogger('Parts');
|
||||||
logger.level = config.server.logLevel;
|
logger.level = process.env.LOG_LEVEL;
|
||||||
|
|
||||||
export const listPartsRouteHandler = async (
|
export const listPartsRouteHandler = async (
|
||||||
req,
|
req,
|
||||||
@ -53,20 +53,7 @@ export const listPartsByPropertiesRouteHandler = async (req, res, properties = '
|
|||||||
model: partModel,
|
model: partModel,
|
||||||
properties,
|
properties,
|
||||||
filter,
|
filter,
|
||||||
populate: [
|
populate: ['vendor', 'priceTaxRate', 'costTaxRate'],
|
||||||
{
|
|
||||||
path: 'vendor',
|
|
||||||
from: 'vendors',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: 'priceTaxRate',
|
|
||||||
from: 'taxrates',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: 'costTaxRate',
|
|
||||||
from: 'taxrates',
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
});
|
||||||
|
|
||||||
if (result?.error) {
|
if (result?.error) {
|
||||||
@ -184,25 +171,3 @@ export const deletePartRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
res.send(result);
|
res.send(result);
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getPartStatsRouteHandler = async (req, res) => {
|
|
||||||
const result = await getModelStats({ model: partModel });
|
|
||||||
if (result?.error) {
|
|
||||||
logger.error('Error fetching part stats:', result.error);
|
|
||||||
return res.status(result.code).send(result);
|
|
||||||
}
|
|
||||||
logger.trace('Part stats:', result);
|
|
||||||
res.send(result);
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getPartHistoryRouteHandler = async (req, res) => {
|
|
||||||
const from = req.query.from;
|
|
||||||
const to = req.query.to;
|
|
||||||
const result = await getModelHistory({ model: partModel, from, to });
|
|
||||||
if (result?.error) {
|
|
||||||
logger.error('Error fetching part history:', result.error);
|
|
||||||
return res.status(result.code).send(result);
|
|
||||||
}
|
|
||||||
logger.trace('Part history:', result);
|
|
||||||
res.send(result);
|
|
||||||
};
|
|
||||||
|
|||||||
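
(The removed object form above pairs path with from; from is the key MongoDB's aggregation $lookup uses for the joined collection, which suggests, though the diff does not confirm, that listObjectsByProperties builds an aggregation pipeline rather than calling Mongoose populate. For reference, the plain $lookup the vendor entry would correspond to, with the local field name assumed:)

partModel.aggregate([
  {
    $lookup: {
      from: 'vendors',       // collection to join, as in the removed spec
      localField: 'vendor',  // field on parts holding the reference (assumed)
      foreignField: '_id',   // matching field on vendors
      as: 'vendor',
    },
  },
]);
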
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { productModel } from '../../database/schemas/management/product.schema.js';
+import dotenv from 'dotenv';
+import { productModel } from '../../schemas/management/product.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Products');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listProductsRouteHandler = async (
   req,
@@ -170,25 +170,3 @@ export const deleteProductRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getProductStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: productModel });
-  if (result?.error) {
-    logger.error('Error fetching product stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Product stats:', result);
-  res.send(result);
-};
-
-export const getProductHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: productModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching product history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Product history:', result);
-  res.send(result);
-};
@@ -1,25 +1,26 @@
-import config from '../../config.js';
-import { jobModel } from '../../database/schemas/production/job.schema.js';
-import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
+import dotenv from 'dotenv';
+import { jobModel } from '../../schemas/production/job.schema.js';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
 import log4js from 'log4js';
-import { printerModel } from '../../database/schemas/production/printer.schema.js';
-import { filamentModel } from '../../database/schemas/management/filament.schema.js';
-import { gcodeFileModel } from '../../database/schemas/production/gcodefile.schema.js';
-import { partModel } from '../../database/schemas/management/part.schema.js';
-import { productModel } from '../../database/schemas/management/product.schema.js';
-import { vendorModel } from '../../database/schemas/management/vendor.schema.js';
-import { filamentStockModel } from '../../database/schemas/inventory/filamentstock.schema.js';
-import { stockEventModel } from '../../database/schemas/inventory/stockevent.schema.js';
-import { stockAuditModel } from '../../database/schemas/inventory/stockaudit.schema.js';
-import { partStockModel } from '../../database/schemas/inventory/partstock.schema.js';
-import { auditLogModel } from '../../database/schemas/management/auditlog.schema.js';
-import { userModel } from '../../database/schemas/management/user.schema.js';
-import { noteTypeModel } from '../../database/schemas/management/notetype.schema.js';
-import { noteModel } from '../../database/schemas/misc/note.schema.js';
+import { printerModel } from '../../schemas/production/printer.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
+import { partModel } from '../../schemas/management/part.schema.js';
+import { productModel } from '../../schemas/management/product.schema.js';
+import { vendorModel } from '../../schemas/management/vendor.schema.js';
+import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
+import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
+import { userModel } from '../../schemas/management/user.schema.js';
+import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
+import { noteModel } from '../../schemas/misc/note.schema.js';
 import mongoose from 'mongoose';
+dotenv.config();
 
 const logger = log4js.getLogger('Jobs');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 // Map prefixes to models and id fields
 const PREFIX_MODEL_MAP = {
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { taxRateModel } from '../../database/schemas/management/taxrates.schema.js';
+import dotenv from 'dotenv';
+import { taxRateModel } from '../../schemas/management/taxrates.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('TaxRates');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listTaxRatesRouteHandler = async (
   req,
@@ -166,25 +166,3 @@ export const deleteTaxRateRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getTaxRateStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: taxRateModel });
-  if (result?.error) {
-    logger.error('Error fetching tax rate stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Tax rate stats:', result);
-  res.send(result);
-};
-
-export const getTaxRateHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: taxRateModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching tax rate history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Tax rate history:', result);
-  res.send(result);
-};
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { taxRecordModel } from '../../database/schemas/management/taxrecord.schema.js';
+import dotenv from 'dotenv';
+import { taxRecordModel } from '../../schemas/management/taxrecord.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('TaxRecords');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listTaxRecordsRouteHandler = async (
   req,
@@ -162,25 +162,3 @@ export const deleteTaxRecordRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getTaxRecordStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: taxRecordModel });
-  if (result?.error) {
-    logger.error('Error fetching tax record stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Tax record stats:', result);
-  res.send(result);
-};
-
-export const getTaxRecordHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: taxRecordModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching tax record history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Tax record history:', result);
-  res.send(result);
-};
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { userModel } from '../../database/schemas/management/user.schema.js';
+import dotenv from 'dotenv';
+import { userModel } from '../../schemas/management/user.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -7,12 +7,12 @@ import {
   listObjectsByProperties,
   getObject,
   editObject,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
 
+dotenv.config();
 
 const logger = log4js.getLogger('Users');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listUsersRouteHandler = async (
   req,
@@ -117,25 +117,3 @@ export const editUserRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getUserStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: userModel });
-  if (result?.error) {
-    logger.error('Error fetching user stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('User stats:', result);
-  res.send(result);
-};
-
-export const getUserHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: userModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching user history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('User history:', result);
-  res.send(result);
-};
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { vendorModel } from '../../database/schemas/management/vendor.schema.js';
+import dotenv from 'dotenv';
+import { vendorModel } from '../../schemas/management/vendor.schema.js';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import {
@@ -9,11 +9,11 @@ import {
   editObject,
   newObject,
   listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Vendors');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listVendorsRouteHandler = async (
   req,
@@ -166,25 +166,3 @@ export const deleteVendorRouteHandler = async (req, res) => {
 
   res.send(result);
 };
-
-export const getVendorStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: vendorModel });
-  if (result?.error) {
-    logger.error('Error fetching vendor stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Vendor stats:', result);
-  res.send(result);
-};
-
-export const getVendorHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: vendorModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching vendor history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Vendor history:', result);
-  res.send(result);
-};
@@ -1,15 +1,16 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
 import { keycloak } from '../../keycloak.js';
 import log4js from 'log4js';
 import axios from 'axios';
-import { userModel } from '../../database/schemas/management/user.schema.js';
+import { userModel } from '../../schemas/management/user.schema.js';
 import { readFileSync } from 'fs';
 import { resolve } from 'path';
 import NodeCache from 'node-cache';
 import jwt from 'jsonwebtoken';
+dotenv.config();
 
 const logger = log4js.getLogger('Auth');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 // Initialize NodeCache with 5-minute TTL for token-based user lookup
 const tokenUserCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
@@ -31,16 +32,16 @@ const lookupUserByToken = async (token) => {
   // Check cache first
   const cachedUser = tokenUserCache.get(token);
   if (cachedUser) {
-    logger.trace(`User found in token cache for token: ${token.substring(0, 20)}...`);
+    logger.debug(`User found in token cache for token: ${token.substring(0, 20)}...`);
     return cachedUser;
   }
 
   // If not in cache, decode token and lookup user
-  logger.trace(`User not in token cache, decoding token: ${token.substring(0, 20)}...`);
+  logger.debug(`User not in token cache, decoding token: ${token.substring(0, 20)}...`);
   const decodedToken = jwt.decode(token);
 
   if (!decodedToken || !decodedToken.preferred_username) {
-    logger.trace('Invalid token or missing preferred_username');
+    logger.warn('Invalid token or missing preferred_username');
     return null;
   }
 
@@ -50,7 +51,7 @@ const lookupUserByToken = async (token) => {
   if (user) {
     // Store in cache using token as key
     tokenUserCache.set(token, user);
-    logger.trace(`User stored in token cache for token: ${token.substring(0, 20)}...`);
+    logger.debug(`User stored in token cache for token: ${token.substring(0, 20)}...`);
     return user;
   }
 
@@ -83,27 +84,27 @@ export const loginRouteHandler = (req, res, redirectType = 'web') => {
   const redirectUrl = req.query.redirect_uri || '/production/overview';
 
   // Store the original URL to redirect after login
-  const authUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/auth`;
+  const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
   const callBackState = `/auth/${redirectType}/callback`;
-  const callbackUrl = `${config.app.urlApi}${callBackState}`;
+  const callbackUrl = `${process.env.APP_URL_API}${callBackState}`;
   const state = encodeURIComponent(redirectUrl);
 
   logger.warn(req.query.redirect_uri);
 
   res.redirect(
-    `${authUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
+    `${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
   );
 };
 
 // Function to fetch user from Keycloak and store in database and session
 const fetchAndStoreUser = async (req, token) => {
-  const userInfoUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/userinfo`;
+  const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;
 
   try {
     const response = await axios.post(
       userInfoUrl,
       new URLSearchParams({
-        client_id: config.auth.keycloak.clientId,
+        client_id: process.env.KEYCLOAK_CLIENT_ID,
         client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
       }),
       {
@@ -153,14 +154,14 @@ export const loginTokenRouteHandler = async (req, res, redirectType = 'web') =>
   // Otherwise, start the request and store the promise
   const tokenPromise = (async () => {
     const callBackState = `/auth/${redirectType}/callback`;
-    const callbackUrl = `${config.app.urlApi}${callBackState}`;
-    const tokenUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token`;
+    const callbackUrl = `${process.env.APP_URL_API}${callBackState}`;
+    const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
 
     const response = await axios.post(
       tokenUrl,
       new URLSearchParams({
        grant_type: 'authorization_code',
-        client_id: config.auth.keycloak.clientId,
+        client_id: process.env.KEYCLOAK_CLIENT_ID,
        client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
        code: code,
        redirect_uri: callbackUrl,
@@ -210,16 +211,16 @@ export const loginCallbackRouteHandler = async (req, res, redirectType = 'web')
   var appUrl;
   switch (redirectType) {
     case 'web':
-      appUrl = config.app.urlClient;
+      appUrl = process.env.APP_URL_CLIENT || 'http://localhost:3000';
       break;
     case 'app-scheme':
      appUrl = 'farmcontrol://app';
      break;
    case 'app-localhost':
-      appUrl = config.app.devAuthClient;
+      appUrl = process.env.APP_DEV_AUTH_CLIENT || 'http://localhost:3500';
      break;
    default:
-      appUrl = config.app.urlClient;
+      appUrl = process.env.APP_URL_CLIENT || 'http://localhost:3000';
      break;
   }
   const redirectUriRaw = `${appUrl}${state}`;
@@ -324,12 +325,12 @@ export const logoutRouteHandler = (req, res) => {
     }
 
     // Construct the Keycloak logout URL with the redirect URI
-    const logoutUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/logout`;
-    const encodedRedirectUri = encodeURIComponent(`${config.app.urlClient}${redirectUrl}`);
+    const logoutUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/logout`;
+    const encodedRedirectUri = encodeURIComponent(`${process.env.APP_URL_CLIENT}${redirectUrl}`);
 
     // Redirect to Keycloak logout with the redirect URI
     res.redirect(
-      `${logoutUrl}?client_id=${config.auth.keycloak.clientId}&post_logout_redirect_uri=${encodedRedirectUri}`
+      `${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`
     );
   });
 };
@@ -364,21 +365,21 @@ export const getUserInfoHandler = (req, res) => {
 // Register route - Since we're using Keycloak, registration should be handled there
 // This endpoint will redirect to Keycloak's registration page
 export const registerRouteHandler = (req, res) => {
-  const registrationUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/registrations`;
-  const redirectUri = encodeURIComponent(config.app.urlClient + '/auth/login');
+  const registrationUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/registrations`;
+  const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
 
   res.redirect(
-    `${registrationUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${redirectUri}`
+    `${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
   );
 };
 
 // Forgot password handler - redirect to Keycloak's reset password page
 export const forgotPasswordRouteHandler = (req, res) => {
-  const resetUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/login-actions/reset-credentials`;
-  const redirectUri = encodeURIComponent(config.app.urlClient + '/auth/login');
+  const resetUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/login-actions/reset-credentials`;
+  const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
 
   res.redirect(
-    `${resetUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${redirectUri}`
+    `${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
   );
 };
 
@@ -393,14 +394,14 @@ export const refreshTokenRouteHandler = (req, res) => {
   }
 
   const refreshToken = req.session['keycloak-token'].refresh_token;
-  const tokenUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token`;
+  const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
 
   axios
     .post(
       tokenUrl,
      new URLSearchParams({
        grant_type: 'refresh_token',
-        client_id: config.auth.keycloak.clientId,
+        client_id: process.env.KEYCLOAK_CLIENT_ID,
        client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
        refresh_token: refreshToken,
      }).toString(),
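
(For reference on the login redirect assembled above: with the environment variables set, loginRouteHandler sends the browser to Keycloak's standard OIDC authorization endpoint. The resulting URL has this shape, with illustrative values:)

https://keycloak.example.com/realms/example-realm/protocol/openid-connect/auth?client_id=example-client&redirect_uri=http://localhost:8080/auth/web/callback&response_type=code&scope=openid&state=%2Fproduction%2Foverview

(Note that only state passes through encodeURIComponent; redirect_uri is interpolated raw, which holds up only while APP_URL_API contains no characters that need escaping in a query string.)
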
@@ -1,4 +1,4 @@
-import { models } from '../../database/schemas/models.js';
+import { models } from '../../schemas/models.js';
 
 /**
  * Get all models from the PREFIX_MODEL_MAP
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { noteModel } from '../../database/schemas/misc/note.schema.js';
+import dotenv from 'dotenv';
+import { noteModel } from '../../schemas/misc/note.schema.js';
 import log4js from 'log4js';
 import {
   deleteObject,
@@ -9,13 +9,13 @@ import {
   listObjectsByProperties,
   newObject,
   recursivelyDeleteChildObjects,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
 import mongoose from 'mongoose';
 
+dotenv.config();
 
 const logger = log4js.getLogger('Notes');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listNotesRouteHandler = async (
   req,
@@ -73,7 +73,7 @@ export const getNoteRouteHandler = async (req, res) => {
   const result = await getObject({
     model: noteModel,
     id,
-    populate: ['noteType', 'user', 'parent'],
+    populate: ['noteType', 'user'],
   });
   if (result?.error) {
     logger.warn(`Note not found with supplied id.`);
@@ -161,25 +161,3 @@ export const deleteNoteRouteHandler = async (req, res) => {
     status: 'ok',
   });
 };
-
-export const getNoteStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: noteModel });
-  if (result?.error) {
-    logger.error('Error fetching note stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Note stats:', result);
-  res.send(result);
-};
-
-export const getNoteHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: noteModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching note history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Note history:', result);
-  res.send(result);
-};
@@ -1,10 +1,11 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
 import log4js from 'log4js';
 import mongoose from 'mongoose';
 import { getAllModels, getModelByPrefix } from './model.js';
+dotenv.config();
 
 const logger = log4js.getLogger('Spotlight');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 // Helper function to build search filter from query parameters
 const buildSearchFilter = (params) => {
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { gcodeFileModel } from '../../database/schemas/production/gcodefile.schema.js';
+import dotenv from 'dotenv';
+import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
 import log4js from 'log4js';
 import {
   deleteObject,
@@ -12,8 +12,10 @@ import {
 import { getFileContentRouteHandler } from '../management/files.js';
 import mongoose from 'mongoose';
 
+dotenv.config();
 
 const logger = log4js.getLogger('GCodeFiles');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;
 
 export const listGCodeFilesRouteHandler = async (
   req,
@@ -1,6 +1,6 @@
-import config from '../../config.js';
+import dotenv from 'dotenv';
 import mongoose from 'mongoose';
-import { jobModel } from '../../database/schemas/production/job.schema.js';
+import { jobModel } from '../../schemas/production/job.schema.js';
 import log4js from 'log4js';
 import {
   deleteObject,
@@ -8,12 +8,12 @@ import {
   listObjects,
   listObjectsByProperties,
   newObject,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
-import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
+dotenv.config();
+
 const logger = log4js.getLogger('Jobs');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listJobsRouteHandler = async (
   req,
@@ -161,24 +161,26 @@ export const deleteJobRouteHandler = async (req, res) => {
 };

 export const getJobStatsRouteHandler = async (req, res) => {
-  console.log('Getting job stats');
-  const result = await getModelStats({ model: jobModel });
-  if (result?.error) {
-    logger.error('Error fetching job stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Job stats:', result);
-  res.send(result);
-};
-
-export const getJobHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: jobModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching job history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Job history:', result);
-  res.send(result);
+  try {
+    const stats = await jobModel.aggregate([
+      {
+        $group: {
+          _id: '$state.type',
+          count: { $sum: 1 },
+        },
+      },
+    ]);
+
+    // Transform the results into a more readable format
+    const formattedStats = stats.reduce((acc, curr) => {
+      acc[curr._id] = curr.count;
+      return acc;
+    }, {});
+
+    logger.trace('Print job stats by state:', formattedStats);
+    res.send(formattedStats);
+  } catch (error) {
+    logger.error('Error fetching print job stats:', error);
+    res.status(500).send({ error: error.message });
+  }
 };
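Note: the generic getModelStats call is replaced by an inline aggregation that groups jobs on state.type and flattens the result into a plain object keyed by state. A standalone sketch of that reduce step, run over a made-up aggregation result (the state names are invented for illustration):

    // Shape returned by the $group stage above, with hypothetical states.
    const stats = [
      { _id: 'printing', count: 3 },
      { _id: 'queued', count: 7 },
    ];

    // Same fold as the handler: [{ _id, count }] -> { state: count }.
    const formattedStats = stats.reduce((acc, curr) => {
      acc[curr._id] = curr.count;
      return acc;
    }, {});

    console.log(formattedStats); // { printing: 3, queued: 7 }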
@@ -1,5 +1,5 @@
-import config from '../../config.js';
-import { printerModel } from '../../database/schemas/production/printer.schema.js';
+import dotenv from 'dotenv';
+import { printerModel } from '../../schemas/production/printer.schema.js';
 import log4js from 'log4js';
 import {
   deleteObject,
@@ -8,13 +8,13 @@ import {
   listObjects,
   listObjectsByProperties,
   newObject,
-  getModelStats,
-  getModelHistory,
 } from '../../database/database.js';
 import mongoose from 'mongoose';

+dotenv.config();
+
 const logger = log4js.getLogger('Printers');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listPrintersRouteHandler = async (
   req,
@@ -169,24 +169,26 @@ export const deletePrinterRouteHandler = async (req, res) => {
 };

 export const getPrinterStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: printerModel });
-  console.log(result);
-  if (!result) {
-    logger.error('Error fetching printer stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Printer stats:', result);
-  res.send(result);
-};
-
-export const getPrinterHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: printerModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching printer history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Printer history:', result);
-  res.send(result);
+  try {
+    const stats = await printerModel.aggregate([
+      {
+        $group: {
+          _id: '$state.type',
+          count: { $sum: 1 },
+        },
+      },
+    ]);
+
+    // Transform the results into a more readable format
+    const formattedStats = stats.reduce((acc, curr) => {
+      acc[curr._id] = curr.count;
+      return acc;
+    }, {});
+
+    logger.trace('Printer stats by state:', formattedStats);
+    res.send(formattedStats);
+  } catch (error) {
+    logger.error('Error fetching printer stats:', error);
+    res.status(500).send({ error: error.message });
+  }
 };
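Note: this handler is now identical to the jobs version except for the model (and it drops the old `if (!result)` check, which dereferenced result.error on a null result). A shared higher-order handler could remove the duplication; a hypothetical sketch, with getStatsByState as an invented name that is not in the commit:

    // Hypothetical refactor: one stats handler parameterized by model.
    const getStatsByState = (model, logger) => async (req, res) => {
      try {
        const stats = await model.aggregate([
          { $group: { _id: '$state.type', count: { $sum: 1 } } },
        ]);
        const formatted = stats.reduce((acc, curr) => {
          acc[curr._id] = curr.count;
          return acc;
        }, {});
        res.send(formatted);
      } catch (error) {
        logger.error('Error fetching stats:', error);
        res.status(500).send({ error: error.message });
      }
    };

    // Usage: export const getPrinterStatsRouteHandler = getStatsByState(printerModel, logger);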
@@ -1,15 +1,11 @@
-import config from '../../config.js';
-import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
+import dotenv from 'dotenv';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
 import log4js from 'log4js';
-import {
-  getObject,
-  listObjects,
-  listObjectsByProperties,
-  getModelStats,
-  getModelHistory,
-} from '../../database/database.js';
+import { getObject, listObjects, listObjectsByProperties } from '../../database/database.js';
+dotenv.config();
+
 const logger = log4js.getLogger('Sub Jobs');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 export const listSubJobsRouteHandler = async (
   req,
@@ -31,7 +27,7 @@ export const listSubJobsRouteHandler = async (
     search,
     sort,
     order,
-    populate: ['printer', 'job'],
+    populate: ['printer'],
   });

   if (result?.error) {
@@ -73,7 +69,6 @@ export const getSubJobRouteHandler = async (req, res) => {
   const result = await getObject({
     model: subJobModel,
     id,
-    populate: ['printer'],
   });
   if (result?.error) {
     logger.warn(`Sub job not found with supplied id.`);
@@ -82,25 +77,3 @@ export const getSubJobRouteHandler = async (req, res) => {
   logger.debug(`Retreived sub job with ID: ${id}`);
   res.send(result);
 };
-
-export const getSubJobStatsRouteHandler = async (req, res) => {
-  const result = await getModelStats({ model: subJobModel });
-  if (result?.error) {
-    logger.error('Error fetching sub job stats:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Sub job stats:', result);
-  res.send(result);
-};
-
-export const getSubJobHistoryRouteHandler = async (req, res) => {
-  const from = req.query.from;
-  const to = req.query.to;
-  const result = await getModelHistory({ model: subJobModel, from, to });
-  if (result?.error) {
-    logger.error('Error fetching sub job history:', result.error);
-    return res.status(result.code).send(result);
-  }
-  logger.trace('Sub job history:', result);
-  res.send(result);
-};
@@ -11,27 +11,29 @@
 } from '@aws-sdk/client-s3';
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
 import log4js from 'log4js';
-import config from '../config.js';
+import dotenv from 'dotenv';
+
+dotenv.config();

 const logger = log4js.getLogger('CephStorage');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

 // Configure AWS SDK v3 for Ceph (S3-compatible)
 const s3Config = {
   credentials: {
-    accessKeyId: config.storage.ceph.accessKeyId,
-    secretAccessKey: config.storage.ceph.secretAccessKey,
+    accessKeyId: process.env.CEPH_ACCESS_KEY_ID,
+    secretAccessKey: process.env.CEPH_SECRET_ACCESS_KEY,
   },
-  endpoint: config.storage.ceph.endpoint, // e.g., 'http://ceph-gateway:7480'
+  endpoint: process.env.CEPH_ENDPOINT, // e.g., 'http://ceph-gateway:7480'
   forcePathStyle: true, // Required for Ceph (renamed from s3ForcePathStyle)
-  region: config.storage.ceph.region,
+  region: process.env.CEPH_REGION || 'us-east-1',
 };

 const s3Client = new S3Client(s3Config);

 // Default bucket names for different file types
 const BUCKETS = {
-  FILES: config.storage.ceph.filesBucket,
+  FILES: process.env.CEPH_FILES_BUCKET || 'farmcontrol',
 };

 /**
@@ -39,7 +41,6 @@ const BUCKETS = {
 */
 export const initializeBuckets = async () => {
   try {
-    logger.info('Initializing Ceph buckets...');
     for (const [type, bucketName] of Object.entries(BUCKETS)) {
       try {
         await s3Client.send(new HeadBucketCommand({ Bucket: bucketName }));
@@ -53,7 +54,6 @@ export const initializeBuckets = async () => {
         }
       }
     }
-    logger.info('Ceph buckets initialized successfully.');
   } catch (error) {
     logger.error('Error initializing buckets:', error);
     throw error;
@@ -80,7 +80,7 @@ export const uploadFile = async (bucket, key, body, contentType, metadata = {})
   };

   await s3Client.send(new PutObjectCommand(params));
-  const result = { Location: `${config.storage.ceph.endpoint}/${bucket}/${key}` };
+  const result = { Location: `${process.env.CEPH_ENDPOINT}/${bucket}/${key}` };
   logger.debug(`File uploaded successfully: ${key} to bucket ${bucket}`);
   return result;
 } catch (error) {
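Note: the storage module now depends on CEPH_ACCESS_KEY_ID, CEPH_SECRET_ACCESS_KEY and CEPH_ENDPOINT being present in the environment (CEPH_REGION and CEPH_FILES_BUCKET have fallbacks). A startup guard like the following (an addition suggested here, not in the commit) would fail fast instead of building a misconfigured S3 client:

    // Hypothetical fail-fast check for the variables read above.
    const required = ['CEPH_ACCESS_KEY_ID', 'CEPH_SECRET_ACCESS_KEY', 'CEPH_ENDPOINT'];
    const missing = required.filter((name) => !process.env[name]);
    if (missing.length > 0) {
      throw new Error(`Missing environment variables: ${missing.join(', ')}`);
    }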
37
src/utils.js
@@ -1,14 +1,24 @@
 import { ObjectId } from 'mongodb';
-import { auditLogModel } from './database/schemas/management/auditlog.schema.js';
+import { auditLogModel } from './schemas/management/auditlog.schema.js';
 import exifr from 'exifr';
+import { etcdServer } from './database/etcd.js';
 import { natsServer } from './database/nats.js';
 import log4js from 'log4js';
-import config from './config.js';
+import dotenv from 'dotenv';
 import crypto from 'crypto';
 import canonicalize from 'canonical-json';
+dotenv.config();

 const logger = log4js.getLogger('Utils');
-logger.level = config.server.logLevel;
+logger.level = process.env.LOG_LEVEL;

+import { customAlphabet } from 'nanoid';
+
+const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
+const generateId = () => {
+  // 10 characters
+  return customAlphabet(ALPHABET, 12);
+};
+
 function buildWildcardRegexPattern(input) {
   // Escape all regex special chars except * (which we treat as a wildcard)
@@ -424,10 +434,6 @@ async function distributeUpdate(value, id, type) {
   await natsServer.publish(`${type}s.${id}.object`, value);
 }

-async function distributeStats(value, type) {
-  await natsServer.publish(`${type}s.stats`, value);
-}
-
 async function distributeNew(value, type) {
   await natsServer.publish(`${type}s.new`, value);
 }
@@ -687,21 +693,6 @@ function jsonToCacheKey(obj) {
   return hash;
 }

-export function getQueryToCacheKey({ model, id, populate }) {
-  const populateKey = [];
-  if (populate) {
-    const populateArray = Array.isArray(populate) ? populate : [populate];
-    for (const pop of populateArray) {
-      if (typeof pop === 'string') {
-        populateKey.push(pop);
-      } else if (typeof pop === 'object' && pop.path) {
-        populateKey.push(pop.path);
-      }
-    }
-  }
-  return `${model}:${id?.toString()}-${populateKey.join(',')}`;
-}
-
 export {
   parseFilter,
   convertToCamelCase,
@@ -712,7 +703,6 @@ export {
   flatternObjectIds,
   expandObjectIds,
   distributeUpdate,
-  distributeStats,
   distributeNew,
   distributeDelete,
   distributeChildUpdate,
@@ -724,4 +714,5 @@ export {
   modelHasRef,
   getFieldsByRef,
   jsonToCacheKey,
+  generateId,
 };
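Note: nanoid's customAlphabet(alphabet, size) returns a generator function, so the generateId added above returns a fresh generator on every call rather than an ID string, and its "// 10 characters" comment disagrees with the length argument of 12. A corrected sketch of the presumed intent (an interpretation, not what the commit contains):

    import { customAlphabet } from 'nanoid';

    const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
    // Build the generator once; each call then yields one 12-character ID.
    const nanoid = customAlphabet(ALPHABET, 12);
    const generateId = () => nanoid(); // e.g. 'K7QH2M1XP0ZA'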