Refactored config system to have both production and development configs.

Author: Tom Butcher
Date: 2025-12-13 23:01:03 +00:00
parent 8e0c991a58
commit c3b1cdead6
43 changed files with 292 additions and 227 deletions

config.json (new file, 103 lines)

@@ -0,0 +1,103 @@
{
"development": {
"server": {
"port": 8787,
"logLevel": "debug"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": [],
"sessionSecret": "REDACTED"
},
"app": {
"urlClient": "http://localhost:3000",
"urlElectronClient": "http://localhost:3000",
"urlApi": "http://localhost:8787",
"devAuthClient": "http://localhost:3500"
},
"database": {
"mongo": {
"url": "mongodb://127.0.0.1:27017/farmcontrol",
"link": "127.0.0.1:27017"
},
"redis": {
"url": "",
"host": "localhost",
"port": 6379,
"password": "",
"cacheTtl": 30
},
"nats": {
"host": "localhost",
"port": 4222
}
},
"storage": {
"fileStorage": "./uploads",
"ceph": {
"accessKeyId": "minioadmin",
"secretAccessKey": "minioadmin123",
"endpoint": "http://127.0.0.1:9000",
"region": "us-east-1",
"filesBucket": "farmcontrol"
}
},
"otpExpiryMins": 0.5
},
"production": {
"server": {
"port": 8080,
"logLevel": "info"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": [],
"sessionSecret": "REDACTED"
},
"app": {
"urlClient": "http://localhost:3000",
"urlElectronClient": "http://localhost:3000",
"urlApi": "http://localhost:8080",
"devAuthClient": "http://localhost:3500"
},
"database": {
"mongo": {
"url": "mongodb://localhost:27017/farmcontrol",
"link": "localhost:27017"
},
"redis": {
"url": "",
"host": "localhost",
"port": 6379,
"password": "",
"cacheTtl": 30
},
"nats": {
"host": "localhost",
"port": 4222
}
},
"storage": {
"fileStorage": "./uploads",
"ceph": {
"accessKeyId": "minioadmin",
"secretAccessKey": "minioadmin123",
"endpoint": "http://127.0.0.1:9000",
"region": "us-east-1",
"filesBucket": "farmcontrol"
}
}
}
}

src/config.js (new file, 42 lines)

@@ -0,0 +1,42 @@
// config.js - Configuration handling
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
// Configure paths relative to this file
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const CONFIG_PATH = path.resolve(__dirname, '../config.json');
// Determine environment
const NODE_ENV = process.env.NODE_ENV || 'development';
// Load config file
function loadConfig() {
try {
if (!fs.existsSync(CONFIG_PATH)) {
throw new Error(`Configuration file not found at ${CONFIG_PATH}`);
}
const configData = fs.readFileSync(CONFIG_PATH, 'utf8');
const config = JSON.parse(configData);
if (!config[NODE_ENV]) {
throw new Error(`Configuration for environment '${NODE_ENV}' not found in config.json`);
}
return config[NODE_ENV];
} catch (err) {
console.error('Error loading config:', err);
throw err;
}
}
// Get current environment
export function getEnvironment() {
return NODE_ENV;
}
// Export singleton config instance
const config = loadConfig();
export default config;
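
Example usage from a consumer module (a sketch; any module can import the shared instance and the helper):

import config, { getEnvironment } from './config.js';
console.log(`[${getEnvironment()}] API on port ${config.server.port}`); // development → 8787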

@@ -11,29 +11,27 @@ import {
} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
import config from '../config.js';
const logger = log4js.getLogger('CephStorage');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Configure AWS SDK v3 for Ceph (S3-compatible)
const s3Config = {
credentials: {
accessKeyId: process.env.CEPH_ACCESS_KEY_ID,
secretAccessKey: process.env.CEPH_SECRET_ACCESS_KEY,
accessKeyId: config.storage.ceph.accessKeyId,
secretAccessKey: config.storage.ceph.secretAccessKey,
},
endpoint: process.env.CEPH_ENDPOINT, // e.g., 'http://ceph-gateway:7480'
endpoint: config.storage.ceph.endpoint, // e.g., 'http://ceph-gateway:7480'
forcePathStyle: true, // Required for Ceph (renamed from s3ForcePathStyle)
region: process.env.CEPH_REGION || 'us-east-1',
region: config.storage.ceph.region,
};
const s3Client = new S3Client(s3Config);
// Default bucket names for different file types
const BUCKETS = {
FILES: process.env.CEPH_FILES_BUCKET || 'farmcontrol',
FILES: config.storage.ceph.filesBucket,
};
/**
@@ -41,6 +39,7 @@ const BUCKETS = {
*/
export const initializeBuckets = async () => {
try {
logger.info('Initializing Ceph buckets...');
for (const [type, bucketName] of Object.entries(BUCKETS)) {
try {
await s3Client.send(new HeadBucketCommand({ Bucket: bucketName }));
@@ -54,6 +53,7 @@ export const initializeBuckets = async () => {
}
}
}
logger.info('Ceph buckets initialized successfully.');
} catch (error) {
logger.error('Error initializing buckets:', error);
throw error;
@@ -80,7 +80,7 @@ export const uploadFile = async (bucket, key, body, contentType, metadata = {})
};
await s3Client.send(new PutObjectCommand(params));
const result = { Location: `${process.env.CEPH_ENDPOINT}/${bucket}/${key}` };
const result = { Location: `${config.storage.ceph.endpoint}/${bucket}/${key}` };
logger.debug(`File uploaded successfully: ${key} to bucket ${bucket}`);
return result;
} catch (error) {
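
For reference, a call against the refactored helper might look like this (a sketch, assuming the uploadFile and BUCKETS exports shown above and the file's new src/database/ceph.js location):

import { uploadFile, BUCKETS } from './database/ceph.js';

const body = Buffer.from('hello world');
const { Location } = await uploadFile(BUCKETS.FILES, 'files/example.txt', body, 'text/plain');
console.log(Location); // e.g. http://127.0.0.1:9000/farmcontrol/files/example.txt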

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../config.js';
import { fileModel } from './schemas/management/file.schema.js';
import _ from 'lodash';
import {
@@ -24,15 +24,13 @@ import { getAllModels } from '../services/misc/model.js';
import { redisServer } from './redis.js';
import { auditLogModel } from './schemas/management/auditlog.schema.js';
dotenv.config();
const logger = log4js.getLogger('Database');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
const cacheLogger = log4js.getLogger('DatabaseCache');
cacheLogger.level = process.env.LOG_LEVEL;
cacheLogger.level = config.server.logLevel;
const CACHE_TTL_SECONDS = parseInt(process.env.REDIS_CACHE_TTL || '30', 10);
const CACHE_TTL_SECONDS = parseInt(config.database.redis.cacheTtl || '30', 10);
export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
if (!model || !id) return undefined;
@@ -969,8 +967,8 @@ export const flushFile = async ({ id, user }) => {
// Try to delete from Ceph storage if it exists
if (file.extension) {
try {
const { deleteFile } = await import('../services/storage/ceph.js');
const { BUCKETS } = await import('../services/storage/ceph.js');
const { deleteFile } = await import('./ceph.js');
const { BUCKETS } = await import('./ceph.js');
const cephKey = `files/${file._id}${file.extension}`;
await deleteFile(BUCKETS.FILES, cephKey);
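
(Both bindings could equally come from a single dynamic import: const { deleteFile, BUCKETS } = await import('./ceph.js');)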

@@ -1,19 +1,18 @@
import mongoose from 'mongoose';
import dotenv from 'dotenv';
import config from '../config.js';
import log4js from 'log4js';
const logger = log4js.getLogger('MongoDB');
logger.level = process.env.LOG_LEVEL;
dotenv.config();
logger.level = config.server.logLevel;
// Set strictQuery to false to prepare for Mongoose 7
mongoose.set('strictQuery', false);
function dbConnect() {
mongoose.connection.once('open', () => logger.info('Database connected.'));
logger.info(`Connecting to MongoDB...`);
mongoose.connection.once('open', () => logger.info('Connected to MongoDB.'));
return mongoose.connect(
`mongodb://${process.env.DB_LINK}/farmcontrol?retryWrites=true&w=majority`,
`mongodb://${config.database.mongo.link}/farmcontrol?retryWrites=true&w=majority`,
{}
);
}
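
Since dbConnect() returns the promise from mongoose.connect, callers can await it, as src/index.js now does:

import { dbConnect } from './database/mongo.js';
await dbConnect(); // resolves once the MongoDB connection is open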

@@ -1,15 +1,9 @@
import { connect } from '@nats-io/transport-node';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
const NATS_HOST = process.env.NATS_HOST || 'localhost';
const NATS_PORT = process.env.NATS_PORT || 4222;
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
import config from '../config.js';
const logger = log4js.getLogger('Nats');
logger.level = LOG_LEVEL;
logger.level = config.server.logLevel;
class NatsServer {
constructor() {
@@ -17,7 +11,7 @@ class NatsServer {
this.subscriptions = new Map(); // subject → { subscription, callbacks }
this.requestHandlers = new Map(); // subject → { handler, callbacks }
this.queuedSubscriptions = new Map(); // subject → { subscription, callbacks, queue }
this.servers = [`nats://${NATS_HOST}:${NATS_PORT}`];
this.servers = [`nats://${config.database.nats.host}:${config.database.nats.port}`];
this.textEncoder = new TextEncoder();
this.textDecoder = new TextDecoder();
@@ -43,7 +37,7 @@ class NatsServer {
if (this.client.isClosed()) {
throw new Error('NATS client connection failed');
}
logger.trace('NATS client connected successfully.');
logger.info('Connected to NATS.');
} catch (error) {
throw error;
}
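
A publish helper on this class would typically wrap the underlying client and the shared encoder; a hypothetical sketch (the real methods sit outside this hunk):

// Hypothetical method on NatsServer, for illustration only
publish(subject, payload) {
  this.client.publish(subject, this.textEncoder.encode(JSON.stringify(payload)));
}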

@@ -1,25 +1,17 @@
import { createClient } from 'redis';
import log4js from 'log4js';
import dotenv from 'dotenv';
dotenv.config();
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
const REDIS_URL = process.env.REDIS_URL;
const REDIS_HOST = process.env.REDIS_HOST || '127.0.0.1';
const REDIS_PORT = process.env.REDIS_PORT || 6379;
const REDIS_PASSWORD = process.env.REDIS_PASSWORD || undefined;
import config from '../config.js';
const logger = log4js.getLogger('Redis');
logger.level = LOG_LEVEL;
logger.level = config.server.logLevel;
class RedisServer {
constructor() {
const url = REDIS_URL || `redis://${REDIS_HOST}:${REDIS_PORT}`;
const url = config.database.redis.url || `redis://${config.database.redis.host}:${config.database.redis.port}`;
this.client = createClient({
url,
password: REDIS_PASSWORD,
password: config.database.redis.password || undefined,
});
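// Elsewhere (database.js above) cache writes use the configured TTL; with the
// redis v4 client that looks roughly like:
//   await redisServer.client.set(key, JSON.stringify(value), { EX: CACHE_TTL_SECONDS });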
this.client.on('error', (err) => {

@@ -1,7 +1,7 @@
import express from 'express';
import bodyParser from 'body-parser';
import cors from 'cors';
import dotenv from 'dotenv';
import config from './config.js';
import { expressSession, keycloak } from './keycloak.js';
import { dbConnect } from './database/mongo.js';
import {
@@ -43,19 +43,18 @@ import * as fs from 'fs';
import log4js from 'log4js';
import { populateUserMiddleware } from './services/misc/auth.js';
import { natsServer } from './database/nats.js';
import { initializeBuckets } from './services/storage/ceph.js';
import { initializeBuckets } from './database/ceph.js';
import { getEnvironment } from './config.js';
dotenv.config();
const PORT = process.env.PORT || 8787;
const PORT = config.server.port;
const app = express();
const logger = log4js.getLogger('App');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
app.use(log4js.connectLogger(logger, { level: 'trace' }));
const whitelist = [process.env.APP_URL_CLIENT, process.env.APP_URL_ELECTRON_CLIENT];
const whitelist = [config.app.urlClient, config.app.urlElectronClient];
const corsOptions = {
origin: function (origin, callback) {
if (!origin || whitelist.indexOf(origin) !== -1) {
@@ -69,18 +68,20 @@ const corsOptions = {
// Initialize application
async function initializeApp() {
logger.info('Initializing application...');
logger.info(`Environment: ${getEnvironment()}`);
logger.info(`Port: ${PORT}`);
logger.info(`Log Level: ${config.server.logLevel}`);
try {
// Connect to database
dbConnect();
await dbConnect();
// Connect to NATS
natsServer.connect();
logger.info('Connected to NATS');
await natsServer.connect();
// Initialize Ceph buckets
try {
await initializeBuckets();
logger.info('Ceph buckets initialized successfully');
} catch (err) {
logger.error('Failed to initialize Ceph buckets:', err);
// Don't throw error - allow app to start without Ceph for development
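
The hunk ends here; presumably initializeApp finishes by binding the HTTP server, along the lines of (a sketch, not part of this diff):

app.listen(PORT, () => logger.info(`Listening on port ${PORT}`));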

@@ -1,6 +1,6 @@
import Keycloak from 'keycloak-connect';
import session from 'express-session';
import dotenv from 'dotenv';
import config, { getEnvironment } from './config.js';
import axios from 'axios';
import jwt from 'jsonwebtoken';
import log4js from 'log4js';
@@ -9,9 +9,8 @@ import { userModel } from './database/schemas/management/user.schema.js';
import { getObject } from './database/database.js';
import { hostModel } from './database/schemas/management/host.schema.js';
dotenv.config();
const logger = log4js.getLogger('Keycloak');
logger.level = process.env.LOG_LEVEL || 'info';
logger.level = config.server.logLevel || 'info';
// Initialize NodeCache with 5-minute TTL
const userCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
@@ -56,24 +55,24 @@ const lookupUser = async (preferredUsername) => {
// Initialize Keycloak
const keycloakConfig = {
realm: process.env.KEYCLOAK_REALM || 'farm-control',
'auth-server-url': process.env.KEYCLOAK_URL || 'http://localhost:8080/auth',
'ssl-required': process.env.NODE_ENV === 'production' ? 'external' : 'none',
resource: process.env.KEYCLOAK_CLIENT_ID || 'farmcontrol-client',
realm: config.auth.keycloak.realm,
'auth-server-url': config.auth.keycloak.url,
'ssl-required': getEnvironment() === 'production' ? 'external' : 'none',
resource: config.auth.keycloak.clientId,
'confidential-port': 0,
'bearer-only': true,
'public-client': false,
'use-resource-role-mappings': true,
'verify-token-audience': true,
credentials: {
secret: process.env.KEYCLOAK_CLIENT_SECRET,
secret: config.auth.keycloak.clientSecret,
},
};
const memoryStore = new session.MemoryStore();
var expressSession = session({
secret: process.env.SESSION_SECRET || 'REDACTED',
secret: config.auth.sessionSecret,
resave: false,
saveUninitialized: true, // Set this to true to ensure session is initialized
store: memoryStore,
@@ -95,11 +94,11 @@ const isAuthenticated = async (req, res, next) => {
try {
// Verify token with Keycloak introspection endpoint
const response = await axios.post(
`${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token/introspect`,
`${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token/introspect`,
new URLSearchParams({
token: token,
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
client_id: config.auth.keycloak.clientId,
client_secret: config.auth.keycloak.clientSecret,
}),
{
headers: {

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { filamentStockModel } from '../../database/schemas/inventory/filamentstock.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Filament Stocks');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listFilamentStocksRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { orderItemModel } from '../../database/schemas/inventory/orderitem.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Order Items');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listOrderItemsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { partStockModel } from '../../database/schemas/inventory/partstock.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Part Stocks');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPartStocksRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { purchaseOrderModel } from '../../database/schemas/inventory/purchaseorder.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,9 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Purchase Orders');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPurchaseOrdersRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { shipmentModel } from '../../database/schemas/inventory/shipment.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Shipments');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listShipmentsRouteHandler = async (
req,

@@ -1,14 +1,12 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { stockAuditModel } from '../../database/schemas/inventory/stockaudit.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getAuditLogs } from '../../utils.js';
import { getModelStats, getModelHistory } from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Stock Audits');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listStockAuditsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { stockEventModel } from '../../database/schemas/inventory/stockevent.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Stock Events');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listStockEventsRouteHandler = async (
req,

@@ -1,12 +1,11 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { auditLogModel } from '../../database/schemas/management/auditlog.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getModelStats, getModelHistory } from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('AuditLogs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listAuditLogsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { courierModel } from '../../database/schemas/management/courier.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Couriers');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listCouriersRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { courierServiceModel } from '../../database/schemas/management/courierservice.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('CourierServices');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listCourierServicesRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { documentJobModel } from '../../database/schemas/management/documentjob.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentJobsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { documentPrinterModel } from '../../database/schemas/management/documentprinter.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Templates');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentPrintersRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { documentSizeModel } from '../../database/schemas/management/documentsize.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Sizes');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentSizesRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { documentTemplateModel } from '../../database/schemas/management/documenttemplate.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Document Templates');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listDocumentTemplatesRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv, { populate } from 'dotenv';
import config from '../../config.js';
import { filamentModel } from '../../database/schemas/management/filament.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,9 +12,8 @@
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Filaments');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listFilamentsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { fileModel } from '../../database/schemas/management/file.schema.js';
import log4js from 'log4js';
import multer from 'multer';
@@ -21,12 +21,11 @@
downloadFile,
deleteFile as deleteCephFile,
BUCKETS,
} from '../storage/ceph.js';
} from '../../database/ceph.js';
import { getFileMeta } from '../../utils.js';
dotenv.config();
const logger = log4js.getLogger('Files');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Set storage engine to memory for Ceph upload
const fileStorage = multer.memoryStorage();
@@ -349,10 +348,7 @@ export const getFileContentRouteHandler = async (req, res) => {
}
// Fallback to local file system for backward compatibility
const filePath = path.join(
process.env.FILE_STORAGE || './uploads',
file.fileName || file.name
);
const filePath = path.join(config.storage.fileStorage, file.fileName || file.name);
// Read the file
fs.readFile(filePath, (err, data) => {

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { hostModel } from '../../database/schemas/management/host.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Hosts');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listHostsRouteHandler = async (
req,

@@ -1,12 +1,10 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { materialModel } from '../../database/schemas/management/material.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getModelStats, getModelHistory } from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Materials');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listMaterialsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { noteTypeModel } from '../../database/schemas/management/notetype.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Note Types');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listNoteTypesRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { partModel } from '../../database/schemas/management/part.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Parts');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPartsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { productModel } from '../../database/schemas/management/product.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Products');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listProductsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { jobModel } from '../../database/schemas/production/job.schema.js';
import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
import log4js from 'log4js';
@@ -17,10 +17,9 @@ import { userModel } from '../../database/schemas/management/user.schema.js';
import { noteTypeModel } from '../../database/schemas/management/notetype.schema.js';
import { noteModel } from '../../database/schemas/misc/note.schema.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Map prefixes to models and id fields
const PREFIX_MODEL_MAP = {

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { taxRateModel } from '../../database/schemas/management/taxrates.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('TaxRates');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listTaxRatesRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { taxRecordModel } from '../../database/schemas/management/taxrecord.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('TaxRecords');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listTaxRecordsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { userModel } from '../../database/schemas/management/user.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -11,10 +11,8 @@ import {
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Users');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listUsersRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { vendorModel } from '../../database/schemas/management/vendor.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
@@ -12,10 +12,8 @@ import {
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Vendors');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listVendorsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { keycloak } from '../../keycloak.js';
import log4js from 'log4js';
import axios from 'axios';
@@ -7,10 +7,9 @@ import { readFileSync } from 'fs';
import { resolve } from 'path';
import NodeCache from 'node-cache';
import jwt from 'jsonwebtoken';
dotenv.config();
const logger = log4js.getLogger('Auth');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Initialize NodeCache with 5-minute TTL for token-based user lookup
const tokenUserCache = new NodeCache({ stdTTL: 300 }); // 300 seconds = 5 minutes
@@ -84,28 +83,28 @@ export const loginRouteHandler = (req, res, redirectType = 'web') => {
const redirectUrl = req.query.redirect_uri || '/production/overview';
// Store the original URL to redirect after login
const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
const authUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/auth`;
const callBackState = `/auth/${redirectType}/callback`;
const callbackUrl = `${process.env.APP_URL_API}${callBackState}`;
const callbackUrl = `${config.app.urlApi}${callBackState}`;
const state = encodeURIComponent(redirectUrl);
logger.warn(req.query.redirect_uri);
res.redirect(
`${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
`${authUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
);
};
// Function to fetch user from Keycloak and store in database and session
const fetchAndStoreUser = async (req, token) => {
const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;
const userInfoUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/userinfo`;
try {
const response = await axios.post(
userInfoUrl,
new URLSearchParams({
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
client_id: config.auth.keycloak.clientId,
client_secret: config.auth.keycloak.clientSecret,
}),
{
headers: {
@@ -154,15 +153,15 @@ export const loginTokenRouteHandler = async (req, res, redirectType = 'web') =>
// Otherwise, start the request and store the promise
const tokenPromise = (async () => {
const callBackState = `/auth/${redirectType}/callback`;
const callbackUrl = `${process.env.APP_URL_API}${callBackState}`;
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
const callbackUrl = `${config.app.urlApi}${callBackState}`;
const tokenUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token`;
const response = await axios.post(
tokenUrl,
new URLSearchParams({
grant_type: 'authorization_code',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
client_id: config.auth.keycloak.clientId,
client_secret: config.auth.keycloak.clientSecret,
code: code,
redirect_uri: callbackUrl,
}).toString(),
@@ -211,16 +210,16 @@ export const loginCallbackRouteHandler = async (req, res, redirectType = 'web')
var appUrl;
switch (redirectType) {
case 'web':
appUrl = process.env.APP_URL_CLIENT || 'http://localhost:3000';
appUrl = config.app.urlClient;
break;
case 'app-scheme':
appUrl = 'farmcontrol://app';
break;
case 'app-localhost':
appUrl = process.env.APP_DEV_AUTH_CLIENT || 'http://localhost:3500';
appUrl = config.app.devAuthClient;
break;
default:
appUrl = process.env.APP_URL_CLIENT || 'http://localhost:3000';
appUrl = config.app.urlClient;
break;
}
const redirectUriRaw = `${appUrl}${state}`;
@@ -325,12 +324,12 @@ export const logoutRouteHandler = (req, res) => {
}
// Construct the Keycloak logout URL with the redirect URI
const logoutUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/logout`;
const encodedRedirectUri = encodeURIComponent(`${process.env.APP_URL_CLIENT}${redirectUrl}`);
const logoutUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/logout`;
const encodedRedirectUri = encodeURIComponent(`${config.app.urlClient}${redirectUrl}`);
// Redirect to Keycloak logout with the redirect URI
res.redirect(
`${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`
`${logoutUrl}?client_id=${config.auth.keycloak.clientId}&post_logout_redirect_uri=${encodedRedirectUri}`
);
});
};
@@ -365,21 +364,21 @@ export const getUserInfoHandler = (req, res) => {
// Register route - Since we're using Keycloak, registration should be handled there
// This endpoint will redirect to Keycloak's registration page
export const registerRouteHandler = (req, res) => {
const registrationUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/registrations`;
const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
const registrationUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/registrations`;
const redirectUri = encodeURIComponent(config.app.urlClient + '/auth/login');
res.redirect(
`${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
`${registrationUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${redirectUri}`
);
};
// Forgot password handler - redirect to Keycloak's reset password page
export const forgotPasswordRouteHandler = (req, res) => {
const resetUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/login-actions/reset-credentials`;
const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
const resetUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/login-actions/reset-credentials`;
const redirectUri = encodeURIComponent(config.app.urlClient + '/auth/login');
res.redirect(
`${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
`${resetUrl}?client_id=${config.auth.keycloak.clientId}&redirect_uri=${redirectUri}`
);
};
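
With the development config above, these handlers now resolve Keycloak endpoints of the form (illustration, using the committed url and realm values):

https://auth.tombutcher.work/realms/master/protocol/openid-connect/token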
@@ -394,15 +393,15 @@ export const refreshTokenRouteHandler = (req, res) => {
}
const refreshToken = req.session['keycloak-token'].refresh_token;
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
const tokenUrl = `${config.auth.keycloak.url}/realms/${config.auth.keycloak.realm}/protocol/openid-connect/token`;
axios
.post(
tokenUrl,
new URLSearchParams({
grant_type: 'refresh_token',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
client_id: config.auth.keycloak.clientId,
client_secret: config.auth.keycloak.clientSecret,
refresh_token: refreshToken,
}).toString(),
{

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { noteModel } from '../../database/schemas/misc/note.schema.js';
import log4js from 'log4js';
import {
@@ -14,10 +14,8 @@
} from '../../database/database.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('Notes');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listNotesRouteHandler = async (
req,

@@ -1,11 +1,10 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { getAllModels, getModelByPrefix } from './model.js';
dotenv.config();
const logger = log4js.getLogger('Spotlight');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
// Helper function to build search filter from query parameters
const buildSearchFilter = (params) => {

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { gcodeFileModel } from '../../database/schemas/production/gcodefile.schema.js';
import log4js from 'log4js';
import {
@@ -12,10 +12,8 @@
import { getFileContentRouteHandler } from '../management/files.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('GCodeFiles');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listGCodeFilesRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import mongoose from 'mongoose';
import { jobModel } from '../../database/schemas/production/job.schema.js';
import log4js from 'log4js';
@@ -12,10 +12,8 @@ import {
getModelHistory,
} from '../../database/database.js';
import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
dotenv.config();
const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listJobsRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { printerModel } from '../../database/schemas/production/printer.schema.js';
import log4js from 'log4js';
import {
@@ -13,10 +13,8 @@
} from '../../database/database.js';
import mongoose from 'mongoose';
dotenv.config();
const logger = log4js.getLogger('Printers');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listPrintersRouteHandler = async (
req,

@@ -1,4 +1,4 @@
import dotenv from 'dotenv';
import config from '../../config.js';
import { subJobModel } from '../../database/schemas/production/subjob.schema.js';
import log4js from 'log4js';
import {
@@ -8,10 +8,8 @@
getModelStats,
getModelHistory,
} from '../../database/database.js';
dotenv.config();
const logger = log4js.getLogger('Sub Jobs');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
export const listSubJobsRouteHandler = async (
req,

@@ -3,13 +3,12 @@ import { auditLogModel } from './database/schemas/management/auditlog.schema.js'
import exifr from 'exifr';
import { natsServer } from './database/nats.js';
import log4js from 'log4js';
import dotenv from 'dotenv';
import config from './config.js';
import crypto from 'crypto';
import canonicalize from 'canonical-json';
dotenv.config();
const logger = log4js.getLogger('Utils');
logger.level = process.env.LOG_LEVEL;
logger.level = config.server.logLevel;
function buildWildcardRegexPattern(input) {
// Escape all regex special chars except * (which we treat as a wildcard)
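
The body is cut off here; a plausible implementation matching that comment (a sketch, not the committed code):

function buildWildcardRegexPattern(input) {
  // Escape every regex metacharacter except '*', then widen '*' to '.*'
  const escaped = input.replace(/[.+?^${}()|[\]\\]/g, '\\$&');
  return escaped.replace(/\*/g, '.*');
}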