From 362265da723c516e8c595fc6d4c25c069944f386 Mon Sep 17 00:00:00 2001
From: Tom Butcher
Date: Mon, 24 Nov 2025 03:35:26 +0000
Subject: [PATCH] Refactor caching mechanism to utilize Redis for improved performance and reliability

- Replaced in-memory caching with Redis for object and list caching in database operations.
- Introduced a new RedisServer class for managing Redis connections and operations.
- Updated cache retrieval and update functions to handle asynchronous operations with Redis.
- Enhanced logging for cache operations to improve traceability of cache hits, misses, and errors.
- Adjusted configuration to include Redis settings in config.json.
---
 config.json              |  99 +++++++++++------
 src/database/database.js | 169 ++++++++++++++++++++++-----------
 src/database/redis.js    |  65 +++++++++++++++
 3 files changed, 210 insertions(+), 123 deletions(-)
 create mode 100644 src/database/redis.js

diff --git a/config.json b/config.json
index d103ab2..0fcbe46 100644
--- a/config.json
+++ b/config.json
@@ -1,53 +1,54 @@
 {
-    "development": {
-        "server": {
-            "port": 9090,
-            "logLevel": "trace"
-        },
-        "auth": {
-            "enabled": true,
-            "keycloak": {
-                "url": "https://auth.tombutcher.work",
-                "realm": "master",
-                "clientId": "farmcontrol-client",
-                "clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
-            },
-            "requiredRoles": []
-        },
-        "database": {
-            "etcd": {
-                "host": "localhost",
-                "port": 2379
-            },
-            "mongo": {
-                "url": "mongodb://192.168.68.53:27017/farmcontrol"
-            }
-        },
-        "otpExpiryMins": 0.5
+  "development": {
+    "server": {
+      "port": 9090,
+      "logLevel": "debug"
     },
-    "production": {
-        "server": {
-            "port": 8081,
-            "logLevel": "info"
-        },
-        "auth": {
-            "enabled": true,
-            "keycloak": {
-                "url": "https://auth.tombutcher.work",
-                "realm": "master",
-                "clientId": "farmcontrol-client",
-                "clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
-            },
-            "requiredRoles": []
-        },
-        "database": {
-            "etcd": {
-                "host": "localhost",
-                "port": 2379
-            },
-            "mongo": {
-                "url": "mongodb://farmcontrol.tombutcher.local:27017/farmcontrol"
-            }
-        }
+    "auth": {
+      "enabled": true,
+      "keycloak": {
+        "url": "https://auth.tombutcher.work",
+        "realm": "master",
+        "clientId": "farmcontrol-client",
+        "clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
+      },
+      "requiredRoles": []
+    },
+    "database": {
+      "etcd": {
+        "host": "localhost",
+        "port": 2379
+      },
+      "mongo": {
+        "url": "mongodb://127.0.0.1:27017/farmcontrol"
+      },
+      "redis": { "host": "localhost", "port": 6379, "password": "" }
+    },
+    "otpExpiryMins": 0.5
+  },
+  "production": {
+    "server": {
+      "port": 8081,
+      "logLevel": "info"
+    },
+    "auth": {
+      "enabled": true,
+      "keycloak": {
+        "url": "https://auth.tombutcher.work",
+        "realm": "master",
+        "clientId": "farmcontrol-client",
+        "clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
+      },
+      "requiredRoles": []
+    },
+    "database": {
+      "etcd": {
+        "host": "localhost",
+        "port": 2379
+      },
+      "mongo": {
+        "url": "mongodb://farmcontrol.tombutcher.local:27017/farmcontrol"
+      }
     }
+  }
 }
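Note on the new settings: the database.redis block added above is consumed by the RedisServer wrapper introduced later in this patch. Only host, port, and password are set here; the wrapper also honours an optional url (which takes precedence over host/port), and database.js reads an optional ttlSeconds for the cache TTL. A fuller development block, with the optional keys shown purely for illustration, would look like:

    "redis": {
      "host": "localhost",
      "port": 6379,
      "password": "",
      "url": "redis://localhost:6379",
      "ttlSeconds": 5
    }

The production environment gains no redis block in this patch, so production will fall back to the defaults in redis.js (redis://127.0.0.1:6379, no password).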
diff --git a/src/database/database.js b/src/database/database.js
index c7a138e..b072eff 100644
--- a/src/database/database.js
+++ b/src/database/database.js
@@ -1,5 +1,4 @@
 import _ from 'lodash';
-import NodeCache from 'node-cache';
 import {
   deleteAuditLog,
   expandObjectIds,
@@ -10,8 +9,8 @@
 } from './utils.js';
 import log4js from 'log4js';
 import { loadConfig } from '../config.js';
-import { userModel } from './schemas/management/user.schema.js';
 import { jsonToCacheKey } from '../utils.js';
+import { redisServer } from './redis.js';
 
 const config = loadConfig();
@@ -20,43 +19,40 @@
 const cacheLogger = log4js.getLogger('Local Cache');
 logger.level = config.server.logLevel;
 cacheLogger.level = config.server.logLevel;
 
-const objectCache = new NodeCache({
-  stdTTL: 30, // 30 sec expiration
-  checkperiod: 600, // 30 sec periodic cleanup
-  useClones: false // Don't clone objects for better performance
-});
-const listCache = new NodeCache({
-  stdTTL: 30, // 30 sec expiration
-  checkperiod: 600, // 30 sec periodic cleanup
-  useClones: false // Don't clone objects for better performance
-});
+// Default cache TTL in seconds (similar to previous in-memory cache)
+const CACHE_TTL_SECONDS = config.database?.redis?.ttlSeconds || 5;
 
-export const retrieveObjectCache = ({ model, id, populate = [] }) => {
+export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
   const cacheKeyObject = {
     model: model.modelName,
-    id,
-    populate
+    id: id?.toString()
   };
   const cacheKey = jsonToCacheKey(cacheKeyObject);
 
-  cacheLogger.trace('Retrieving:');
-  const cachedObject = objectCache.get(cacheKey);
+  cacheLogger.trace('Retrieving:', cacheKeyObject);
 
-  if (cachedObject == undefined) {
-    cacheLogger.trace('Miss:', cacheKeyObject);
+  try {
+    const cachedObject = await redisServer.getKey(cacheKey);
+
+    if (cachedObject == null) {
+      cacheLogger.trace('Miss:', cacheKeyObject);
+      return undefined;
+    }
+
+    cacheLogger.trace('Hit:', {
+      model: model.modelName,
+      id: cacheKeyObject.id
+    });
+
+    return cachedObject;
+  } catch (err) {
+    cacheLogger.error('Error retrieving object from Redis cache:', err);
     return undefined;
   }
-
-  cacheLogger.trace('Hit:', {
-    model: model.modelName,
-    id
-  });
-
-  return cachedObject;
 };
 
-export const retrieveListCache = ({
+export const retrieveListCache = async ({
   model,
   populate = [],
   filter = {},
@@ -66,7 +62,6 @@
 }) => {
   const cacheKeyObject = {
     model: model.modelName,
-    id,
     populate,
     filter,
     sort,
@@ -74,61 +69,77 @@
     order
   };
 
-  cacheLogger.trace('Retrieving:', cacheKeyObject);
-  const cacheKey = jsonToCacheKey(cacheKeyObject);
+  const cacheKey = jsonToCacheKey(cacheKeyObject);
 
-  const cachedList = listCache.get(cacheKey);
+  cacheLogger.trace('Retrieving:', cacheKeyObject);
 
-  if (cachedList != undefined) {
-    cacheLogger.trace('Hit:', {
-      ...cacheKeyObject,
-      length: cachedList.length
+  try {
+    const cachedList = await redisServer.getKey(cacheKey);
+
+    if (cachedList != null) {
+      cacheLogger.trace('Hit:', {
+        ...cacheKeyObject,
+        length: cachedList.length
+      });
+      return cachedList;
+    }
+
+    cacheLogger.trace('Miss:', {
+      model: model.modelName
     });
-    return cachedList;
+    return undefined;
+  } catch (err) {
+    cacheLogger.error('Error retrieving list from Redis cache:', err);
+    return undefined;
   }
-
-  cacheLogger.trace('Miss:', {
-    model: model.modelName
-  });
-  return undefined;
 };
 
-export const updateObjectCache = ({ model, id, object, populate = [] }) => {
+export const updateObjectCache = async ({ model, id, object }) => {
   const cacheKeyObject = {
     model: model.modelName,
-    id,
-    populate
+    id: id?.toString()
  };
   const cacheKey = jsonToCacheKey(cacheKeyObject);
 
   cacheLogger.trace('Updating:', cacheKeyObject);
 
-  const cachedObject = objectCache.get(cacheKey) || {};
-  const mergedObject = _.merge(cachedObject, object);
+  try {
+    const cachedObject = (await redisServer.getKey(cacheKey)) || {};
+    const mergedObject = _.merge(cachedObject, object);
 
-  objectCache.set(cacheKey, mergedObject);
+    await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);
+    cacheLogger.trace('Updated:', { ...cacheKeyObject });
 
-  cacheLogger.trace('Updated:', { ...cacheKeyObject });
-
-  return mergedObject;
+    return mergedObject;
+  } catch (err) {
+    cacheLogger.error('Error updating object in Redis cache:', err);
+    // Fallback to returning the provided object if cache fails
+    return object;
+  }
 };
 
-export const deleteObjectCache = ({ model, id }) => {
+export const deleteObjectCache = async ({ model, id }) => {
+  const cacheKeyObject = {
+    model: model.modelName,
+    id: id?.toString()
+  };
+
   cacheLogger.trace('Deleting:', {
-    model: model.modelName,
-    id
+    ...cacheKeyObject
   });
 
-  modelCache.del(id);
+  try {
+    // Note: we currently delete the non-populated key; populated variants will expire via TTL.
+    const cacheKey = jsonToCacheKey({ ...cacheKeyObject, populate: [] });
+    await redisServer.deleteKey(cacheKey);
 
-  cacheLogger.trace('Deleted:', {
-    model: model.modelName,
-    id
-  });
-
-  return mergedObject;
+    cacheLogger.trace('Deleted:', {
+      ...cacheKeyObject
+    });
+  } catch (err) {
+    cacheLogger.error('Error deleting object from Redis cache:', err);
+  }
 };
 
 export const updateListCache = ({
@@ -156,14 +167,20 @@
   const cacheKey = jsonToCacheKey(cacheKeyObject);
 
-  listCache.set(cacheKey, objects);
+  return (async () => {
+    try {
+      await redisServer.setKey(cacheKey, objects, CACHE_TTL_SECONDS);
 
-  cacheLogger.trace('Updated:', {
-    ...cacheKeyObject,
-    length: objects.length
-  });
+      cacheLogger.trace('Updated:', {
+        ...cacheKeyObject,
+        length: objects.length
+      });
+    } catch (err) {
+      cacheLogger.error('Error updating list in Redis cache:', err);
+    }
 
-  return objects;
+    return objects;
+  })();
 };
 
 // Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
 export const listObjects = async ({
@@ -188,7 +205,7 @@
   });
 
   if (cached == true) {
-    const objectsCache = retrieveObjectsCache({
+    const objectsCache = await retrieveListCache({
       model,
       populate,
       filter,
@@ -234,7 +251,7 @@
   }
 
   // Handle select (projection)
-  if (project != {}) {
+  if (project && Object.keys(project).length > 0) {
     query = query.select(project);
   }
 
@@ -286,7 +303,7 @@
   });
 
   if (cached == true) {
-    const cachedObject = retrieveObjectCache({ model, id, populate });
+    const cachedObject = await retrieveObjectCache({ model, id, populate });
     if (cachedObject != undefined) {
       return cachedObject;
     }
@@ -350,30 +367,33 @@
   const parentType = model.modelName ? model.modelName : 'unknown';
 
   // Fetch the and update object
   var query = model.findByIdAndUpdate(id, updateData).lean();
+  var newQuery = model.findById(id).lean();
 
   if (populate) {
     if (Array.isArray(populate)) {
       for (const pop of populate) {
         query = query.populate(pop);
+        newQuery = newQuery.populate(pop);
       }
     } else if (typeof populate === 'string' || typeof populate === 'object') {
       query = query.populate(populate);
+      newQuery = newQuery.populate(populate);
     }
   }
 
   const previousObject = await query;
-
-  if (!previousObject) {
+  const newObject = await newQuery;
+  if (!previousObject || !newObject) {
     return {
       error: `${parentType} not found.`,
       code: 404
     };
   }
 
-
   const previousExpandedObject = expandObjectIds(previousObject);
+  const newExpandedObject = expandObjectIds(newObject);
 
   if (owner != undefined && ownerType != undefined) {
     // Audit log before update
     await editAuditLog(
       previousExpandedObject,
-      { ...previousExpandedObject, ...updateData },
+      newExpandedObject,
       id,
       parentType,
       owner,
@@ -387,7 +407,8 @@
   updateObjectCache({
     model: model,
     id: id.toString(),
-    object: { ...previousExpandedObject, ...updateData }
+    object: { ...previousExpandedObject, ...updateData },
+    populate
   });
 
   return { ...previousExpandedObject, ...updateData };
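Because the cache helpers above are now asynchronous, every call site has to await them; getObject and listObjects were updated accordingly, while editObject still fires updateObjectCache without awaiting it. A minimal sketch of how a caller outside database.js could use the exported helpers (getCachedDocument and its model/id arguments are illustrative, not part of this patch):

    // Illustrative only: read-through caching with the now-async helpers.
    const getCachedDocument = async (model, id) => {
      const cached = await retrieveObjectCache({ model, id });
      if (cached !== undefined) return cached; // Redis hit, already parsed back into an object

      const fresh = await model.findById(id).lean();
      if (!fresh) return undefined;

      await updateObjectCache({ model, id, object: fresh }); // stored with CACHE_TTL_SECONDS
      return fresh;
    };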
diff --git a/src/database/redis.js b/src/database/redis.js
new file mode 100644
index 0000000..321c501
--- /dev/null
+++ b/src/database/redis.js
@@ -0,0 +1,65 @@
+import { createClient } from 'redis';
+import log4js from 'log4js';
+import { loadConfig } from '../config.js';
+
+const config = loadConfig();
+const logger = log4js.getLogger('Redis');
+logger.level = config.server.logLevel;
+
+class RedisServer {
+  constructor() {
+    const redisConfig = config.database?.redis || {};
+    const host = redisConfig.host || '127.0.0.1';
+    const port = redisConfig.port || 6379;
+    const password = redisConfig.password || undefined;
+    const url = redisConfig.url || `redis://${host}:${port}`;
+
+    this.client = createClient({
+      url,
+      password
+    });
+
+    this.client.on('error', err => {
+      logger.error('Redis Client Error', err);
+    });
+
+    this.connected = false;
+  }
+
+  async connect() {
+    if (this.connected) return;
+    await this.client.connect();
+    this.connected = true;
+    logger.info('Connected to Redis');
+  }
+
+  async setKey(key, value, ttlSeconds) {
+    await this.connect();
+    const payload = typeof value === 'string' ? value : JSON.stringify(value);
+    if (ttlSeconds) {
+      await this.client.set(key, payload, { EX: ttlSeconds });
+    } else {
+      await this.client.set(key, payload);
+    }
+  }
+
+  async getKey(key) {
+    await this.connect();
+    const value = await this.client.get(key);
+    if (value == null) return null;
+    try {
+      return JSON.parse(value);
+    } catch {
+      return value;
+    }
+  }
+
+  async deleteKey(key) {
+    await this.connect();
+    await this.client.del(key);
+  }
+}
+
+const redisServer = new RedisServer();
+
+export { RedisServer, redisServer };
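For reference, a usage sketch of the new wrapper from an ES module (the key and values below are illustrative): connect() is invoked lazily by every method, setKey serialises non-string values to JSON and applies the optional TTL, and getKey parses stored JSON back into an object.

    import { redisServer } from './redis.js'; // path relative to src/database

    await redisServer.setKey('example:device:42', { name: 'pump-1', on: true }, 30); // 30 s TTL
    const value = await redisServer.getKey('example:device:42'); // -> { name: 'pump-1', on: true }
    await redisServer.deleteKey('example:device:42');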