Refactor caching mechanism to utilize Redis for improved performance and reliability

- Replaced in-memory caching with Redis for object and list caching in database operations.
- Introduced a new RedisServer class for managing Redis connections and operations.
- Updated cache retrieval and update functions to handle asynchronous operations with Redis.
- Enhanced logging for cache operations to improve traceability of cache hits, misses, and errors.
- Adjusted configuration to include Redis settings in config.json.
This commit is contained in:
Tom Butcher 2025-11-24 03:35:26 +00:00
parent 6be53349b5
commit 362265da72
3 changed files with 210 additions and 123 deletions

View File

@ -1,53 +1,54 @@
{ {
"development": { "development": {
"server": { "server": {
"port": 9090, "port": 9090,
"logLevel": "trace" "logLevel": "debug"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": []
},
"database": {
"etcd": {
"host": "localhost",
"port": 2379
},
"mongo": {
"url": "mongodb://192.168.68.53:27017/farmcontrol"
}
},
"otpExpiryMins": 0.5
}, },
"production": { "auth": {
"server": { "enabled": true,
"port": 8081, "keycloak": {
"logLevel": "info" "url": "https://auth.tombutcher.work",
}, "realm": "master",
"auth": { "clientId": "farmcontrol-client",
"enabled": true, "clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
"keycloak": { },
"url": "https://auth.tombutcher.work", "requiredRoles": []
"realm": "master", },
"clientId": "farmcontrol-client", "database": {
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF" "etcd": {
}, "host": "localhost",
"requiredRoles": [] "port": 2379
}, },
"database": { "mongo": {
"etcd": { "url": "mongodb://127.0.0.1:27017/farmcontrol"
"host": "localhost", },
"port": 2379 "redis": { "host": "localhost", "port": 6379, "password": "" }
}, },
"mongo": { "otpExpiryMins": 0.5
"url": "mongodb://farmcontrol.tombutcher.local:27017/farmcontrol" },
} "production": {
} "server": {
"port": 8081,
"logLevel": "info"
},
"auth": {
"enabled": true,
"keycloak": {
"url": "https://auth.tombutcher.work",
"realm": "master",
"clientId": "farmcontrol-client",
"clientSecret": "GPyh59xctRX83yfKWb83ShK6VEwHIvLF"
},
"requiredRoles": []
},
"database": {
"etcd": {
"host": "localhost",
"port": 2379
},
"mongo": {
"url": "mongodb://farmcontrol.tombutcher.local:27017/farmcontrol"
}
} }
}
} }

View File

@ -1,5 +1,4 @@
import _ from 'lodash'; import _ from 'lodash';
import NodeCache from 'node-cache';
import { import {
deleteAuditLog, deleteAuditLog,
expandObjectIds, expandObjectIds,
@ -10,8 +9,8 @@ import {
} from './utils.js'; } from './utils.js';
import log4js from 'log4js'; import log4js from 'log4js';
import { loadConfig } from '../config.js'; import { loadConfig } from '../config.js';
import { userModel } from './schemas/management/user.schema.js';
import { jsonToCacheKey } from '../utils.js'; import { jsonToCacheKey } from '../utils.js';
import { redisServer } from './redis.js';
const config = loadConfig(); const config = loadConfig();
@ -20,43 +19,40 @@ const cacheLogger = log4js.getLogger('Local Cache');
logger.level = config.server.logLevel; logger.level = config.server.logLevel;
cacheLogger.level = config.server.logLevel; cacheLogger.level = config.server.logLevel;
const objectCache = new NodeCache({ // Default cache TTL in seconds (similar to previous in-memory cache)
stdTTL: 30, // 30 sec expiration const CACHE_TTL_SECONDS = config.database?.redis?.ttlSeconds || 5;
checkperiod: 600, // 30 sec periodic cleanup
useClones: false // Don't clone objects for better performance
});
const listCache = new NodeCache({
stdTTL: 30, // 30 sec expiration
checkperiod: 600, // 30 sec periodic cleanup
useClones: false // Don't clone objects for better performance
});
export const retrieveObjectCache = ({ model, id, populate = [] }) => { export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
const cacheKeyObject = { const cacheKeyObject = {
model: model.modelName, model: model.modelName,
id, id: id?.toString()
populate
}; };
const cacheKey = jsonToCacheKey(cacheKeyObject); const cacheKey = jsonToCacheKey(cacheKeyObject);
cacheLogger.trace('Retrieving:'); cacheLogger.trace('Retrieving:', cacheKeyObject);
const cachedObject = objectCache.get(cacheKey);
if (cachedObject == undefined) { try {
cacheLogger.trace('Miss:', cacheKeyObject); const cachedObject = await redisServer.getKey(cacheKey);
if (cachedObject == null) {
cacheLogger.trace('Miss:', cacheKeyObject);
return undefined;
}
cacheLogger.trace('Hit:', {
model: model.modelName,
id: cacheKeyObject.id
});
return cachedObject;
} catch (err) {
cacheLogger.error('Error retrieving object from Redis cache:', err);
return undefined; return undefined;
} }
cacheLogger.trace('Hit:', {
model: model.modelName,
id
});
return cachedObject;
}; };
export const retrieveListCache = ({ export const retrieveListCache = async ({
model, model,
populate = [], populate = [],
filter = {}, filter = {},
@ -66,7 +62,6 @@ export const retrieveListCache = ({
}) => { }) => {
const cacheKeyObject = { const cacheKeyObject = {
model: model.modelName, model: model.modelName,
id,
populate, populate,
filter, filter,
sort, sort,
@ -74,61 +69,77 @@ export const retrieveListCache = ({
order order
}; };
cacheLogger.trace('Retrieving:', cacheKeyObject);
const cacheKey = jsonToCacheKey(cacheKeyObject); const cacheKey = jsonToCacheKey(cacheKeyObject);
const cachedList = listCache.get(cacheKey); cacheLogger.trace('Retrieving:', cacheKeyObject);
if (cachedList != undefined) { try {
cacheLogger.trace('Hit:', { const cachedList = await redisServer.getKey(cacheKey);
...cacheKeyObject,
length: cachedList.length if (cachedList != null) {
cacheLogger.trace('Hit:', {
...cacheKeyObject,
length: cachedList.length
});
return cachedList;
}
cacheLogger.trace('Miss:', {
model: model.modelName
}); });
return cachedList; return undefined;
} catch (err) {
cacheLogger.error('Error retrieving list from Redis cache:', err);
return undefined;
} }
cacheLogger.trace('Miss:', {
model: model.modelName
});
return undefined;
}; };
export const updateObjectCache = ({ model, id, object, populate = [] }) => { export const updateObjectCache = async ({ model, id, object }) => {
const cacheKeyObject = { const cacheKeyObject = {
model: model.modelName, model: model.modelName,
id, id: id?.toString()
populate
}; };
const cacheKey = jsonToCacheKey(cacheKeyObject); const cacheKey = jsonToCacheKey(cacheKeyObject);
cacheLogger.trace('Updating:', cacheKeyObject); cacheLogger.trace('Updating:', cacheKeyObject);
const cachedObject = objectCache.get(cacheKey) || {}; try {
const mergedObject = _.merge(cachedObject, object); const cachedObject = (await redisServer.getKey(cacheKey)) || {};
const mergedObject = _.merge(cachedObject, object);
objectCache.set(cacheKey, mergedObject); await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);
cacheLogger.trace('Updated:', { ...cacheKeyObject });
cacheLogger.trace('Updated:', { ...cacheKeyObject }); return mergedObject;
} catch (err) {
return mergedObject; cacheLogger.error('Error updating object in Redis cache:', err);
// Fallback to returning the provided object if cache fails
return object;
}
}; };
export const deleteObjectCache = ({ model, id }) => { export const deleteObjectCache = async ({ model, id }) => {
const cacheKeyObject = {
model: model.modelName,
id: id?.toString()
};
cacheLogger.trace('Deleting:', { cacheLogger.trace('Deleting:', {
model: model.modelName, ...cacheKeyObject
id
}); });
modelCache.del(id); try {
// Note: we currently delete the non-populated key; populated variants will expire via TTL.
const cacheKey = jsonToCacheKey({ ...cacheKeyObject, populate: [] });
await redisServer.deleteKey(cacheKey);
cacheLogger.trace('Deleted:', { cacheLogger.trace('Deleted:', {
model: model.modelName, ...cacheKeyObject
id });
}); } catch (err) {
cacheLogger.error('Error deleting object from Redis cache:', err);
return mergedObject; }
}; };
export const updateListCache = ({ export const updateListCache = ({
@ -156,14 +167,20 @@ export const updateListCache = ({
const cacheKey = jsonToCacheKey(cacheKeyObject); const cacheKey = jsonToCacheKey(cacheKeyObject);
listCache.set(cacheKey, objects); return (async () => {
try {
await redisServer.setKey(cacheKey, objects, CACHE_TTL_SECONDS);
cacheLogger.trace('Updated:', { cacheLogger.trace('Updated:', {
...cacheKeyObject, ...cacheKeyObject,
length: objects.length length: objects.length
}); });
} catch (err) {
cacheLogger.error('Error updating list in Redis cache:', err);
}
return objects; return objects;
})();
}; };
// Reusable function to list objects with aggregation, filtering, search, sorting, and pagination // Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
@ -188,7 +205,7 @@ export const listObjects = async ({
}); });
if (cached == true) { if (cached == true) {
const objectsCache = retrieveObjectsCache({ const objectsCache = await retrieveListCache({
model, model,
populate, populate,
filter, filter,
@ -234,7 +251,7 @@ export const listObjects = async ({
} }
// Handle select (projection) // Handle select (projection)
if (project != {}) { if (project && Object.keys(project).length > 0) {
query = query.select(project); query = query.select(project);
} }
@ -286,7 +303,7 @@ export const getObject = async ({
}); });
if (cached == true) { if (cached == true) {
const cachedObject = retrieveObjectCache({ model, id, populate }); const cachedObject = await retrieveObjectCache({ model, id, populate });
if (cachedObject != undefined) { if (cachedObject != undefined) {
return cachedObject; return cachedObject;
} }
@ -350,30 +367,33 @@ export const editObject = async ({
const parentType = model.modelName ? model.modelName : 'unknown'; const parentType = model.modelName ? model.modelName : 'unknown';
// Fetch and update the object // Fetch and update the object
var query = model.findByIdAndUpdate(id, updateData).lean(); var query = model.findByIdAndUpdate(id, updateData).lean();
var newQuery = model.findById(id).lean();
if (populate) { if (populate) {
if (Array.isArray(populate)) { if (Array.isArray(populate)) {
for (const pop of populate) { for (const pop of populate) {
query = query.populate(pop); query = query.populate(pop);
newQuery = newQuery.populate(pop);
} }
} else if (typeof populate === 'string' || typeof populate === 'object') { } else if (typeof populate === 'string' || typeof populate === 'object') {
query = query.populate(populate); query = query.populate(populate);
newQuery = newQuery.populate(populate);
} }
} }
const previousObject = await query; const previousObject = await query;
const newObject = await newQuery;
if (!previousObject) { if (!previousObject || !newObject) {
return { error: `${parentType} not found.`, code: 404 }; return { error: `${parentType} not found.`, code: 404 };
} }
const previousExpandedObject = expandObjectIds(previousObject); const previousExpandedObject = expandObjectIds(previousObject);
const newExpandedObject = expandObjectIds(newObject);
if (owner != undefined && ownerType != undefined) { if (owner != undefined && ownerType != undefined) {
// Audit log before update // Audit log before update
await editAuditLog( await editAuditLog(
previousExpandedObject, previousExpandedObject,
{ ...previousExpandedObject, ...updateData }, newExpandedObject,
id, id,
parentType, parentType,
owner, owner,
@ -387,7 +407,8 @@ export const editObject = async ({
updateObjectCache({ updateObjectCache({
model: model, model: model,
id: id.toString(), id: id.toString(),
object: { ...previousExpandedObject, ...updateData } object: { ...previousExpandedObject, ...updateData },
populate
}); });
return { ...previousExpandedObject, ...updateData }; return { ...previousExpandedObject, ...updateData };

65
src/database/redis.js Normal file
View File

@ -0,0 +1,65 @@
import { createClient } from 'redis';
import log4js from 'log4js';
import { loadConfig } from '../config.js';
const config = loadConfig();
const logger = log4js.getLogger('Redis');
logger.level = config.server.logLevel;
class RedisServer {
constructor() {
const redisConfig = config.database?.redis || {};
const host = redisConfig.host || '127.0.0.1';
const port = redisConfig.port || 6379;
const password = redisConfig.password || undefined;
const url = redisConfig.url || `redis://${host}:${port}`;
this.client = createClient({
url,
password
});
this.client.on('error', err => {
logger.error('Redis Client Error', err);
});
this.connected = false;
}
async connect() {
if (this.connected) return;
await this.client.connect();
this.connected = true;
logger.info('Connected to Redis');
}
async setKey(key, value, ttlSeconds) {
await this.connect();
const payload = typeof value === 'string' ? value : JSON.stringify(value);
if (ttlSeconds) {
await this.client.set(key, payload, { EX: ttlSeconds });
} else {
await this.client.set(key, payload);
}
}
async getKey(key) {
await this.connect();
const value = await this.client.get(key);
if (value == null) return null;
try {
return JSON.parse(value);
} catch {
return value;
}
}
async deleteKey(key) {
await this.connect();
await this.client.del(key);
}
}
const redisServer = new RedisServer();
export { RedisServer, redisServer };