Refactor caching mechanism to utilize Redis for improved performance and reliability
- Replaced in-memory caching with Redis for object and list caching in database operations. - Introduced a new RedisServer class for managing Redis connections and operations. - Updated cache retrieval and update functions to handle asynchronous operations with Redis. - Enhanced logging for cache operations to improve traceability of cache hits, misses, and errors. - Adjusted configuration to include Redis settings in config.json.
This commit is contained in:
parent
6be53349b5
commit
362265da72
@ -2,7 +2,7 @@
|
||||
"development": {
|
||||
"server": {
|
||||
"port": 9090,
|
||||
"logLevel": "trace"
|
||||
"logLevel": "debug"
|
||||
},
|
||||
"auth": {
|
||||
"enabled": true,
|
||||
@ -20,8 +20,9 @@
|
||||
"port": 2379
|
||||
},
|
||||
"mongo": {
|
||||
"url": "mongodb://192.168.68.53:27017/farmcontrol"
|
||||
}
|
||||
"url": "mongodb://127.0.0.1:27017/farmcontrol"
|
||||
},
|
||||
"redis": { "host": "localhost", "port": 6379, "password": "" }
|
||||
},
|
||||
"otpExpiryMins": 0.5
|
||||
},
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import _ from 'lodash';
|
||||
import NodeCache from 'node-cache';
|
||||
import {
|
||||
deleteAuditLog,
|
||||
expandObjectIds,
|
||||
@ -10,8 +9,8 @@ import {
|
||||
} from './utils.js';
|
||||
import log4js from 'log4js';
|
||||
import { loadConfig } from '../config.js';
|
||||
import { userModel } from './schemas/management/user.schema.js';
|
||||
import { jsonToCacheKey } from '../utils.js';
|
||||
import { redisServer } from './redis.js';
|
||||
|
||||
const config = loadConfig();
|
||||
|
||||
@ -20,43 +19,40 @@ const cacheLogger = log4js.getLogger('Local Cache');
|
||||
logger.level = config.server.logLevel;
|
||||
cacheLogger.level = config.server.logLevel;
|
||||
|
||||
const objectCache = new NodeCache({
|
||||
stdTTL: 30, // 30 sec expiration
|
||||
checkperiod: 600, // 30 sec periodic cleanup
|
||||
useClones: false // Don't clone objects for better performance
|
||||
});
|
||||
const listCache = new NodeCache({
|
||||
stdTTL: 30, // 30 sec expiration
|
||||
checkperiod: 600, // 30 sec periodic cleanup
|
||||
useClones: false // Don't clone objects for better performance
|
||||
});
|
||||
// Default cache TTL in seconds (similar to previous in-memory cache)
|
||||
const CACHE_TTL_SECONDS = config.database?.redis?.ttlSeconds || 5;
|
||||
|
||||
export const retrieveObjectCache = ({ model, id, populate = [] }) => {
|
||||
export const retrieveObjectCache = async ({ model, id, populate = [] }) => {
|
||||
const cacheKeyObject = {
|
||||
model: model.modelName,
|
||||
id,
|
||||
populate
|
||||
id: id?.toString()
|
||||
};
|
||||
|
||||
const cacheKey = jsonToCacheKey(cacheKeyObject);
|
||||
|
||||
cacheLogger.trace('Retrieving:');
|
||||
const cachedObject = objectCache.get(cacheKey);
|
||||
cacheLogger.trace('Retrieving:', cacheKeyObject);
|
||||
|
||||
if (cachedObject == undefined) {
|
||||
try {
|
||||
const cachedObject = await redisServer.getKey(cacheKey);
|
||||
|
||||
if (cachedObject == null) {
|
||||
cacheLogger.trace('Miss:', cacheKeyObject);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
cacheLogger.trace('Hit:', {
|
||||
model: model.modelName,
|
||||
id
|
||||
id: cacheKeyObject.id
|
||||
});
|
||||
|
||||
return cachedObject;
|
||||
} catch (err) {
|
||||
cacheLogger.error('Error retrieving object from Redis cache:', err);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
export const retrieveListCache = ({
|
||||
export const retrieveListCache = async ({
|
||||
model,
|
||||
populate = [],
|
||||
filter = {},
|
||||
@ -66,7 +62,6 @@ export const retrieveListCache = ({
|
||||
}) => {
|
||||
const cacheKeyObject = {
|
||||
model: model.modelName,
|
||||
id,
|
||||
populate,
|
||||
filter,
|
||||
sort,
|
||||
@ -74,13 +69,14 @@ export const retrieveListCache = ({
|
||||
order
|
||||
};
|
||||
|
||||
cacheLogger.trace('Retrieving:', cacheKeyObject);
|
||||
|
||||
const cacheKey = jsonToCacheKey(cacheKeyObject);
|
||||
|
||||
const cachedList = listCache.get(cacheKey);
|
||||
cacheLogger.trace('Retrieving:', cacheKeyObject);
|
||||
|
||||
if (cachedList != undefined) {
|
||||
try {
|
||||
const cachedList = await redisServer.getKey(cacheKey);
|
||||
|
||||
if (cachedList != null) {
|
||||
cacheLogger.trace('Hit:', {
|
||||
...cacheKeyObject,
|
||||
length: cachedList.length
|
||||
@ -92,43 +88,58 @@ export const retrieveListCache = ({
|
||||
model: model.modelName
|
||||
});
|
||||
return undefined;
|
||||
} catch (err) {
|
||||
cacheLogger.error('Error retrieving list from Redis cache:', err);
|
||||
return undefined;
|
||||
}
|
||||
};
|
||||
|
||||
export const updateObjectCache = ({ model, id, object, populate = [] }) => {
|
||||
export const updateObjectCache = async ({ model, id, object }) => {
|
||||
const cacheKeyObject = {
|
||||
model: model.modelName,
|
||||
id,
|
||||
populate
|
||||
id: id?.toString()
|
||||
};
|
||||
|
||||
const cacheKey = jsonToCacheKey(cacheKeyObject);
|
||||
|
||||
cacheLogger.trace('Updating:', cacheKeyObject);
|
||||
|
||||
const cachedObject = objectCache.get(cacheKey) || {};
|
||||
try {
|
||||
const cachedObject = (await redisServer.getKey(cacheKey)) || {};
|
||||
const mergedObject = _.merge(cachedObject, object);
|
||||
|
||||
objectCache.set(cacheKey, mergedObject);
|
||||
|
||||
await redisServer.setKey(cacheKey, mergedObject, CACHE_TTL_SECONDS);
|
||||
cacheLogger.trace('Updated:', { ...cacheKeyObject });
|
||||
|
||||
return mergedObject;
|
||||
} catch (err) {
|
||||
cacheLogger.error('Error updating object in Redis cache:', err);
|
||||
// Fallback to returning the provided object if cache fails
|
||||
return object;
|
||||
}
|
||||
};
|
||||
|
||||
export const deleteObjectCache = ({ model, id }) => {
|
||||
cacheLogger.trace('Deleting:', {
|
||||
export const deleteObjectCache = async ({ model, id }) => {
|
||||
const cacheKeyObject = {
|
||||
model: model.modelName,
|
||||
id
|
||||
id: id?.toString()
|
||||
};
|
||||
|
||||
cacheLogger.trace('Deleting:', {
|
||||
...cacheKeyObject
|
||||
});
|
||||
|
||||
modelCache.del(id);
|
||||
try {
|
||||
// Note: we currently delete the non-populated key; populated variants will expire via TTL.
|
||||
const cacheKey = jsonToCacheKey({ ...cacheKeyObject, populate: [] });
|
||||
await redisServer.deleteKey(cacheKey);
|
||||
|
||||
cacheLogger.trace('Deleted:', {
|
||||
model: model.modelName,
|
||||
id
|
||||
...cacheKeyObject
|
||||
});
|
||||
|
||||
return mergedObject;
|
||||
} catch (err) {
|
||||
cacheLogger.error('Error deleting object from Redis cache:', err);
|
||||
}
|
||||
};
|
||||
|
||||
export const updateListCache = ({
|
||||
@ -156,14 +167,20 @@ export const updateListCache = ({
|
||||
|
||||
const cacheKey = jsonToCacheKey(cacheKeyObject);
|
||||
|
||||
listCache.set(cacheKey, objects);
|
||||
return (async () => {
|
||||
try {
|
||||
await redisServer.setKey(cacheKey, objects, CACHE_TTL_SECONDS);
|
||||
|
||||
cacheLogger.trace('Updated:', {
|
||||
...cacheKeyObject,
|
||||
length: objects.length
|
||||
});
|
||||
} catch (err) {
|
||||
cacheLogger.error('Error updating list in Redis cache:', err);
|
||||
}
|
||||
|
||||
return objects;
|
||||
})();
|
||||
};
|
||||
|
||||
// Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
|
||||
@ -188,7 +205,7 @@ export const listObjects = async ({
|
||||
});
|
||||
|
||||
if (cached == true) {
|
||||
const objectsCache = retrieveObjectsCache({
|
||||
const objectsCache = await retrieveListCache({
|
||||
model,
|
||||
populate,
|
||||
filter,
|
||||
@ -234,7 +251,7 @@ export const listObjects = async ({
|
||||
}
|
||||
|
||||
// Handle select (projection)
|
||||
if (project != {}) {
|
||||
if (project && Object.keys(project).length > 0) {
|
||||
query = query.select(project);
|
||||
}
|
||||
|
||||
@ -286,7 +303,7 @@ export const getObject = async ({
|
||||
});
|
||||
|
||||
if (cached == true) {
|
||||
const cachedObject = retrieveObjectCache({ model, id, populate });
|
||||
const cachedObject = await retrieveObjectCache({ model, id, populate });
|
||||
if (cachedObject != undefined) {
|
||||
return cachedObject;
|
||||
}
|
||||
@ -350,30 +367,33 @@ export const editObject = async ({
|
||||
const parentType = model.modelName ? model.modelName : 'unknown';
|
||||
// Fetch and update the object
|
||||
var query = model.findByIdAndUpdate(id, updateData).lean();
|
||||
var newQuery = model.findById(id).lean();
|
||||
|
||||
if (populate) {
|
||||
if (Array.isArray(populate)) {
|
||||
for (const pop of populate) {
|
||||
query = query.populate(pop);
|
||||
newQuery = newQuery.populate(pop);
|
||||
}
|
||||
} else if (typeof populate === 'string' || typeof populate === 'object') {
|
||||
query = query.populate(populate);
|
||||
newQuery = newQuery.populate(populate);
|
||||
}
|
||||
}
|
||||
|
||||
const previousObject = await query;
|
||||
|
||||
if (!previousObject) {
|
||||
const newObject = await newQuery;
|
||||
if (!previousObject || !newObject) {
|
||||
return { error: `${parentType} not found.`, code: 404 };
|
||||
}
|
||||
|
||||
const previousExpandedObject = expandObjectIds(previousObject);
|
||||
const newExpandedObject = expandObjectIds(newObject);
|
||||
|
||||
if (owner != undefined && ownerType != undefined) {
|
||||
// Audit log before update
|
||||
await editAuditLog(
|
||||
previousExpandedObject,
|
||||
{ ...previousExpandedObject, ...updateData },
|
||||
newExpandedObject,
|
||||
id,
|
||||
parentType,
|
||||
owner,
|
||||
@ -387,7 +407,8 @@ export const editObject = async ({
|
||||
updateObjectCache({
|
||||
model: model,
|
||||
id: id.toString(),
|
||||
object: { ...previousExpandedObject, ...updateData }
|
||||
object: { ...previousExpandedObject, ...updateData },
|
||||
populate
|
||||
});
|
||||
|
||||
return { ...previousExpandedObject, ...updateData };
|
||||
|
||||
65
src/database/redis.js
Normal file
65
src/database/redis.js
Normal file
@ -0,0 +1,65 @@
|
||||
import { createClient } from 'redis';
|
||||
import log4js from 'log4js';
|
||||
import { loadConfig } from '../config.js';
|
||||
|
||||
// Module-wide config and logger, shared by the RedisServer defined below.
const config = loadConfig();
const logger = log4js.getLogger('Redis');
// Mirror the server-wide log level so Redis messages obey config.json.
logger.level = config.server.logLevel;
|
||||
|
||||
class RedisServer {
  /**
   * Lazily-connecting wrapper around a single shared node-redis v4 client.
   * Connection parameters come from config.database.redis — either a full
   * { url } or individual { host, port, password } fields.
   */
  constructor() {
    const redisConfig = config.database?.redis || {};
    const host = redisConfig.host || '127.0.0.1';
    const port = redisConfig.port || 6379;
    // Treat an empty-string password (as shipped in config.json) as "no auth".
    const password = redisConfig.password || undefined;
    const url = redisConfig.url || `redis://${host}:${port}`;

    this.client = createClient({
      url,
      password
    });

    // Log client-level errors (dropped sockets, etc.) instead of letting the
    // unhandled 'error' event crash the process.
    this.client.on('error', err => {
      logger.error('Redis Client Error', err);
    });

    // Kept for backward compatibility with callers inspecting this flag.
    this.connected = false;
    // Memoized in-flight connect attempt; null until connect() is first called.
    this.connectPromise = null;
  }

  /**
   * Ensure the client is connected. Safe under concurrency: every caller
   * awaits the same underlying connect attempt. The previous
   * `if (this.connected) return` guard raced — two overlapping cache
   * operations could both reach client.connect(), which throws in
   * node-redis v4 once the socket is already opening.
   */
  async connect() {
    if (!this.connectPromise) {
      this.connectPromise = this.client
        .connect()
        .then(() => {
          this.connected = true;
          logger.info('Connected to Redis');
        })
        .catch(err => {
          // Clear the memo so the next call can retry rather than
          // permanently caching a failed attempt.
          this.connectPromise = null;
          throw err;
        });
    }
    return this.connectPromise;
  }

  /**
   * Store a value under key. Non-string values are JSON-serialized.
   * @param {string} key
   * @param {*} value
   * @param {number} [ttlSeconds] expiry in seconds; omitted/0 means no expiry
   */
  async setKey(key, value, ttlSeconds) {
    await this.connect();
    const payload = typeof value === 'string' ? value : JSON.stringify(value);
    if (ttlSeconds) {
      await this.client.set(key, payload, { EX: ttlSeconds });
    } else {
      await this.client.set(key, payload);
    }
  }

  /**
   * Fetch a value by key. JSON payloads are parsed back into objects; values
   * that are not valid JSON are returned as raw strings. Returns null on a
   * cache miss.
   * @param {string} key
   * @returns {Promise<*>} parsed value, raw string, or null
   */
  async getKey(key) {
    await this.connect();
    const value = await this.client.get(key);
    if (value == null) return null;
    try {
      return JSON.parse(value);
    } catch {
      // Stored as a plain (non-JSON) string — hand it back unchanged.
      return value;
    }
  }

  /**
   * Delete a key; a no-op if the key does not exist.
   * @param {string} key
   */
  async deleteKey(key) {
    await this.connect();
    await this.client.del(key);
  }
}
|
||||
|
||||
const redisServer = new RedisServer();
|
||||
|
||||
export { RedisServer, redisServer };
|
||||
Loading…
x
Reference in New Issue
Block a user