Large amount of refactoring.
parent 97773c2ab2, commit 97b77f5155

4300  package-lock.json (generated)
File diff suppressed because it is too large.
package.json
@@ -4,6 +4,8 @@
  "description": "",
  "main": "index.js",
  "dependencies": {
    "@aws-sdk/client-s3": "^3.0.0",
    "@aws-sdk/s3-request-presigner": "^3.0.0",
    "@nats-io/transport-node": "^3.1.0",
    "axios": "^1.11.0",
    "bcrypt": "^6.0.0",
@@ -11,6 +13,7 @@
    "cors": "^2.8.5",
    "dotenv": "^17.2.1",
    "etcd3": "^1.1.2",
    "exifr": "^7.1.3",
    "express": "^5.1.0",
    "express-session": "^1.18.2",
    "i": "^0.3.7",

src/database/database.js
@@ -1,11 +1,19 @@
import dotenv from 'dotenv';
import { deleteAuditLog, distributeDelete, expandObjectIds } from '../utils.js';
import { fileModel } from '../schemas/management/file.schema.js';
import {
  deleteAuditLog,
  distributeDelete,
  expandObjectIds,
  modelHasRef,
  getFieldsByRef,
} from '../utils.js';
import log4js from 'log4js';
import { editAuditLog, distributeUpdate, newAuditLog, distributeNew } from '../utils.js';
import { getAllModels } from '../services/misc/model.js';

dotenv.config();

const logger = log4js.getLogger('Filament Stocks');
const logger = log4js.getLogger('Database');
logger.level = process.env.LOG_LEVEL;

// Reusable function to list objects with aggregation, filtering, search, sorting, and pagination
@@ -262,6 +270,17 @@ export const listObjectsByProperties = async ({
export const getObject = async ({ model, id, populate }) => {
  try {
    let query = model.findById(id).lean();

    // Auto-populate file references if the model has them
    if (modelHasRef(model, 'file')) {
      const fileFields = getFieldsByRef(model, 'file');

      // Populate all file reference fields
      for (const field of fileFields) {
        query = query.populate(field);
      }
    }

    // Handle populate (array or single value)
    if (populate) {
      if (Array.isArray(populate)) {
@@ -284,6 +303,54 @@ export const getObject = async ({ model, id, populate }) => {
  }
};

export const listObjectDependencies = async ({ model, id }) => {
  try {
    const dependencies = [];
    const parentModelName = model?.modelName;
    if (!parentModelName || !id) {
      return [];
    }

    const allModelEntries = getAllModels();

    for (const entry of allModelEntries) {
      const targetModel = entry?.model;
      if (!targetModel || !targetModel.schema) continue;

      const referencingPaths = [];

      targetModel.schema.eachPath((pathName, schemaType) => {
        const directRef = schemaType?.options?.ref;
        const arrayRef = schemaType?.caster?.options?.ref;
        const refName = directRef || arrayRef;
        if (refName === parentModelName) {
          referencingPaths.push(pathName);
        }
      });

      if (referencingPaths.length === 0) continue;

      for (const pathName of referencingPaths) {
        const filter = { [pathName]: id };
        const results = await targetModel.find(filter).lean();
        for (const doc of results) {
          const object = expandObjectIds(doc);
          dependencies.push({
            objectType: targetModel.modelName,
            _id: object._id,
            name: object?.name,
          });
        }
      }
    }

    return dependencies;
  } catch (error) {
    logger.error('listObjectDependencies error:', error);
    return { error: error.message, code: 500 };
  }
};
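
For orientation: listObjectDependencies scans every registered model, collects schema paths whose ref (direct or array) points back at the parent model, and returns the documents that still reference the given id. A caller would use it as a pre-delete guard, roughly like this (an illustrative sketch, not part of the commit; guardedDelete and the import paths are hypothetical):

import { listObjectDependencies, deleteObject } from './database/database.js';
import { fileModel } from './schemas/management/file.schema.js';

// Hypothetical guard: refuse deletion while other documents still reference the object
export const guardedDelete = async ({ id, user }) => {
  const dependencies = await listObjectDependencies({ model: fileModel, id });
  if (dependencies.error) return dependencies; // the scan itself failed (500)
  if (dependencies.length > 0) {
    // Mirrors the 409 that flushFile returns further down
    return { error: 'Object has dependencies', code: 409, dependencyDetails: dependencies };
  }
  return deleteObject({ model: fileModel, id, user });
};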

// Reusable function to edit an object by ID, with audit logging and distribution
export const editObject = async ({ model, id, updateData, user, populate }) => {
  try {
@@ -309,6 +376,33 @@ export const editObject = async ({ model, id, updateData, user, populate }) => {
    }

    const previousExpandedObject = expandObjectIds(previousObject);

    // Check if any model parameters have ref: 'file' and flush files if so
    if (modelHasRef(model, 'file')) {
      logger.debug(`Model ${model.modelName} has file references, checking for files to flush`);
      const fileFields = getFieldsByRef(model, 'file');

      for (const fieldName of fileFields) {
        const fieldValue = previousExpandedObject[fieldName];

        if (fieldValue) {
          if (Array.isArray(fieldValue)) {
            // Handle file arrays
            for (const fileRef of fieldValue) {
              if (fileRef && fileRef._id) {
                logger.debug(`Flushing file from array field ${fieldName}: ${fileRef._id}`);
                await flushFile({ id: fileRef._id, user });
              }
            }
          } else if (fieldValue._id) {
            // Handle single file reference
            logger.debug(`Flushing file from field ${fieldName}: ${fieldValue._id}`);
            await flushFile({ id: fieldValue._id, user });
          }
        }
      }
    }

    // Audit log before update
    await editAuditLog(
      previousExpandedObject,
@@ -375,6 +469,92 @@ export const deleteObject = async ({ model, id, user = null }, distributeChanges
  }
};

export const flushFile = async ({ id, user }) => {
  try {
    logger.info(`Starting file deletion process for file ID: ${id}`);

    // First, check if the file exists
    const file = await fileModel.findById(id).lean();
    if (!file) {
      logger.warn(`File with ID ${id} not found`);
      return {
        error: 'File not found',
        code: 404,
      };
    }

    logger.info(`Found file: ${file.name} (${file._id})`);

    // Check if this file has any dependencies
    const dependencies = await listObjectDependencies({
      model: fileModel,
      id: file._id,
    });

    if (dependencies.length > 0) {
      logger.info(
        `File ${file._id} (${file.name}) has ${dependencies.length} dependencies, cannot delete`
      );
      return {
        error: 'File has dependencies and cannot be deleted',
        code: 409,
        dependencies: dependencies.length,
        dependencyDetails: dependencies,
      };
    }

    logger.debug(`File ${file._id} (${file.name}) has no dependencies, proceeding with deletion`);

    // Delete from database first
    const deleteResult = await deleteObject({
      model: fileModel,
      id: file._id,
      user,
    });

    if (deleteResult.error) {
      logger.error(`Failed to delete file ${file._id} from database:`, deleteResult.error);
      return {
        error: deleteResult.error,
        code: deleteResult.code || 500,
      };
    }

    // Try to delete from Ceph storage if it exists
    if (file.extension) {
      try {
        const { deleteFile, BUCKETS } = await import('../services/storage/ceph.js');
        const cephKey = `files/${file._id}${file.extension}`;

        await deleteFile(BUCKETS.FILES, cephKey);
        logger.debug(`Deleted file from Ceph storage: ${cephKey}`);
      } catch (cephError) {
        logger.warn(`Failed to delete file ${file._id} from Ceph storage:`, cephError.message);
        // Don't treat Ceph deletion failure as a critical error since DB record is already deleted
      }
    }

    const result = {
      success: true,
      deletedFile: {
        fileId: file._id,
        fileName: file.name,
        deletedAt: new Date(),
      },
    };

    logger.info(`Successfully deleted file: ${file.name} (${file._id})`);
    return result;
  } catch (error) {
    logger.error('Error in flushFile:', error);
    return {
      error: error.message,
      code: 500,
    };
  }
};

// Helper function to recursively delete objects and their children
export const recursivelyDeleteChildObjects = async (
  { model, id, user = null },
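
Two helpers imported above, modelHasRef and getFieldsByRef, are not shown in this diff. Judging from how getObject and editObject call them, they plausibly inspect the schema the same way listObjectDependencies does (an assumed sketch of the utils.js implementation, not the commit's actual code):

// Assumed sketch: collect schema paths whose ref (direct or array) matches refName
export const getFieldsByRef = (model, refName) => {
  const fields = [];
  model.schema.eachPath((pathName, schemaType) => {
    const ref = schemaType?.options?.ref || schemaType?.caster?.options?.ref;
    if (ref === refName) fields.push(pathName);
  });
  return fields;
};

// Assumed sketch: true when the model has at least one such path
export const modelHasRef = (model, refName) => getFieldsByRef(model, refName).length > 0;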

38  src/index.js
@@ -7,6 +7,7 @@ import { dbConnect } from './database/mongo.js';
import {
  authRoutes,
  userRoutes,
  fileRoutes,
  printerRoutes,
  jobRoutes,
  subJobRoutes,
@@ -38,6 +39,7 @@ import log4js from 'log4js';
import { etcdServer } from './database/etcd.js';
import { populateUserMiddleware } from './services/misc/auth.js';
import { natsServer } from './database/nats.js';
import { initializeBuckets } from './services/storage/ceph.js';

dotenv.config();

@@ -61,26 +63,38 @@ const corsOptions = {
  credentials: true,
};

// Initialize application
async function initializeApp() {
  try {
    // Connect to database
    dbConnect();

    // Connect to Etcd (await)
    try {
      // Connect to Etcd
      etcdServer.connect();
      logger.info('Connected to Etcd');
    } catch (err) {
      logger.error('Failed to connect to Etcd:', err);
      throw err;
    }

    // Connect to NATS (await)
    try {
      // Connect to NATS
      natsServer.connect();
      logger.info('Connected to NATS');

      // Initialize Ceph buckets
      try {
        await initializeBuckets();
        logger.info('Ceph buckets initialized successfully');
      } catch (err) {
        logger.error('Failed to connect to NATS:', err);
        throw err;
        logger.error('Failed to initialize Ceph buckets:', err);
        // Don't throw error - allow app to start without Ceph for development
      }

    // Start server
    app.listen(PORT, () => logger.info(`Server listening to port ${PORT}`));
  } catch (err) {
    logger.error('Failed to initialize application:', err);
    process.exit(1);
  }
}
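
The hunk above interleaves removed and added lines. Reading the net result: Mongo, Etcd, and NATS are connected first, Ceph bucket initialization follows and is deliberately non-fatal, and only then does the server bind its port. A sketch of the resulting function as I read the added side (a reconstruction, not a verbatim quote of the commit):

async function initializeApp() {
  try {
    dbConnect(); // MongoDB

    try {
      etcdServer.connect(); // Etcd (failure is fatal)
      logger.info('Connected to Etcd');
    } catch (err) {
      logger.error('Failed to connect to Etcd:', err);
      throw err;
    }

    natsServer.connect(); // NATS
    logger.info('Connected to NATS');

    try {
      await initializeBuckets(); // Ceph buckets
      logger.info('Ceph buckets initialized successfully');
    } catch (err) {
      logger.error('Failed to initialize Ceph buckets:', err);
      // Non-fatal: allow the app to start without Ceph for development
    }

    app.listen(PORT, () => logger.info(`Server listening to port ${PORT}`));
  } catch (err) {
    logger.error('Failed to initialize application:', err);
    process.exit(1);
  }
}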

// Configure middleware
app.use(cors(corsOptions));
app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' }));
app.use(express.json());
@@ -95,6 +109,7 @@ app.get('/', function (req, res) {

app.use('/auth', authRoutes);
app.use('/users', userRoutes);
app.use('/files', fileRoutes);
app.use('/spotlight', spotlightRoutes);
app.use('/printers', printerRoutes);
app.use('/hosts', hostRoutes);
@@ -124,4 +139,5 @@ if (process.env.SCHEDULE_HOUR) {
  });
}

app.listen(PORT, () => logger.info(`Server listening to port ${PORT}`));
// Start the application
initializeApp();

src/routes/index.js
@@ -1,4 +1,5 @@
import userRoutes from './management/users.js';
import fileRoutes from './management/files.js';
import authRoutes from './misc/auth.js';
import printerRoutes from './production/printers.js';
import hostRoutes from './management/hosts.js';
@@ -25,6 +26,7 @@ import noteRoutes from './misc/notes.js';

export {
  userRoutes,
  fileRoutes,
  authRoutes,
  printerRoutes,
  hostRoutes,

56  src/routes/management/files.js (new file)
@@ -0,0 +1,56 @@
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { getFilter, convertPropertiesString } from '../../utils.js';

const router = express.Router();
import {
  listFilesRouteHandler,
  getFileRouteHandler,
  getFileContentRouteHandler,
  editFileRouteHandler,
  newFileRouteHandler,
  flushFileRouteHandler,
  deleteFileRouteHandler,
  listFilesByPropertiesRouteHandler,
} from '../../services/management/files.js';

// list of files
router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, search, sort, order } = req.query;
  const allowedFilters = ['_id', 'name', 'type', 'size'];
  const filter = getFilter(req.query, allowedFilters);
  listFilesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

router.get('/properties', isAuthenticated, (req, res) => {
  let properties = convertPropertiesString(req.query.properties);
  const allowedFilters = ['type'];
  const filter = getFilter(req.query, allowedFilters, false);
  listFilesByPropertiesRouteHandler(req, res, properties, filter);
});

router.post('/', isAuthenticated, (req, res) => {
  newFileRouteHandler(req, res);
});

router.delete('/:id/flush', isAuthenticated, (req, res) => {
  flushFileRouteHandler(req, res);
});

router.get('/:id', isAuthenticated, (req, res) => {
  getFileRouteHandler(req, res);
});

router.get('/:id/content', isAuthenticated, (req, res) => {
  getFileContentRouteHandler(req, res);
});

router.put('/:id', isAuthenticated, async (req, res) => {
  editFileRouteHandler(req, res);
});

router.delete('/:id', isAuthenticated, async (req, res) => {
  deleteFileRouteHandler(req, res);
});

export default router;
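
For reference, a client would exercise the new file routes roughly as follows (an illustrative sketch; the relative base URL and cookie-based session handling are assumptions, not part of the commit):

// Upload: multipart/form-data, field name 'file' (matches fileUpload's .single('file'))
const form = new FormData();
form.append('file', fileBlob, 'photo.jpg');
const created = await (
  await fetch('/files', { method: 'POST', body: form, credentials: 'include' })
).json();

// Fetch the stored content back
const content = await (
  await fetch(`/files/${created._id}/content`, { credentials: 'include' })
).blob();

// Flush: deletes the file only if nothing references it (409 with details otherwise)
await fetch(`/files/${created._id}/flush`, { method: 'DELETE', credentials: 'include' });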

src/routes/management/parts.js
@@ -1,6 +1,6 @@
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
import { getFilter, convertPropertiesString } from '../../utils.js';

const router = express.Router();
import {
@@ -8,28 +8,23 @@ import {
  getPartRouteHandler,
  editPartRouteHandler,
  newPartRouteHandler,
  uploadPartFileContentRouteHandler,
  getPartFileContentRouteHandler,
  deletePartRouteHandler,
  listPartsByPropertiesRouteHandler,
} from '../../services/management/parts.js';

// list of parts
router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, sort, order } = req.query;
  const { page, limit, property, search, sort, order } = req.query;
  const allowedFilters = ['product._id', '_id', 'name', 'globalPrice'];
  const filter = getFilter(req.query, allowedFilters);
  listPartsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

  const allowedFilters = ['products', 'name', 'product._id'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const parsedFilter = parseFilter(key, value);
        filter = { ...filter, ...parsedFilter };
      }
    }
  }

  listPartsRouteHandler(req, res, page, limit, property, filter, '', sort, order);
router.get('/properties', isAuthenticated, (req, res) => {
  let properties = convertPropertiesString(req.query.properties);
  const allowedFilters = ['product._id'];
  const filter = getFilter(req.query, allowedFilters, false);
  listPartsByPropertiesRouteHandler(req, res, properties, filter);
});

router.post('/', isAuthenticated, (req, res) => {
@@ -40,17 +35,12 @@ router.get('/:id', isAuthenticated, (req, res) => {
  getPartRouteHandler(req, res);
});

// update printer info
router.put('/:id', isAuthenticated, async (req, res) => {
  editPartRouteHandler(req, res);
});

router.post('/:id/content', isAuthenticated, (req, res) => {
  uploadPartFileContentRouteHandler(req, res);
});

router.get('/:id/content', isAuthenticated, (req, res) => {
  getPartFileContentRouteHandler(req, res);
router.delete('/:id', isAuthenticated, async (req, res) => {
  deletePartRouteHandler(req, res);
});

export default router;

src/routes/management/products.js
@@ -1,6 +1,6 @@
import express from 'express';
import { isAuthenticated } from '../../keycloak.js';
import { parseFilter } from '../../utils.js';
import { getFilter, convertPropertiesString } from '../../utils.js';

const router = express.Router();
import {
@@ -8,26 +8,23 @@ import {
  getProductRouteHandler,
  editProductRouteHandler,
  newProductRouteHandler,
  deleteProductRouteHandler,
  listProductsByPropertiesRouteHandler,
} from '../../services/management/products.js';

// list of products
router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;
  const { page, limit, property, search, sort, order } = req.query;
  const allowedFilters = ['_id', 'name', 'globalPrice'];
  const filter = getFilter(req.query, allowedFilters);
  listProductsRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

  const allowedFilters = ['type', 'brand', 'diameter', 'color'];

  var filter = {};

  for (const [key, value] of Object.entries(req.query)) {
    for (var i = 0; i < allowedFilters.length; i++) {
      if (key == allowedFilters[i]) {
        const parsedFilter = parseFilter(key, value);
        filter = { ...filter, ...parsedFilter };
      }
    }
  }

  listProductsRouteHandler(req, res, page, limit, property, filter);
router.get('/properties', isAuthenticated, (req, res) => {
  let properties = convertPropertiesString(req.query.properties);
  const allowedFilters = [];
  const filter = getFilter(req.query, allowedFilters, false);
  listProductsByPropertiesRouteHandler(req, res, properties, filter);
});

router.post('/', isAuthenticated, (req, res) => {
@@ -38,9 +35,12 @@ router.get('/:id', isAuthenticated, (req, res) => {
  getProductRouteHandler(req, res);
});

// update printer info
router.put('/:id', isAuthenticated, async (req, res) => {
  editProductRouteHandler(req, res);
});

router.delete('/:id', isAuthenticated, async (req, res) => {
  deleteProductRouteHandler(req, res);
});

export default router;

src/routes/production/gcodefiles.js
@@ -4,32 +4,29 @@ import { isAuthenticated } from '../../keycloak.js';
const router = express.Router();
import {
  listGCodeFilesRouteHandler,
  listGCodeFilesByPropertiesRouteHandler,
  getGCodeFileRouteHandler,
  editGCodeFileRouteHandler,
  getGCodeFileRouteHandler,
  newGCodeFileRouteHandler,
  parseGCodeFileHandler,
  uploadGCodeFileContentRouteHandler,
  getGCodeFileContentRouteHandler,
  deleteGCodeFileRouteHandler,
  listGCodeFilesByPropertiesRouteHandler,
} from '../../services/production/gcodefiles.js';
import { convertPropertiesString, getFilter } from '../../utils.js';

// list of vendors
// list of gcodeFiles
router.get('/', isAuthenticated, (req, res) => {
  const { page, limit, property, search, sort, order } = req.query;
  const allowedFilters = ['_id', 'filament'];
  const allowedFilters = ['_id', 'name', 'filament._id'];
  const filter = getFilter(req.query, allowedFilters);
  listGCodeFilesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
});

router.get('/properties', isAuthenticated, (req, res) => {
  let properties = convertPropertiesString(req.query.properties);
  const allowedFilters = ['filament'];
  const allowedFilters = ['tags'];
  const filter = getFilter(req.query, allowedFilters, false);
  listGCodeFilesByPropertiesRouteHandler(req, res, properties, filter);
});

// create new gcodeFile
router.post('/', isAuthenticated, (req, res) => {
  newGCodeFileRouteHandler(req, res);
});
@@ -38,24 +35,9 @@ router.get('/:id', isAuthenticated, (req, res) => {
  getGCodeFileRouteHandler(req, res);
});

// update gcodeFile info
router.put('/:id', isAuthenticated, async (req, res) => {
  editGCodeFileRouteHandler(req, res);
});

router.delete('/:id', isAuthenticated, async (req, res) => {
  deleteGCodeFileRouteHandler(req, res);
});

router.post('/:id/content', isAuthenticated, (req, res) => {
  uploadGCodeFileContentRouteHandler(req, res);
});

router.post('/content', isAuthenticated, (req, res) => {
  parseGCodeFileHandler(req, res);
});

router.get('/:id/content', isAuthenticated, (req, res) => {
  getGCodeFileContentRouteHandler(req, res);
});

export default router;

@@ -28,6 +28,7 @@ const auditLogSchema = new Schema(
      'vendor',
      'part',
      'host',
      'file',
      'product',
      'material',
      'filament',

20  src/schemas/management/file.schema.js (new file)
@@ -0,0 +1,20 @@
import mongoose from 'mongoose';

const fileSchema = new mongoose.Schema(
  {
    name: { required: true, type: String },
    type: { required: true, type: String },
    extension: { required: true, type: String },
    size: { required: false, type: Number },
    metaData: { required: false, type: Object },
  },
  { timestamps: true }
);

fileSchema.virtual('id').get(function () {
  return this._id.toHexString();
});

fileSchema.set('toJSON', { virtuals: true });

export const fileModel = mongoose.model('file', fileSchema);

src/schemas/management/part.schema.js
@@ -11,6 +11,8 @@ const partSchema = new Schema(
    priceMode: { type: String, default: 'margin' },
    amount: { type: Number, required: false },
    margin: { type: Number, required: false },
    vendor: { type: Schema.Types.ObjectId, ref: 'vendor', required: true },
    file: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
  },
  { timestamps: true }
);

@@ -7,6 +7,7 @@ const userSchema = new mongoose.Schema(
    firstName: { required: false, type: String },
    lastName: { required: false, type: String },
    email: { required: true, type: String },
    profileImage: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
  },
  { timestamps: true }
);

@@ -4,10 +4,10 @@ const { Schema } = mongoose;
const gcodeFileSchema = new mongoose.Schema({
  name: { required: true, type: String },
  gcodeFileName: { required: false, type: String },
  gcodeFileInfo: { required: true, type: Object },
  size: { type: Number, required: false },
  filament: { type: Schema.Types.ObjectId, ref: 'filament', required: true },
  parts: [{ type: Schema.Types.ObjectId, ref: 'part', required: true }],
  file: { type: mongoose.SchemaTypes.ObjectId, ref: 'file', required: false },
  cost: { type: Number, required: false },
  createdAt: { type: Date },
  updatedAt: { type: Date },

412  src/services/management/files.js (new file)
@@ -0,0 +1,412 @@
import dotenv from 'dotenv';
import { fileModel } from '../../schemas/management/file.schema.js';
import log4js from 'log4js';
import multer from 'multer';
import path from 'path';
import fs from 'fs';
import mongoose from 'mongoose';
import {
  deleteObject,
  listObjects,
  getObject,
  editObject,
  newObject,
  listObjectsByProperties,
  flushFile,
} from '../../database/database.js';
import {
  uploadFile,
  downloadFile,
  deleteFile as deleteCephFile,
  BUCKETS,
} from '../storage/ceph.js';
import { getFileMeta } from '../../utils.js';
dotenv.config();

const logger = log4js.getLogger('Files');
logger.level = process.env.LOG_LEVEL;

// Set storage engine to memory for Ceph upload
const fileStorage = multer.memoryStorage();

// Initialise upload
const fileUpload = multer({
  storage: fileStorage,
  limits: { fileSize: 500000000 }, // 500MB limit
  fileFilter: function (req, file, cb) {
    checkFileType(file, cb);
  },
}).single('file'); // The name attribute of the file input in the HTML form

export const listFilesRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = '',
  filter = {},
  search = '',
  sort = '',
  order = 'ascend'
) => {
  const result = await listObjects({
    model: fileModel,
    page,
    limit,
    property,
    filter,
    search,
    sort,
    order,
  });

  if (result?.error) {
    logger.error('Error listing files.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of files (Page ${page}, Limit ${limit}). Count: ${result.length}`);
  res.send(result);
};

export const listFilesByPropertiesRouteHandler = async (
  req,
  res,
  properties = '',
  filter = {},
  masterFilter = {}
) => {
  const result = await listObjectsByProperties({
    model: fileModel,
    properties,
    filter,
    masterFilter,
  });

  if (result?.error) {
    logger.error('Error listing files.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of files. Count: ${result.length}`);
  res.send(result);
};

export const getFileRouteHandler = async (req, res) => {
  const id = req.params.id;
  const result = await getObject({
    model: fileModel,
    id,
  });
  if (result?.error) {
    logger.warn(`File not found with supplied id.`);
    return res.status(result.code).send(result);
  }
  logger.debug(`Retrieved file with ID: ${id}`);
  res.send(result);
};

export const flushFileRouteHandler = async (req, res) => {
  const id = req.params.id;
  const result = await flushFile({ user: req.user, id });
  res.send(result);
};

export const editFileRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`File with ID: ${id}`);

  const updateData = {
    updatedAt: new Date(),
    name: req.body.name,
  };

  const result = await editObject({
    model: fileModel,
    id,
    updateData,
    user: req.user,
  });

  if (result.error) {
    logger.error('Error editing file:', result.error);
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited file with ID: ${id}`);

  res.send(result);
};

export const newFileRouteHandler = async (req, res) => {
  try {
    fileUpload(req, res, async (err) => {
      if (err) {
        return res.status(500).send({
          error: err,
        });
      }

      if (req.file == undefined) {
        return res.send({
          message: 'No file selected!',
        });
      }

      // Declared outside the try so the catch block can clean up a created record
      let created;
      try {
        // Create DB entry first without storage fields
        const extension = path.extname(req.file.originalname);
        const baseName = path.parse(req.file.originalname).name;
        const meta = await getFileMeta(req.file);
        const newData = {
          name: baseName,
          type: req.file.mimetype,
          extension,
          size: req.file.size,
          updatedAt: new Date(),
          metaData: {
            originalName: req.file.originalname,
            ...meta,
          },
        };

        created = await newObject({
          model: fileModel,
          newData,
          user: req.user,
        });

        if (created.error) {
          logger.error('No file created:', created.error);
          return res.status(created.code).send(created);
        }

        // Use created document _id to generate Ceph key
        const cephKey = `files/${created._id}${extension}`;

        // Upload file to Ceph
        await uploadFile(BUCKETS.FILES, cephKey, req.file.buffer, req.file.mimetype, {
          originalName: req.file.originalname,
          uploadedBy: req.user?.username || 'unknown',
        });

        // Do not update DB with Ceph location. Return created DB record.
        logger.debug(`New file with ID: ${created._id} created and uploaded to Ceph`);

        res.send(created);
      } catch (createError) {
        logger.error('Error creating file record or uploading to storage:', createError);
        // If we created the DB entry but upload failed, remove the DB entry to avoid orphaned records
        try {
          if (created && created._id) {
            await deleteObject({ model: fileModel, id: created._id, user: req.user });
          }
        } catch (cleanupDbError) {
          logger.error('Error cleaning up DB record after upload failure:', cleanupDbError);
        }
        res.status(500).send({ error: createError.message });
      }
    });
  } catch (error) {
    logger.error('Error in newFileRouteHandler:', error);
    res.status(500).send({ error: error.message });
  }
};
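
getFileMeta is imported from utils.js but not shown in this diff. Given the exifr dependency added to package.json above, it plausibly does a best-effort metadata extraction along these lines (an assumption about utils.js, not the commit's code):

import exifr from 'exifr';

// Assumed sketch: best-effort EXIF parse from the in-memory upload buffer
export const getFileMeta = async (file) => {
  try {
    if (file.mimetype?.startsWith('image/')) {
      return (await exifr.parse(file.buffer)) || {};
    }
  } catch (err) {
    // Metadata extraction is best-effort; swallow parse failures
  }
  return {};
};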

export const deleteFileRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`File with ID: ${id}`);

  try {
    // First get the file to retrieve Ceph information
    const file = await getObject({
      model: fileModel,
      id,
    });
    if (!file) {
      return res.status(404).send({ error: 'File not found' });
    }

    // Delete from Ceph if it exists there
    if (file.cephBucket && file.cephKey) {
      try {
        await deleteCephFile(file.cephBucket, file.cephKey);
        logger.debug(`Deleted file from Ceph: ${file.cephKey}`);
      } catch (cephError) {
        logger.warn(`Failed to delete file from Ceph: ${cephError.message}`);
        // Continue with database deletion even if Ceph deletion fails
      }
    }

    const result = await deleteObject({
      model: fileModel,
      id,
      user: req.user,
    });
    if (result.error) {
      logger.error('No file deleted:', result.error);
      return res.status(result.code).send(result);
    }

    logger.debug(`Deleted file with ID: ${result._id}`);

    res.send(result);
  } catch (error) {
    logger.error('Error in deleteFileRouteHandler:', error);
    res.status(500).send({ error: error.message });
  }
};

// Check file type
function checkFileType(file, cb) {
  // Allow all file types for general file management
  // You can customize this to restrict specific file types if needed
  const allowedTypes = /.*/; // Allow all file types

  if (allowedTypes.test(file.mimetype)) {
    console.log(file);
    return cb(null, true);
  } else {
    cb('Error: File type not allowed!');
  }
}
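
As the comments in checkFileType note, the filter currently accepts every MIME type; restricting uploads would only require swapping the pattern, for example (an illustrative variant, not part of the commit):

// Illustrative variant of checkFileType that accepts only images and PDFs
function checkImageOrPdfType(file, cb) {
  const allowedTypes = /^(image\/(png|jpe?g|gif|webp)|application\/pdf)$/;
  if (allowedTypes.test(file.mimetype)) {
    return cb(null, true);
  }
  cb('Error: File type not allowed!');
}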

export const getFileContentRouteHandler = async (req, res) => {
  try {
    const id = req.params.id;

    const file = await getObject({
      model: fileModel,
      id,
    });

    if (!file) {
      logger.warn(`File not found with supplied id.`);
      return res.status(404).send({ error: 'File not found.' });
    }

    logger.trace(`Returning file contents with ID: ${id}:`);

    // Check if file is stored in Ceph
    if (file._id && file.extension) {
      const cephKey = `files/${id}${file.extension}`;
      try {
        const body = await downloadFile(BUCKETS.FILES, cephKey);

        // Set appropriate content type and disposition
        res.set('Content-Type', file.type || 'application/octet-stream');
        res.set('Content-Disposition', `attachment; filename="${file.name}${file.extension}"`);

        // Stream or send buffer
        if (body && typeof body.pipe === 'function') {
          // Handle stream errors
          body.on('error', (err) => {
            logger.error('Error streaming file from Ceph:', err);
            // If headers not sent, send a 500; otherwise destroy the response
            if (!res.headersSent) {
              try {
                res.status(500).send({ error: 'Error streaming file from storage.' });
              } catch (_) {
                // Ignore secondary errors
              }
            } else {
              res.destroy(err);
            }
          });

          // If client disconnects, stop reading from source
          res.on('close', () => {
            if (typeof body.destroy === 'function') {
              body.destroy();
            }
          });

          body.pipe(res);
          logger.debug('Retrieved:', cephKey);
          return;
        }

        // Unknown body type
        logger.error('Unknown Ceph body type; cannot send response');
        return res.status(500).send({ error: 'Error reading file from storage.' });
      } catch (cephError) {
        logger.error('Error downloading file from Ceph:', cephError);
        // Fall through to local filesystem fallback below
      }

      // Fallback to local file system for backward compatibility
      const filePath = path.join(
        process.env.FILE_STORAGE || './uploads',
        file.fileName || file.name
      );

      // Read the file
      fs.readFile(filePath, (err, data) => {
        if (err) {
          if (err.code === 'ENOENT') {
            return res.status(404).send({ error: 'File not found!' });
          }
          return res.status(500).send({ error: 'Error reading file.' });
        }

        res.set('Content-Type', file.type || 'application/octet-stream');
        res.set('Content-Disposition', `inline; filename="${file.name}${file.extension || ''}"`);
        return res.send(data);
      });
    } else {
      logger.error('Error fetching file:', 'No ceph bucket or key supplied.');
      res.status(500).send({ error: 'No ceph bucket or key supplied.' });
    }
  } catch (error) {
    logger.error('Error fetching file:', error);
    res.status(500).send({ error: error.message });
  }
};

export const parseFileHandler = async (req, res) => {
  try {
    // Use the same upload middleware as the uploadFileContentRouteHandler
    fileUpload(req, res, async (err) => {
      if (err) {
        return res.status(500).send({
          error: err,
        });
      }

      if (req.file == undefined) {
        return res.send({
          message: 'No file selected!',
        });
      }

      try {
        // Read the file content from memory buffer
        const fileContent = req.file.buffer.toString('utf8');

        // Return basic file info as JSON
        const fileInfo = {
          filename: req.file.originalname,
          originalName: req.file.originalname,
          size: req.file.size,
          mimetype: req.file.mimetype,
          content: fileContent.substring(0, 1000), // First 1000 characters
        };

        res.json(fileInfo);
      } catch (parseError) {
        logger.error('Error parsing file:', parseError);
        res.status(500).send({ error: parseError.message });
      }
    });
  } catch (error) {
    logger.error('Error in parseFileHandler:', error);
    res.status(500).send({ error: error.message });
  }
};

src/services/management/parts.js
@@ -2,54 +2,19 @@ import dotenv from 'dotenv';
import { partModel } from '../../schemas/management/part.schema.js';
import log4js from 'log4js';
import mongoose from 'mongoose';
import multer from 'multer';
import fs from 'fs';
import path from 'path';
import { distributeNew, distributeUpdate, editAuditLog, newAuditLog } from '../../utils.js';

import {
  deleteObject,
  listObjects,
  getObject,
  editObject,
  newObject,
  listObjectsByProperties,
} from '../../database/database.js';
dotenv.config();

const logger = log4js.getLogger('Parts');
logger.level = process.env.LOG_LEVEL;

// Set storage engine
const partsStorage = multer.diskStorage({
  destination: process.env.PART_STORAGE,
  filename: async function (req, file, cb) {
    // Retrieve custom file name from request body
    const customFileName = req.params.id || 'default'; // Default to 'default' if not provided
    // Create the final filename, ensuring it ends with .stl
    const finalFilename = `${customFileName}.stl`;

    // Call callback with the final filename
    cb(null, finalFilename);
  },
});

// Initialise upload
const partUpload = multer({
  storage: partsStorage,
  limits: { fileSize: 500000000 }, // 500MB limit
  fileFilter: function (req, file, cb) {
    checkFileType(file, cb);
  },
}).single('partFile'); // The name attribute of the file input in the HTML form

// Check file type
function checkFileType(file, cb) {
  // Allowed ext
  const filetypes = /stl/;
  // Check ext
  const extname = filetypes.test(path.extname(file.originalname).toLowerCase());

  if (extname) {
    console.log(file);
    return cb(null, true);
  } else {
    cb('Error: .stl files only!');
  }
}

export const listPartsRouteHandler = async (
  req,
  res,
@@ -61,265 +26,142 @@ export const listPartsRouteHandler = async (
  sort = '',
  order = 'ascend'
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    let part;
    let aggregateCommand = [];

    if (property != '') {
      logger.error(property);
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
      aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
    } else {
      aggregateCommand.push({
        $lookup: {
          from: 'products', // The collection name (usually lowercase plural)
          localField: 'product', // The field in your current model
          foreignField: '_id', // The field in the products collection
          as: 'product', // The output field name
        },
  const result = await listObjects({
    model: partModel,
    page,
    limit,
    property,
    filter,
    search,
    sort,
    order,
    populate: ['vendor', 'product'],
  });
      aggregateCommand.push({ $unwind: '$product' });
      aggregateCommand.push({
        $project: {
          name: 1,
          globalPricing: 1,
          _id: 1,
          createdAt: 1,
          updatedAt: 1,
          'product._id': 1,
          'product.name': 1,
        },

  if (result?.error) {
    logger.error('Error listing parts.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of parts (Page ${page}, Limit ${limit}). Count: ${result.length}.`);
  res.send(result);
};

export const listPartsByPropertiesRouteHandler = async (req, res, properties = '', filter = {}) => {
  const result = await listObjectsByProperties({
    model: partModel,
    properties,
    filter,
    populate: ['vendor', 'product'],
  });

  if (result?.error) {
    logger.error('Error listing parts.');
    res.status(result.code).send(result);
    return;
  }

    if (filter != {}) {
      // use filtering if present
      aggregateCommand.push({ $match: filter });
    }

    if (search) {
      // Add a text search match stage for name and brand fields
      aggregateCommand.push({
        $match: {
          $text: {
            $search: search,
          },
        },
      });
    }

    // Add sorting if sort parameter is provided
    if (sort) {
      const sortOrder = order === 'descend' ? -1 : 1;
      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    console.log(aggregateCommand);

    part = await partModel.aggregate(aggregateCommand);

    logger.trace(
      `List of parts (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
      part
    );
    res.send(part);
  } catch (error) {
    logger.error('Error listing parts:', error);
    res.status(500).send({ error: error });
  }
  logger.debug(`List of parts. Count: ${result.length}`);
  res.send(result);
};

export const getPartRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the part with the given remote address
    const part = await partModel
      .findOne({
        _id: id,
      })
      .populate('product');

    if (!part) {
  const id = req.params.id;
  const result = await getObject({
    model: partModel,
    id,
    populate: ['vendor', 'product'],
  });
  if (result?.error) {
    logger.warn(`Part not found with supplied id.`);
      return res.status(404).send({ error: 'Print job not found.' });
    }

    logger.trace(`Part with ID: ${id}:`, part);

    res.send({ ...part._doc });
  } catch (error) {
    logger.error('Error fetching Part:', error);
    res.status(500).send({ error: error.message });
    return res.status(result.code).send(result);
  }
  logger.debug(`Retrieved part with ID: ${id}`);
  res.send(result);
};

export const editPartRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the part with the given remote address
    const part = await partModel.findOne({ _id: id });

    if (!part) {
      // Error handling
      logger.warn(`Part not found with supplied id.`);
      return res.status(404).send({ error: 'Print job not found.' });
    }

    logger.trace(`Part with ID: ${id}:`, part);

    try {
      const updateData = req.body;
  logger.trace(`Part with ID: ${id}`);

  const updateData = {
    updatedAt: new Date(),
    name: req.body?.name,
    globalPricing: req.body.globalPricing,
    file: req.body?.file,
    vendor: req.body?.vendor,
    product: req.body?.product,
    margin: req.body?.margin,
    amount: req.body?.amount,
    priceMode: req.body?.priceMode,
  };
      // Create audit log before updating
      await editAuditLog(part.toObject(), updateData, id, 'part', req.user._id, 'user');
      await distributeUpdate(updateData, id, 'part');
  const result = await editObject({
    model: partModel,
    id,
    updateData,
    user: req.user,
  });

      const result = await partModel.updateOne({ _id: id }, { $set: updateData });
      if (result.nModified === 0) {
        logger.error('No Part updated.');
        res.status(500).send({ error: 'No parts updated.' });
      }
    } catch (updateError) {
      logger.error('Error updating part:', updateError);
      res.status(500).send({ error: updateError.message });
    }
    res.send('OK');
  } catch (fetchError) {
    logger.error('Error fetching part:', fetchError);
    res.status(500).send({ error: fetchError.message });
  if (result.error) {
    logger.error('Error editing part:', result.error);
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited part with ID: ${id}`);

  res.send(result);
};

export const newPartRouteHandler = async (req, res) => {
  try {
    if (Array.isArray(req.body)) {
      // Handle array of parts
      const partsToCreate = req.body.map((part) => ({
        createdAt: new Date(),
  const newData = {
    updatedAt: new Date(),
        name: part.name,
        products: part?.products,
        fileName: part?.fileName,
      }));

      const results = await partModel.insertMany(partsToCreate);
      if (!results.length) {
        logger.error('No parts created.');
        return res.status(500).send({ error: 'No parts created.' });
      }

      // Create audit logs for each new part
      for (const result of results) {
        await newAuditLog(result.toObject(), result._id, 'part', req.user);
      }
      await distributeNew(null, 'part');
      return res.status(200).send(results);
    } else {
      // Handle single part
      const newPart = {
        createdAt: new Date(),
        updatedAt: new Date(),
        name: req.body.name,
        products: req.body?.products,
        fileName: req.body?.fileName,
      };
      const result = await partModel.create(newPart);

      // Create audit log for new part
      await newAuditLog(newPart, result._id, 'part', req.user);
      await distributeNew(result._id, 'part');
      return res.status(200).send(result);
    }
  } catch (error) {
    logger.error('Error creating part(s):', error);
    return res.status(500).send({ error: error.message });
  }
    name: req.body?.name,
    globalPricing: req.body.globalPricing,
    file: req.body?.file,
    vendor: req.body?.vendor,
    product: req.body?.product,
    margin: req.body?.margin,
    amount: req.body?.amount,
    priceMode: req.body?.priceMode,
  };

export const uploadPartFileContentRouteHandler = async (req, res) => {
  try {
  const result = await newObject({
    model: partModel,
    newData,
    user: req.user,
  });
  if (result.error) {
    logger.error('No part created:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`New part with ID: ${result._id}`);

  res.send(result);
};

export const deletePartRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);
  // Fetch the part with the given id
  const part = await partModel.findOne({ _id: id });
  if (!part) {
    // Error handling
    logger.warn(`Part not found with supplied id.`);
    return res.status(404).send({ error: 'Print job not found.' });
  }

  logger.trace(`Part with ID: ${id}`);
  try {
    partUpload(req, res, async (err) => {
      if (err) {
        res.status(500).send({
          error: err,
        });
      } else {
        if (req.file == undefined) {
          res.send({
            message: 'No file selected!',
          });
        } else {
          res.send({
            status: 'OK',
            file: `${req.file.filename}`,

  const result = await deleteObject({
    model: partModel,
    id,
    user: req.user,
  });
  if (result.error) {
    logger.error('No part deleted:', result.error);
    return res.status(result.code).send(result);
  }
        }
      });
    } catch (updateError) {
      logger.error('Error updating part:', updateError);
      res.status(500).send({ error: updateError.message });
    }
  } catch (fetchError) {
    logger.error('Error fetching part:', fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const getPartFileContentRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the part with the given remote address
    const part = await partModel.findOne({
      _id: id,
    });

    if (!part) {
      logger.warn(`Part not found with supplied id.`);
      return res.status(404).send({ error: 'Part not found.' });
    }

    logger.trace(`Returning part file contents with ID: ${id}:`);

    const filePath = path.join(process.env.PART_STORAGE, id + '.stl');

    // Read the file
    fs.readFile(filePath, 'utf8', (err, data) => {
      if (err) {
        if (err.code === 'ENOENT') {
          // File not found
          return res.status(404).send({ error: 'File not found!' });
        } else {
          // Other errors
          return res.status(500).send({ error: 'Error reading file.' });
        }
      }

      // Send the file contents in the response
      res.send(data);
    });
  } catch (error) {
    logger.error('Error fetching Part:', error);
    res.status(500).send({ error: error.message });
  }

  logger.debug(`Deleted part with ID: ${result._id}`);

  res.send(result);
};
@ -1,15 +1,15 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { productModel } from '../../schemas/management/product.schema.js';
|
||||
import { partModel } from '../../schemas/management/part.schema.js';
|
||||
import log4js from 'log4js';
|
||||
import mongoose from 'mongoose';
|
||||
import {
|
||||
editAuditLog,
|
||||
flatternObjectIds,
|
||||
distributeUpdate,
|
||||
newAuditLog,
|
||||
distributeNew,
|
||||
} from '../../utils.js';
|
||||
deleteObject,
|
||||
listObjects,
|
||||
getObject,
|
||||
editObject,
|
||||
newObject,
|
||||
listObjectsByProperties,
|
||||
} from '../../database/database.js';
|
||||
dotenv.config();
|
||||
|
||||
const logger = log4js.getLogger('Products');
|
||||
@ -21,200 +21,148 @@ export const listProductsRouteHandler = async (
|
||||
page = 1,
|
||||
limit = 25,
|
||||
property = '',
|
||||
filter = {},
|
||||
search = '',
|
||||
sort = '',
|
||||
order = 'ascend'
|
||||
) => {
|
||||
const result = await listObjects({
|
||||
model: productModel,
|
||||
page,
|
||||
limit,
|
||||
property,
|
||||
filter,
|
||||
search,
|
||||
sort,
|
||||
order,
|
||||
populate: ['vendor'],
|
||||
});
|
||||
|
||||
if (result?.error) {
|
||||
logger.error('Error listing products.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`List of products (Page ${page}, Limit ${limit}). Count: ${result.length}.`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const listProductsByPropertiesRouteHandler = async (
|
||||
req,
|
||||
res,
|
||||
properties = '',
|
||||
filter = {}
|
||||
) => {
|
||||
try {
|
||||
// Calculate the skip value based on the page number and limit
|
||||
const skip = (page - 1) * limit;
|
||||
const result = await listObjectsByProperties({
|
||||
model: productModel,
|
||||
properties,
|
||||
filter,
|
||||
populate: ['vendor'],
|
||||
});
|
||||
|
||||
let product;
|
||||
let aggregateCommand = [];
|
||||
|
||||
if (filter != {}) {
|
||||
// use filtering if present
|
||||
aggregateCommand.push({ $match: filter });
|
||||
if (result?.error) {
|
||||
logger.error('Error listing products.');
|
||||
res.status(result.code).send(result);
|
||||
return;
|
||||
}
|
||||
|
||||
if (property != '') {
|
||||
// Match documents where the specified property is either null, undefined, empty string, empty array or empty object
|
||||
aggregateCommand.push({
|
||||
$match: {
|
||||
$or: [
|
||||
{ [property]: null },
|
||||
{ [property]: '' },
|
||||
{ [property]: [] },
|
||||
{ [property]: {} },
|
||||
{ [property]: { $exists: false } },
|
||||
],
|
||||
},
|
||||
});
|
||||
aggregateCommand.push({ $project: { _id: 1, [property]: 1 } });
|
||||
} else {
|
||||
aggregateCommand.push({
|
||||
$lookup: {
|
||||
from: 'vendors', // The name of the Filament collection
|
||||
localField: 'vendor',
|
||||
foreignField: '_id',
|
||||
as: 'vendor',
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$unwind: {
|
||||
path: '$vendor',
|
||||
preserveNullAndEmptyArrays: true, // Keep documents without a matching vendor
|
||||
},
|
||||
});
|
||||
|
||||
aggregateCommand.push({
|
||||
$addFields: {
|
||||
vendor: '$vendor',
|
||||
},
|
||||
});
|
||||
aggregateCommand.push({ $project: { image: 0, url: 0 } });
|
||||
}
|
||||
|
||||
aggregateCommand.push({ $skip: skip });
|
||||
aggregateCommand.push({ $limit: Number(limit) });
|
||||
|
||||
console.log(aggregateCommand);
|
||||
|
||||
product = await productModel.aggregate(aggregateCommand);
|
||||
|
||||
logger.trace(`List of products (Page ${page}, Limit ${limit}, Property ${property}):`, product);
|
||||
res.send(product);
|
||||
} catch (error) {
|
||||
logger.error('Error listing products:', error);
|
||||
res.status(500).send({ error: error });
|
||||
}
|
||||
logger.debug(`List of products. Count: ${result.length}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const getProductRouteHandler = async (req, res) => {
|
||||
try {
|
||||
// Get ID from params
|
||||
const id = new mongoose.Types.ObjectId(req.params.id);
|
||||
// Fetch the product with the given remote address
|
||||
const product = await productModel
|
||||
.findOne({
|
||||
_id: id,
|
||||
})
|
||||
.populate('vendor');
|
||||
|
||||
if (!product) {
|
||||
const id = req.params.id;
|
||||
const result = await getObject({
|
||||
model: productModel,
|
||||
id,
|
||||
populate: ['vendor'],
|
||||
});
|
||||
if (result?.error) {
|
||||
logger.warn(`Product not found with supplied id.`);
|
||||
return res.status(404).send({ error: 'Print job not found.' });
|
||||
}
|
||||
|
||||
logger.trace(`Product with ID: ${id}:`, product);
|
||||
|
||||
res.send({ ...product._doc });
|
||||
} catch (error) {
|
||||
logger.error('Error fetching Product:', error);
|
||||
res.status(500).send({ error: error.message });
|
||||
return res.status(result.code).send(result);
|
||||
}
|
||||
logger.debug(`Retreived product with ID: ${id}`);
|
||||
res.send(result);
|
||||
};
|
||||
|
||||
export const editProductRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);
  var product = null;

  try {
    // Fetch the product with the given id
    product = await productModel.findOne({ _id: id });
    logger.trace(`Product with ID: ${id}`);

    if (!product) {
      // Error handling
      logger.warn(`Product not found with supplied id.`);
      return res.status(404).send({ error: 'Product not found.' });
    }

    logger.trace(`Product with ID: ${id}:`, product);
  } catch (fetchError) {
    logger.error('Error fetching product:', fetchError);
    res.status(500).send({ error: fetchError.message });
  }

  try {
    const updateData = {
      updatedAt: new Date(),
      name: req.body?.name,
      vendor: req.body?.vendor?.id,
      tags: req.body?.tags,
      version: req.body?.version,
      parts: req.body?.parts,
      margin: req.body.margin,
      amount: req.body.amount,
      priceMode: req.body.priceMode,
    };

    // Create audit log before updating
    await editAuditLog(product.toObject(), updateData, id, 'product', req.user);

    const result = await productModel.updateOne({ _id: id }, { $set: updateData });
    if (result.nModified === 0) {
      logger.error('No Product updated.');
      res.status(500).send({ error: 'No products updated.' });
    }

    await distributeUpdate(updateData, id, 'product');
  } catch (updateError) {
    logger.error('Error updating product:', updateError);
    res.status(500).send({ error: updateError.message });
  }
  res.send('OK');
};
export const editProductRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  const updateData = {
    updatedAt: new Date(),
    name: req.body?.name,
    vendor: req.body?.vendor?.id,
    tags: req.body?.tags,
    version: req.body?.version,
    parts: req.body?.parts,
    margin: req.body.margin,
    amount: req.body.amount,
    priceMode: req.body.priceMode,
  };

  // Create audit log before updating
  const result = await editObject({
    model: productModel,
    id,
    updateData,
    user: req.user,
  });

  if (result.error) {
    logger.error('Error editing product:', result.error);
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited product with ID: ${id}`);

  res.send(result);
};

export const newProductRouteHandler = async (req, res) => {
  try {
    const newProduct = {
      createdAt: new Date(),
      updatedAt: new Date(),
      name: req.body.name,
      vendor: req.body.vendor,
      parts: partIds,
      margin: req.body.margin,
      amount: req.body.amount,
      priceMode: req.body.priceMode,
    };

    const newProductResult = await productModel.create(flatternObjectIds(newProduct));

    if (newProductResult.nCreated === 0) {
      logger.error('No product created.');
      res.status(500).send({ error: 'No product created.' });
    }

    const parts = req.body.parts || [];

    var partIds = [];

    for (const part of parts) {
      const newPart = {
        createdAt: new Date(),
        updatedAt: new Date(),
        name: part.name,
        product: { _id: newProductResult._id },
      };

      const newPartResult = await partModel.create(flatternObjectIds(newPart));
      if (newPartResult.nCreated === 0) {
        logger.error('No parts created.');
        res.status(500).send({ error: 'No parts created.' });
      }
      partIds.push(newPartResult._id);

      // Create audit log for each new part
      await newAuditLog(newPart, newPartResult._id, 'part', req.user);
    }

    // Create audit log for new product
    await newAuditLog(newProduct, newProductResult._id, 'product', req.user);
    await distributeNew(newProductResult._id, 'product');

    res.status(200).send({ ...newProductResult });
  } catch (updateError) {
    logger.error('Error creating product:', updateError);
    res.status(500).send({ error: updateError.message });
  }
};
export const newProductRouteHandler = async (req, res) => {
  const newData = {
    createdAt: new Date(),
    updatedAt: new Date(),
    name: req.body?.name,
    vendor: req.body?.vendor,
    parts: req.body?.parts,
    tags: req.body?.tags,
    version: req.body?.version,
    margin: req.body.margin,
    amount: req.body.amount,
    priceMode: req.body.priceMode,
  };

  const result = await newObject({
    model: productModel,
    newData,
    user: req.user,
  });
  if (result.error) {
    logger.error('No product created:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`New product with ID: ${result._id}`);

  res.send(result);
};

export const deleteProductRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`Product with ID: ${id}`);

  const result = await deleteObject({
    model: productModel,
    id,
    user: req.user,
  });
  if (result.error) {
    logger.error('No product deleted:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`Deleted product with ID: ${result._id}`);

  res.send(result);
};

@ -75,6 +75,7 @@ export const getUserRouteHandler = async (req, res) => {
  const result = await getObject({
    model: userModel,
    id,
    populate: ['profileImage'],
  });
  if (result?.error) {
    logger.warn(`User not found with supplied id.`);
@ -96,6 +97,7 @@ export const editUserRouteHandler = async (req, res) => {
    firstName: req.body.firstName,
    lastName: req.body.lastName,
    email: req.body.email,
    profileImage: req.body.profileImage,
  };
  // Create audit log before updating
  const result = await editObject({

@ -32,7 +32,6 @@ const lookupUserByToken = async (token) => {
  // Check cache first
  const cachedUser = tokenUserCache.get(token);
  if (cachedUser) {
    console.log(cachedUser);
    logger.debug(`User found in token cache for token: ${token.substring(0, 20)}...`);
    return cachedUser;
  }

73
src/services/misc/model.js
Normal file
@ -0,0 +1,73 @@
import { jobModel } from '../../schemas/production/job.schema.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import { printerModel } from '../../schemas/production/printer.schema.js';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import { partModel } from '../../schemas/management/part.schema.js';
import { productModel } from '../../schemas/management/product.schema.js';
import { vendorModel } from '../../schemas/management/vendor.schema.js';
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { userModel } from '../../schemas/management/user.schema.js';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import { noteModel } from '../../schemas/misc/note.schema.js';
import { documentSizeModel } from '../../schemas/management/documentsize.schema.js';
import { documentTemplateModel } from '../../schemas/management/documenttemplate.schema.js';
import { hostModel } from '../../schemas/management/host.schema.js';

// Map prefixes to models and id fields
const PREFIX_MODEL_MAP = {
  PRN: { model: printerModel, idField: '_id', type: 'printer' },
  FIL: { model: filamentModel, idField: '_id', type: 'filament' },
  GCF: { model: gcodeFileModel, idField: '_id', type: 'gcodeFile' },
  JOB: { model: jobModel, idField: '_id', type: 'job' },
  PRT: { model: partModel, idField: '_id', type: 'part' },
  PRD: { model: productModel, idField: '_id', type: 'product' },
  VEN: { model: vendorModel, idField: '_id', type: 'vendor' },
  SJB: { model: subJobModel, idField: '_id', type: 'subJob' },
  FLS: { model: filamentStockModel, idField: '_id', type: 'filamentStock' },
  SEV: { model: stockEventModel, idField: '_id', type: 'stockEvent' },
  SAU: { model: stockAuditModel, idField: '_id', type: 'stockAudit' },
  PTS: { model: partStockModel, idField: '_id', type: 'partStock' },
  PDS: { model: null, idField: '_id', type: 'productStock' }, // No productStockModel found
  ADL: { model: auditLogModel, idField: '_id', type: 'auditLog' },
  USR: { model: userModel, idField: '_id', type: 'user' },
  NTY: { model: noteTypeModel, idField: '_id', type: 'noteType' },
  NTE: { model: noteModel, idField: '_id', type: 'note' },
  DSZ: { model: documentSizeModel, idField: '_id', type: 'documentSize' },
  DTP: { model: documentTemplateModel, idField: '_id', type: 'documentTemplate' },
  HST: { model: hostModel, idField: '_id', type: 'host' },
};

/**
 * Get all models from the PREFIX_MODEL_MAP
 * @returns {Array} Array of model entries with model, idField, and type properties
 */
export const getAllModels = () => {
  return Object.values(PREFIX_MODEL_MAP);
};

/**
 * Get a model by its type name
 * @param {string} name - The type name of the model (e.g., 'printer', 'filament', 'job')
 * @returns {Object|null} The model entry or null if not found
 */
export const getModelByName = (name) => {
  const entry = Object.values(PREFIX_MODEL_MAP).find((entry) => entry.type === name);
  return entry || null;
};

/**
 * Get a model by its prefix
 * @param {string} prefix - The prefix of the model (e.g., 'PRN', 'FIL', 'JOB')
 * @returns {Object|null} The model entry or null if not found
 */
export const getModelByPrefix = (prefix) => {
  return PREFIX_MODEL_MAP[prefix] || null;
};

// Export the PREFIX_MODEL_MAP for backward compatibility
export { PREFIX_MODEL_MAP };
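
For reference, a minimal sketch of how these lookup helpers are meant to be consumed; the resolver below is illustrative and not part of this commit:

// Hypothetical consumer: resolve a prefixed spotlight query such as "PRD:<objectId>".
import { getModelByPrefix } from './services/misc/model.js';

const resolveByPrefixedId = async (prefix, id) => {
  const entry = getModelByPrefix(prefix); // e.g. 'PRD' -> { model: productModel, idField: '_id', type: 'product' }
  if (!entry || !entry.model) {
    return null; // Unknown prefix, or a mapped type with no model yet (e.g. 'PDS')
  }
  return entry.model.findOne({ [entry.idField]: id }).lean();
};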
@ -1,54 +1,12 @@
import dotenv from 'dotenv';
import log4js from 'log4js';
import mongoose from 'mongoose';
import { jobModel } from '../../schemas/production/job.schema.js';
import { subJobModel } from '../../schemas/production/subjob.schema.js';
import { printerModel } from '../../schemas/production/printer.schema.js';
import { filamentModel } from '../../schemas/management/filament.schema.js';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import { partModel } from '../../schemas/management/part.schema.js';
import { productModel } from '../../schemas/management/product.schema.js';
import { vendorModel } from '../../schemas/management/vendor.schema.js';
import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
import { userModel } from '../../schemas/management/user.schema.js';
import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
import { noteModel } from '../../schemas/misc/note.schema.js';
import { documentSizeModel } from '../../schemas/management/documentsize.schema.js';
import { documentTemplateModel } from '../../schemas/management/documenttemplate.schema.js';
import { hostModel } from '../../schemas/management/host.schema.js';
import { getAllModels, getModelByName, getModelByPrefix, PREFIX_MODEL_MAP } from './model.js';
dotenv.config();

const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;

// Map prefixes to models and id fields
const PREFIX_MODEL_MAP = {
  PRN: { model: printerModel, idField: '_id', type: 'printer' },
  FIL: { model: filamentModel, idField: '_id', type: 'filament' },
  GCF: { model: gcodeFileModel, idField: '_id', type: 'gcodeFile' },
  JOB: { model: jobModel, idField: '_id', type: 'job' },
  PRT: { model: partModel, idField: '_id', type: 'part' },
  PRD: { model: productModel, idField: '_id', type: 'product' },
  VEN: { model: vendorModel, idField: '_id', type: 'vendor' },
  SJB: { model: subJobModel, idField: '_id', type: 'subJob' },
  FLS: { model: filamentStockModel, idField: '_id', type: 'filamentStock' },
  SEV: { model: stockEventModel, idField: '_id', type: 'stockEvent' },
  SAU: { model: stockAuditModel, idField: '_id', type: 'stockAudit' },
  PTS: { model: partStockModel, idField: '_id', type: 'partStock' },
  PDS: { model: null, idField: '_id', type: 'productStock' }, // No productStockModel found
  ADL: { model: auditLogModel, idField: '_id', type: 'auditLog' },
  USR: { model: userModel, idField: '_id', type: 'user' },
  NTY: { model: noteTypeModel, idField: '_id', type: 'noteType' },
  NTE: { model: noteModel, idField: '_id', type: 'note' },
  DSZ: { model: documentSizeModel, idField: '_id', type: 'documentSize' },
  DTP: { model: documentTemplateModel, idField: '_id', type: 'documentTemplate' },
  HST: { model: hostModel, idField: '_id', type: 'host' },
};

// Helper function to build search filter from query parameters
const buildSearchFilter = (params) => {
  const filter = {};
@ -106,7 +64,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
    const suffix = query.substring(4);

    if (delimiter == ':') {
      const prefixEntry = PREFIX_MODEL_MAP[prefix];
      const prefixEntry = getModelByPrefix(prefix);
      if (!prefixEntry || !prefixEntry.model) {
        res.status(400).send({ error: 'Invalid or unsupported prefix' });
        return;
@ -136,7 +94,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
    console.log(queryParams);

    if (Object.keys(queryParams).length > 0) {
      const prefixEntry = PREFIX_MODEL_MAP[prefix];
      const prefixEntry = getModelByPrefix(prefix);
      console.log(prefixEntry);
      if (!prefixEntry || !prefixEntry.model) {
        res.status(400).send({ error: 'Invalid or unsupported prefix' });
@ -166,7 +124,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
    }

    // If no query params and no prefix, search all models
    if (Object.keys(queryParams).length === 0 && (!prefix || !PREFIX_MODEL_MAP[prefix])) {
    if (Object.keys(queryParams).length === 0 && (!prefix || !getModelByPrefix(prefix))) {
      // Search all models for the query string in the 'name' field
      const searchTerm = query;
      if (!searchTerm || searchTerm.length < 3) {
@ -174,7 +132,7 @@ export const getSpotlightRouteHandler = async (req, res) => {
        return;
      }
      // Only use models that are not null
      const allModelEntries = Object.values(PREFIX_MODEL_MAP).filter((entry) => entry.model);
      const allModelEntries = getAllModels().filter((entry) => entry.model);
      // Run all searches in parallel
      const searchPromises = allModelEntries.map(async (entry) => {
        try {

@ -1,11 +1,6 @@
import dotenv from 'dotenv';
import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
import log4js from 'log4js';
import multer from 'multer';
import path from 'path';
import fs from 'fs';
import mongoose from 'mongoose';
import { extractConfigBlock } from '../../utils.js';
import {
  deleteObject,
  editObject,
@ -14,36 +9,13 @@ import {
  listObjectsByProperties,
  newObject,
} from '../../database/database.js';
import mongoose from 'mongoose';

dotenv.config();

const logger = log4js.getLogger('GCodeFiles');
logger.level = process.env.LOG_LEVEL;

// Set storage engine
const gcodeStorage = multer.diskStorage({
  destination: process.env.GCODE_STORAGE,
  filename: async function (req, file, cb) {
    // Retrieve custom file name from request body
    const customFileName = req.params.id || 'default'; // Default to 'default' if not provided

    // Create the final filename ensuring it ends with .gcode
    const finalFilename = `${customFileName}.gcode`;

    // Call callback with the final filename
    cb(null, finalFilename);
  },
});

// Initialise upload
const gcodeUpload = multer({
  storage: gcodeStorage,
  limits: { fileSize: 500000000 }, // 500MB limit
  fileFilter: function (req, file, cb) {
    checkFileType(file, cb);
  },
}).single('gcodeFile'); // The name attribute of the file input in the HTML form

export const listGCodeFilesRouteHandler = async (
  req,
  res,
@ -64,17 +36,16 @@ export const listGCodeFilesRouteHandler = async (
    search,
    sort,
    order,
    populate: 'filament',
    populate: ['filament'],
  });

  console.log('req.user', req.user);
  if (result?.error) {
    logger.error('Error listing gcodefiles.');
    logger.error('Error listing gcodeFiles.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of gcodefiles (Page ${page}, Limit ${limit}). Count: ${result.length}`);
  logger.debug(`List of gcodeFiles (Page ${page}, Limit ${limit}). Count: ${result.length}`);
  res.send(result);
};

@ -92,12 +63,12 @@ export const listGCodeFilesByPropertiesRouteHandler = async (
  });

  if (result?.error) {
    logger.error('Error listing gcodefiles.');
    logger.error('Error listing gcodeFiles.');
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`List of gcodefiles. Count: ${result.length}`);
  logger.debug(`List of gcodeFiles. Count: ${result.length}`);
  res.send(result);
};

@ -106,13 +77,13 @@ export const getGCodeFileRouteHandler = async (req, res) => {
  const result = await getObject({
    model: gcodeFileModel,
    id,
    populate: 'filament',
    populate: ['filament'],
  });
  if (result?.error) {
    logger.warn(`GCode file not found with supplied id.`);
    logger.warn(`GCodeFile not found with supplied id.`);
    return res.status(result.code).send(result);
  }
  logger.debug(`Retrieved gcodefile with ID: ${id}`);
  logger.debug(`Retrieved gcodeFile with ID: ${id}`);
  res.send(result);
};

@ -120,14 +91,13 @@ export const editGCodeFileRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`GCode file with ID: ${id}`);
  logger.trace(`GCodeFile with ID: ${id}`);

  const updateData = {
    updatedAt: new Date(),
    name: req.body.name,
    moonraker: req.body.moonraker,
    tags: req.body.tags,
    vendor: req.body.vendor,
    file: req.body.file,
    filament: req.body.filament,
  };
  // Create audit log before updating
  const result = await editObject({
@ -138,12 +108,12 @@ export const editGCodeFileRouteHandler = async (req, res) => {
  });

  if (result.error) {
    logger.error('Error editing gcode file:', result.error);
    logger.error('Error editing gcodeFile:', result.error);
    res.status(result.code).send(result);
    return;
  }

  logger.debug(`Edited gcode file with ID: ${id}`);
  logger.debug(`Edited gcodeFile with ID: ${id}`);

  res.send(result);
};
@ -151,12 +121,9 @@ export const newGCodeFileRouteHandler = async (req, res) => {
  const newData = {
    updatedAt: new Date(),
    contact: req.body.contact,
    country: req.body.country,
    name: req.body.name,
    website: req.body.website,
    phone: req.body.phone,
    email: req.body.email,
    file: req.body.file,
    filament: req.body.filament,
  };
  const result = await newObject({
    model: gcodeFileModel,
@ -164,11 +131,11 @@ export const newGCodeFileRouteHandler = async (req, res) => {
    user: req.user,
  });
  if (result.error) {
    logger.error('No gcode file created:', result.error);
    logger.error('No gcodeFile created:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`New gcode file with ID: ${result._id}`);
  logger.debug(`New gcodeFile with ID: ${result._id}`);

  res.send(result);
};
@ -177,7 +144,7 @@ export const deleteGCodeFileRouteHandler = async (req, res) => {
  // Get ID from params
  const id = new mongoose.Types.ObjectId(req.params.id);

  logger.trace(`GCode file with ID: ${id}`);
  logger.trace(`GCodeFile with ID: ${id}`);

  const result = await deleteObject({
    model: gcodeFileModel,
@ -185,163 +152,36 @@ export const deleteGCodeFileRouteHandler = async (req, res) => {
    user: req.user,
  });
  if (result.error) {
    logger.error('No gcode file deleted:', result.error);
    logger.error('No gcodeFile deleted:', result.error);
    return res.status(result.code).send(result);
  }

  logger.debug(`Deleted gcode file with ID: ${result._id}`);
  logger.debug(`Deleted gcodeFile with ID: ${result._id}`);

  res.send(result);
};

// Check file type
function checkFileType(file, cb) {
  // Allowed extensions; anchored so that e.g. '.jpg' does not slip through
  const filetypes = /\.(g|gco|gcode)$/;
  // Check ext
  const extname = filetypes.test(path.extname(file.originalname).toLowerCase());

  if (extname) {
    console.log(file);
    return cb(null, true);
  } else {
    cb('Error: .g, .gco, and .gcode files only!');
  }
}

export const getGCodeFileContentRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the gcodeFile with the given id
    const gcodeFile = await gcodeFileModel.findOne({
      _id: id,
    });

    if (!gcodeFile) {
      logger.warn(`GCodeFile not found with supplied id.`);
      return res.status(404).send({ error: 'GCode file not found.' });
    }

    logger.trace(`Returning GCode file contents with ID: ${id}:`);

    const filePath = path.join(process.env.GCODE_STORAGE, gcodeFile.gcodeFileName);

    // Read the file
    fs.readFile(filePath, 'utf8', (err, data) => {
      if (err) {
        if (err.code === 'ENOENT') {
          // File not found
          return res.status(404).send({ error: 'File not found!' });
        } else {
          // Other errors
          return res.status(500).send({ error: 'Error reading file.' });
        }
      }

      // Send the file contents in the response
      res.send(data);
    });
  } catch (error) {
    logger.error('Error fetching GCodeFile:', error);
    res.status(500).send({ error: error.message });
  }
};

export const parseGCodeFileHandler = async (req, res) => {
  try {
    // Use the same upload middleware as the uploadGCodeFileContentRouteHandler
    gcodeUpload(req, res, async (err) => {
      if (err) {
        return res.status(500).send({
          error: err,
        });
      }

      if (req.file == undefined) {
        return res.send({
          message: 'No file selected!',
        });
      }

      try {
        // Get the path to the uploaded file
        const filePath = path.join(req.file.destination, req.file.filename);

        // Read the file content
        const fileContent = fs.readFileSync(filePath, 'utf8');

        // Extract the config block
        const configInfo = extractConfigBlock(fileContent);

        // Return the config as JSON
        res.json(configInfo);

        // Optionally clean up the file after processing if it's not needed
        fs.unlinkSync(filePath);
      } catch (parseError) {
        logger.error('Error parsing GCode file:', parseError);
        res.status(500).send({ error: parseError.message });
      }
    });
  } catch (error) {
    logger.error('Error in parseGCodeFileHandler:', error);
    res.status(500).send({ error: error.message });
  }
};

export const uploadGCodeFileContentRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the gcodeFile with the given id
    const gcodeFile = await gcodeFileModel.findOne({ _id: id });
    if (!gcodeFile) {
      // Error handling
      logger.warn(`GCodeFile not found with supplied id.`);
      return res.status(404).send({ error: 'GCode file not found.' });
    }
    logger.trace(`GCodeFile with ID: ${id}`);
    try {
      gcodeUpload(req, res, async (err) => {
        if (err) {
          res.status(500).send({
            error: err,
          });
        } else {
          if (req.file == undefined) {
            res.send({
              message: 'No file selected!',
            });
          } else {
            // Update the gcodeFile document with the filename and the extracted config
            const result = await gcodeFileModel.updateOne(
              { _id: id },
              {
                $set: {
                  gcodeFileName: req.file.filename,
                },
              }
            );

            if (result.nModified === 0) {
              logger.error('No gcodeFile updated.');
              res.status(500).send({ error: 'No gcodeFiles updated.' });
            }

            res.send({
              status: 'OK',
              file: `${req.file.filename}`,
            });
          }
        }
      });
    } catch (updateError) {
      logger.error('Error updating gcodeFile:', updateError);
      res.status(500).send({ error: updateError.message });
    }
  } catch (fetchError) {
    logger.error('Error fetching gcodeFile:', fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const getGCodeFileStatsRouteHandler = async (req, res) => {
  try {
    const stats = await gcodeFileModel.aggregate([
      {
        $group: {
          _id: '$state.type',
          count: { $sum: 1 },
        },
      },
    ]);

    // Transform the results into a more readable format
    const formattedStats = stats.reduce((acc, curr) => {
      acc[curr._id] = curr.count;
      return acc;
    }, {});

    logger.trace('GCodeFile stats by state:', formattedStats);
    res.send(formattedStats);
  } catch (error) {
    logger.error('Error fetching gcodeFile stats:', error);
    res.status(500).send({ error: error.message });
  }
};
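
For reference, the aggregation in getGCodeFileStatsRouteHandler groups gcodeFile documents by state.type, so the response is a flat object keyed by state name; the keys and counts below are illustrative assumptions, not actual states:

// Example response shape (assumed state names):
// { "queued": 4, "printing": 2, "complete": 17 }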
231
src/services/storage/ceph.js
Normal file
@ -0,0 +1,231 @@
import {
  S3Client,
  HeadBucketCommand,
  CreateBucketCommand,
  PutObjectCommand,
  GetObjectCommand,
  DeleteObjectCommand,
  HeadObjectCommand,
  ListObjectsV2Command,
} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import log4js from 'log4js';
import dotenv from 'dotenv';

dotenv.config();

const logger = log4js.getLogger('CephStorage');
logger.level = process.env.LOG_LEVEL;

// Configure AWS SDK v3 for Ceph (S3-compatible)
const s3Config = {
  credentials: {
    accessKeyId: process.env.CEPH_ACCESS_KEY_ID,
    secretAccessKey: process.env.CEPH_SECRET_ACCESS_KEY,
  },
  endpoint: process.env.CEPH_ENDPOINT, // e.g., 'http://ceph-gateway:7480'
  forcePathStyle: true, // Required for Ceph (renamed from s3ForcePathStyle in SDK v3)
  region: process.env.CEPH_REGION || 'us-east-1',
};

const s3Client = new S3Client(s3Config);

// Default bucket names for different file types
const BUCKETS = {
  FILES: process.env.CEPH_FILES_BUCKET || 'farmcontrol',
};

/**
 * Initialize buckets if they don't exist
 */
export const initializeBuckets = async () => {
  try {
    for (const [type, bucketName] of Object.entries(BUCKETS)) {
      try {
        await s3Client.send(new HeadBucketCommand({ Bucket: bucketName }));
        logger.debug(`Bucket ${bucketName} already exists`);
      } catch (error) {
        if (error.name === 'NotFound' || error.$metadata?.httpStatusCode === 404) {
          await s3Client.send(new CreateBucketCommand({ Bucket: bucketName }));
          logger.info(`Created bucket: ${bucketName}`);
        } else {
          logger.error(`Error checking bucket ${bucketName}:`, error);
        }
      }
    }
  } catch (error) {
    logger.error('Error initializing buckets:', error);
    throw error;
  }
};

/**
 * Upload a file to Ceph
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key (file path)
 * @param {Buffer} body - File content
 * @param {string} contentType - MIME type
 * @param {Object} metadata - Additional metadata
 * @returns {Promise<Object>} Upload result
 */
export const uploadFile = async (bucket, key, body, contentType, metadata = {}) => {
  try {
    const params = {
      Bucket: bucket,
      Key: key,
      Body: body,
      ContentType: contentType,
      Metadata: metadata,
    };

    await s3Client.send(new PutObjectCommand(params));
    const result = { Location: `${process.env.CEPH_ENDPOINT}/${bucket}/${key}` };
    logger.debug(`File uploaded successfully: ${key} to bucket ${bucket}`);
    return result;
  } catch (error) {
    logger.error(`Error uploading file ${key} to bucket ${bucket}:`, error);
    throw error;
  }
};

/**
 * Download a file from Ceph
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key (file path)
 * @returns {Promise<Buffer>} File content
 */
export const downloadFile = async (bucket, key) => {
  try {
    const params = {
      Bucket: bucket,
      Key: key,
    };

    const result = await s3Client.send(new GetObjectCommand(params));
    logger.debug(`File downloaded successfully: ${key} from bucket ${bucket}`);
    return result.Body;
  } catch (error) {
    if (error.name === 'NotFound' || error.$metadata?.httpStatusCode === 404) {
      logger.warn(`File not found: ${key} in bucket ${bucket}`);
      throw new Error('File not found');
    }
    logger.error(`Error downloading file ${key} from bucket ${bucket}:`, error);
    throw error;
  }
};

/**
 * Delete a file from Ceph
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key (file path)
 * @returns {Promise<Object>} Delete result
 */
export const deleteFile = async (bucket, key) => {
  try {
    const params = {
      Bucket: bucket,
      Key: key,
    };

    const result = await s3Client.send(new DeleteObjectCommand(params));
    logger.debug(`File deleted successfully: ${key} from bucket ${bucket}`);
    return result;
  } catch (error) {
    logger.error(`Error deleting file ${key} from bucket ${bucket}:`, error);
    throw error;
  }
};

/**
 * Check if a file exists in Ceph
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key (file path)
 * @returns {Promise<boolean>} True if file exists
 */
export const fileExists = async (bucket, key) => {
  try {
    await s3Client.send(new HeadObjectCommand({ Bucket: bucket, Key: key }));
    return true;
  } catch (error) {
    if (error.name === 'NotFound' || error.$metadata?.httpStatusCode === 404) {
      return false;
    }
    logger.error(`Error checking file existence ${key} in bucket ${bucket}:`, error);
    throw error;
  }
};

/**
 * List files in a bucket with optional prefix
 * @param {string} bucket - Bucket name
 * @param {string} prefix - Optional prefix to filter files
 * @returns {Promise<Array>} List of file objects
 */
export const listFiles = async (bucket, prefix = '') => {
  try {
    const params = {
      Bucket: bucket,
      Prefix: prefix,
    };

    const result = await s3Client.send(new ListObjectsV2Command(params));
    // Contents is undefined when the bucket/prefix is empty
    const contents = result.Contents || [];
    logger.debug(`Listed ${contents.length} files in bucket ${bucket}`);
    return contents;
  } catch (error) {
    logger.error(`Error listing files in bucket ${bucket}:`, error);
    throw error;
  }
};

/**
 * Get file metadata from Ceph
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key (file path)
 * @returns {Promise<Object>} File metadata
 */
export const getFileMetadata = async (bucket, key) => {
  try {
    const params = {
      Bucket: bucket,
      Key: key,
    };

    const result = await s3Client.send(new HeadObjectCommand(params));
    logger.debug(`Retrieved metadata for file: ${key} in bucket ${bucket}`);
    return result;
  } catch (error) {
    if (error.name === 'NotFound' || error.$metadata?.httpStatusCode === 404) {
      logger.warn(`File not found: ${key} in bucket ${bucket}`);
      throw new Error('File not found');
    }
    logger.error(`Error getting metadata for file ${key} in bucket ${bucket}:`, error);
    throw error;
  }
};

/**
 * Generate a presigned URL for file access
 * @param {string} bucket - Bucket name
 * @param {string} key - Object key (file path)
 * @param {number} expiresIn - URL expiration time in seconds (default: 3600)
 * @returns {Promise<string>} Presigned URL
 */
export const getPresignedUrl = async (bucket, key, expiresIn = 3600) => {
  try {
    const params = {
      Bucket: bucket,
      Key: key,
    };

    const url = await getSignedUrl(s3Client, new GetObjectCommand(params), { expiresIn });
    logger.debug(`Generated presigned URL for file: ${key} in bucket ${bucket}`);
    return url;
  } catch (error) {
    logger.error(`Error generating presigned URL for file ${key} in bucket ${bucket}:`, error);
    throw error;
  }
};

// Export bucket constants for use in other modules
export { BUCKETS };
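
A minimal usage sketch for this storage module, assuming the CEPH_* environment variables are set and the default bucket exists; the key and payload are placeholders:

import { uploadFile, getPresignedUrl, BUCKETS } from './services/storage/ceph.js';

const demo = async () => {
  // Upload a small text object, then hand out a one-hour download link.
  const body = Buffer.from('hello from farmcontrol');
  await uploadFile(BUCKETS.FILES, 'examples/hello.txt', body, 'text/plain', { source: 'demo' });
  const url = await getPresignedUrl(BUCKETS.FILES, 'examples/hello.txt', 3600);
  console.log(url);
};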
72
src/utils.js
@ -1,5 +1,6 @@
import { ObjectId } from 'mongodb';
import { auditLogModel } from './schemas/management/auditlog.schema.js';
import exifr from 'exifr';
import { etcdServer } from './database/etcd.js';
import { natsServer } from './database/nats.js';

@ -77,7 +78,7 @@ function convertToCamelCase(obj) {
  return result;
}

function extractConfigBlock(fileContent, useCamelCase = true) {
function extractGCodeConfigBlock(fileContent, useCamelCase = true) {
  const configObject = {};

  // Extract header information
@ -480,10 +481,74 @@ function convertPropertiesString(properties) {
  return properties;
}

async function getFileMeta(file) {
  try {
    if (!file) return {};

    const originalName = file.originalname || '';
    const lowerName = originalName.toLowerCase();

    if (lowerName.endsWith('.g') || lowerName.endsWith('.gcode')) {
      const content = file.buffer ? file.buffer.toString('utf8') : '';
      if (!content) return {};
      return extractGCodeConfigBlock(content);
    }

    // Image EXIF metadata
    if (file.mimetype && file.mimetype.startsWith('image/') && file.buffer) {
      try {
        const exif = await exifr.parse(file.buffer);
        return exif || {};
      } catch (_) {
        // Ignore EXIF parse errors and fall through
      }
    }

    return {};
  } catch (_) {
    return {};
  }
}

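A short sketch of calling getFileMeta with a multer-style in-memory upload; the file object below is hand-built for illustration:

// getFileMeta only inspects originalname, mimetype, and buffer, so a plain object works here.
const fakeUpload = {
  originalname: 'benchy.gcode',
  mimetype: 'application/octet-stream',
  buffer: Buffer.from('; generated by slicer\n'),
};
getFileMeta(fakeUpload).then((meta) => {
  // Parsed slicer config for .g/.gcode uploads, EXIF data for images, {} otherwise.
  console.log(meta);
});
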
function modelHasRef(model, refName) {
  if (!model || !model.schema) {
    return false;
  }

  let hasRef = false;
  model.schema.eachPath((pathName, schemaType) => {
    const directRef = schemaType?.options?.ref;
    const arrayRef = schemaType?.caster?.options?.ref;
    const ref = directRef || arrayRef;
    if (ref === refName) {
      hasRef = true;
    }
  });

  return hasRef;
}

function getFieldsByRef(model, refName) {
  if (!model || !model.schema) {
    return [];
  }

  const fields = [];
  model.schema.eachPath((pathName, schemaType) => {
    const directRef = schemaType?.options?.ref;
    const arrayRef = schemaType?.caster?.options?.ref;
    const ref = directRef || arrayRef;
    if (ref === refName) {
      fields.push(pathName);
    }
  });

  return fields;
}

export {
  parseFilter,
  convertToCamelCase,
  extractGCodeConfigBlock,
  newAuditLog,
  editAuditLog,
  deleteAuditLog,
@ -495,4 +560,7 @@ export {
  distributeDelete,
  getFilter,
  convertPropertiesString,
  getFileMeta,
  modelHasRef,
  getFieldsByRef,
};
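
A quick sketch of how the new schema-introspection helpers combine with Mongoose populate; the model, ref name, and id below are placeholders:

import { modelHasRef, getFieldsByRef } from './utils.js';
import { partModel } from './schemas/management/part.schema.js';

const loadWithRefs = async (id) => {
  // Populate every field on partModel that references a hypothetical 'file' model, if any exist.
  let query = partModel.findById(id).lean();
  if (modelHasRef(partModel, 'file')) {
    for (const field of getFieldsByRef(partModel, 'file')) {
      query = query.populate(field);
    }
  }
  return query;
};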
870082
uploads/default.gcode
Normal file
File diff suppressed because it is too large
BIN
uploads/default.jpg
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.3 MiB
1
uploads/default.svg
Normal file
@ -0,0 +1 @@
<svg height="2500" viewBox="0 0 78.799 96" width="2052" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><linearGradient id="a" gradientUnits="userSpaceOnUse" x1="16.942" x2="85.671" y1="83.36" y2="89.583"><stop offset="0" stop-color="#f32b44"/><stop offset=".6" stop-color="#a4070a"/></linearGradient><linearGradient id="b"><stop offset="0" stop-opacity=".4"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" gradientUnits="userSpaceOnUse" x1="63.515" x2="33.003" xlink:href="#b" y1="87.298" y2="84.535"/><linearGradient id="d" gradientUnits="userSpaceOnUse" x1="44.738" x2="-5.901" y1="-3.312" y2="71.527"><stop offset="0" stop-color="#800600"/><stop offset=".6" stop-color="#c72127"/><stop offset=".728" stop-color="#c13959"/><stop offset=".847" stop-color="#bc4b81"/><stop offset=".942" stop-color="#b95799"/><stop offset="1" stop-color="#b85ba2"/></linearGradient><linearGradient id="e" gradientUnits="userSpaceOnUse" x1="45.823" x2="35.099" xlink:href="#b" y1="-4.81" y2="11.039"/><linearGradient id="f" gradientUnits="userSpaceOnUse" x1="61.486" x2="61.486" y1="-4.887" y2="88.781"><stop offset="0" stop-color="#ffb900"/><stop offset=".166" stop-color="#ef8400"/><stop offset=".313" stop-color="#e25c01"/><stop offset=".429" stop-color="#db4401"/><stop offset=".5" stop-color="#d83b01"/></linearGradient><path d="m19.143 75.558c-2.724 0-4.945 2.121-4.945 4.753 0 1.789 1.031 3.322 2.565 4.14l19.118 10.246a10.11 10.11 0 0 0 4.969 1.303c1.164 0 2.275-.204 3.306-.562l6.531-1.814v-18.091c.027.025-31.519.025-31.545.025z" fill="url(#a)"/><path d="m19.143 75.558c-2.724 0-4.945 2.121-4.945 4.753 0 1.789 1.031 3.322 2.565 4.14l19.118 10.246a10.11 10.11 0 0 0 4.969 1.303c1.164 0 2.275-.204 3.306-.562l6.531-1.814v-18.091c.027.025-31.519.025-31.545.025z" fill="url(#c)"/><path d="m43.736.383a9.968 9.968 0 0 0 -2.777-.383c-1.56 0-3.12.307-4.522 1.022-.29.128-31.096 16.864-31.096 16.864-.423.205-.82.46-1.19.716-.052.025-.079.051-.132.077-.238.178-.45.357-.687.536-.106.077-.212.18-.291.256-.132.127-.265.255-.37.383-.37.383-1.005 1.2-1.005 1.2a9.15 9.15 0 0 0 -1.666 5.291v44.46c0 2.633 2.221 4.754 4.945 4.754.687 0 1.322-.128 1.904-.384l8.805-4.778c1.586-.766 2.856-2.07 3.517-3.68.158-.332.29-.74.37-1.15.026-.102.053-.23.053-.332 0-.05.026-.127.026-.178.027-.18.053-.384.053-.562 0-.154.027-.282.027-.435v-23.662-7.385c0-2.07.925-3.935 2.38-5.238 0 0-.688.613 0 0 .687-.613 1.586-1.15 2.644-1.507 1.057-.384 26.072-9.122 26.072-9.122v-14.744z" fill="url(#d)"/><path d="m43.736.383a9.968 9.968 0 0 0 -2.777-.383c-1.56 0-3.12.307-4.522 1.022-.29.128-31.096 16.864-31.096 16.864-.423.205-.82.46-1.19.716-.052.025-.079.051-.132.077-.238.178-.45.357-.687.536-.106.077-.212.18-.291.256-.132.127-.265.255-.37.383-.37.383-1.005 1.2-1.005 1.2a9.15 9.15 0 0 0 -1.666 5.291v44.46c0 2.633 2.221 4.754 4.945 4.754.687 0 1.322-.128 1.904-.384l8.805-4.778c1.586-.766 2.856-2.07 3.517-3.68.158-.332.29-.74.37-1.15.026-.102.053-.23.053-.332 0-.05.026-.127.026-.178.027-.18.053-.384.053-.562 0-.154.027-.282.027-.435v-23.662-7.385c0-2.07.925-3.935 2.38-5.238 0 0-.688.613 0 0 .687-.613 1.586-1.15 2.644-1.507 1.057-.384 26.072-9.122 26.072-9.122v-14.744z" fill="url(#e)"/><path d="m71.898 8.35-27.738-7.843c4.019 1.508 6.53 4.906 6.53 9.046 0 0-.025 75.2 0 77.014.027 4.088-2.67 7.589-6.53 8.892.846-.23 27.738-7.717 27.738-7.717 3.992-1.226 6.875-4.804 6.875-9.07v-61.252c.026-4.24-2.883-7.844-6.875-9.07z" fill="url(#f)"/></svg>
After Width: | Height: | Size: 3.4 KiB