Removed development logging.

parent c4f2de910a
commit dea6a90b68
@@ -1,11 +1,10 @@
-import mongoose from "mongoose";
-import { userModel } from "../schemas/user.schema.js";
-import { dbConnect } from "../mongo/index.js";
+import mongoose from 'mongoose';
+import { userModel } from '../schemas/user.schema.js';
+import { dbConnect } from '../mongo/index.js';
 
 async function clear() {
   dbConnect();
   await userModel.deleteMany({});
-  console.log("DB cleared");
 }
 
 clear().then(() => {
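If a confirmation is still wanted when the clear script is run by hand, one option is to keep the helper silent and let the entry point decide whether to report. The following is a minimal sketch under assumptions, not code from this repository: it reuses the script's own imports, relies on Mongoose's deleteMany result exposing deletedCount (true in current Mongoose releases), and gates the message behind a hypothetical VERBOSE environment variable.

import { userModel } from '../schemas/user.schema.js';
import { dbConnect } from '../mongo/index.js';

async function clear() {
  dbConnect();
  // Return the Mongoose result instead of logging inside the helper.
  return userModel.deleteMany({});
}

clear().then(({ deletedCount }) => {
  // Only the CLI entry point reports, and only when explicitly asked to.
  if (process.env.VERBOSE) console.info(`DB cleared: ${deletedCount} user(s) removed`);
});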
@@ -373,10 +373,6 @@ export const listObjects = async ({
     sort = 'createdAt';
   }
 
-  if (filter) {
-    console.log('filter', filter);
-  }
-
   // Translate any key ending with ._id to remove the ._id suffix for Mongoose
   Object.keys(filter).forEach((key) => {
     if (key.endsWith('._id')) {
@@ -479,8 +475,6 @@ function nestGroups(groups, props, filter, idx = 0) {
     // Check if any group in this key matches the filter (by _id or name)
     const matches = groupList.filter((group) => {
       const { filterVals } = getKeyAndFilterVals(group._id[prop]);
-      console.log('filterVals', filterVals);
-      console.log('filterValue', filterValue);
       return filterVals.some((val) => val?.toString() === filterValue);
     });
 
@@ -524,7 +518,6 @@ export const listObjectsByProperties = async ({
   populate,
 }) => {
   try {
-    console.log('Props', properties);
     const propertiesPresent = !(
       !Array.isArray(properties) ||
       properties.length === 0 ||
@@ -596,7 +589,6 @@ export const listObjectsByProperties = async ({
 
       // Run aggregation
       const results = await model.aggregate(pipeline);
-      console.log('results', results);
       return nestGroups(results, properties, filter);
     } else {
       // If no properties specified, just return all objects without grouping
@@ -54,8 +54,6 @@ orderItemSchema.statics.stats = async function () {
     rollupConfigs: rollupConfigs,
   });
 
-  console.log(results);
-
   // Transform the results to match the expected format
   return results;
 };
@@ -132,8 +130,6 @@ orderItemSchema.statics.recalculate = async function (orderItem, user) {
     ],
   });
 
-  console.log('rollupResults', rollupResults);
-
   const totals = rollupResults.orderTotals || {};
   const totalAmount = totals.totalAmount.sum?.toFixed(2) || 0;
   const totalAmountWithTax = totals.totalAmountWithTax.sum?.toFixed(2) || 0;
@@ -92,8 +92,6 @@ printerSchema.statics.stats = async function () {
     rollupConfigs: rollupConfigs,
   });
 
-  console.log(results);
-
   // Transform the results to match the expected format
   return results;
 };
@@ -121,8 +121,6 @@ const isAuthenticated = async (req, res, next) => {
     }
   }
 
-  console.log('Checking host authentication', req.headers);
-
   const hostId = req.headers['x-host-id'];
   const authCode = req.headers['x-auth-code'];
   if (hostId && authCode) {
@@ -135,7 +133,6 @@ const isAuthenticated = async (req, res, next) => {
   }
 
   // Fallback to session-based authentication
-  console.log('Using session token');
   if (req.session && req.session['keycloak-token']) {
     const sessionToken = req.session['keycloak-token'];
     if (sessionToken.expires_at > new Date().getTime()) {
@@ -36,8 +36,6 @@ export const listAuditLogsRouteHandler = async (
     delete filter['owner._id'];
   }
 
-  console.log('sort: ', { [sort]: sortOrder });
-
   // Use find with population and filter
   let query = auditLogModel
     .find(filter)
@@ -272,7 +272,6 @@ function checkFileType(file, cb) {
   const allowedTypes = /.*/; // Allow all file types
 
   if (allowedTypes.test(file.mimetype)) {
-    console.log(file);
     return cb(null, true);
   } else {
     cb('Error: File type not allowed!');
@@ -36,8 +36,6 @@ export const listMaterialsRouteHandler = async (
     aggregateCommand.push({ $skip: skip });
     aggregateCommand.push({ $limit: Number(limit) });
 
-    console.log(aggregateCommand);
-
     material = await materialModel.aggregate(aggregateCommand);
 
     logger.trace(
@@ -126,11 +126,8 @@ export const getSpotlightRouteHandler = async (req, res) => {
     return;
   }
 
-  console.log(queryParams);
-
   if (Object.keys(queryParams).length > 0) {
     const prefixEntry = PREFIX_MODEL_MAP[prefix];
-    console.log(prefixEntry);
     if (!prefixEntry || !prefixEntry.model) {
       res.status(400).send({ error: 'Invalid or unsupported prefix' });
       return;
@@ -107,11 +107,8 @@ export const getSpotlightRouteHandler = async (req, res) => {
     return;
   }
 
-  console.log(queryParams);
-
   if (Object.keys(queryParams).length > 0) {
     const prefixEntry = getModelByPrefix(prefix);
-    console.log(prefixEntry);
     if (!prefixEntry || !prefixEntry.model) {
       res.status(400).send({ error: 'Invalid or unsupported prefix' });
       return;
@@ -108,7 +108,6 @@ export const editGCodeFileRouteHandler = async (req, res) => {
     const id = new mongoose.Types.ObjectId(req.params.id);
 
     logger.trace(`GCodeFile with ID: ${id}`);
-    console.log('REQ.BODY', req.body);
 
     const updateData = {
       updatedAt: new Date(),
@@ -161,7 +161,6 @@ export const deleteJobRouteHandler = async (req, res) => {
 };
 
 export const getJobStatsRouteHandler = async (req, res) => {
-  console.log('Getting job stats');
   const result = await getModelStats({ model: jobModel });
   if (result?.error) {
     logger.error('Error fetching job stats:', result.error);
@@ -170,7 +170,6 @@ export const deletePrinterRouteHandler = async (req, res) => {
 
 export const getPrinterStatsRouteHandler = async (req, res) => {
   const result = await getModelStats({ model: printerModel });
-  console.log(result);
   if (!result) {
     logger.error('Error fetching printer stats:', result.error);
     return res.status(result.code).send(result);
@@ -561,8 +561,6 @@ function getFilter(query, allowedFilters, parse = true) {
   let filter = {};
   for (const [key, value] of Object.entries(query)) {
     if (allowedFilters.includes(key)) {
-      console.log('key', key);
-      console.log('value', value);
       const parsedFilter = parse ? parseFilter(key, value) : { [key]: value };
       filter = { ...filter, ...parsedFilter };
     }
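The broader pattern behind these removals: the surviving context lines already use a shared logger (logger.trace, logger.error), so debug output that is still occasionally useful can sit behind a log level instead of being deleted and re-added. The following is a minimal, self-contained sketch of that pattern, not code from this repository; pino is used purely as an illustrative stand-in for whatever logger the project actually wires up, and the simplified filter loop mirrors the getFilter hunk above without its parseFilter helper.

import pino from 'pino';

// Stand-in logger; the repo's own logger object is assumed to expose similar level methods.
const logger = pino({ level: process.env.LOG_LEVEL || 'info' });

function getFilterDebugSketch(query, allowedFilters) {
  let filter = {};
  for (const [key, value] of Object.entries(query)) {
    if (allowedFilters.includes(key)) {
      // Silent unless LOG_LEVEL=trace, so the line never needs to be stripped for release.
      logger.trace({ key, value }, 'accepted filter key');
      filter[key] = value;
    }
  }
  return filter;
}

Run with LOG_LEVEL=trace to see the per-key output; at the default level the trace call is effectively a no-op.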