diff --git a/index.js b/index.js
deleted file mode 100644
index 67c9740..0000000
--- a/index.js
+++ /dev/null
@@ -1,86 +0,0 @@
-import bcrypt from "bcrypt";
-import dotenv from "dotenv";
-import { userModel } from "../../schemas/user.schema.js";
-import { printerModel } from "../../schemas/printer.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-
-dotenv.config();
-
-const logger = log4js.getLogger("Printers");
-logger.level = process.env.LOG_LEVEL;
-
-export const listPrintersRouteHandler = async (
- req,
- res,
- page = 1,
- limit = 25
-) => {
- try {
- // Calculate the skip value based on the page number and limit
- const skip = (page - 1) * limit;
-
- // Fetch users with pagination
- const printers = await printerModel.find().skip(skip).limit(limit);
-
- logger.trace(`List of printers (Page ${page}, Limit ${limit}):`);
- res.send(printers);
- } catch (error) {
- logger.error("Error listing users:", error);
- res.status(500).send({ error: error });
- }
-};
-
-export const getPrinterRouteHandler = async (req, res) => {
- const remoteAddress = req.params.remoteAddress;
-
- try {
- // Fetch the printer with the given remote address
- const printer = await printerModel.findOne({ remoteAddress });
-
- if (!printer) {
- logger.warn(`Printer with remote address ${remoteAddress} not found.`);
- return res.status(404).send({ error: "Printer not found" });
- }
-
- logger.trace(`Printer with remote address ${remoteAddress}:`, printer);
- res.send(printer);
- } catch (error) {
- logger.error("Error fetching printer:", error);
- res.status(500).send({ error: error.message });
- }
-};
-
-export const editPrinterRouteHandler = async (req, res) => {
- const remoteAddress = req.params.remoteAddress;
- const { friendlyName } = req.body;
-
- try {
- // Fetch the printer with the given remote address
- const printer = await printerModel.findOne({ remoteAddress });
-
- if (!printer) {
- logger.warn(`Printer with remote address ${remoteAddress} not found.`);
- return res.status(404).send({ error: "Printer not found" });
- }
-
- logger.trace(`Editing printer with remote address ${remoteAddress}:`, printer);
- try {
- const result = await printerModel.updateOne(
- { remoteAddress: remoteAddress },
- { $set: req.body }
- );
- if (result.nModified === 0) {
- logger.error("No printers updated.");
- res.status(500).send({ error: "No printers updated." });
- }
- } catch (updateError) {
- logger.error("Error updating printer:", updateError);
- res.status(500).send({ error: updateError.message });
- }
- res.send("OK");
- } catch (fetchError) {
- logger.error("Error fetching printer:", fetchError);
- res.status(500).send({ error: fetchError.message });
- }
-};
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 3096f0b..47d68a6 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -48,6 +48,10 @@
"@babel/plugin-proposal-object-rest-spread": "^7.18.0",
"@babel/preset-env": "^7.18.2",
"@babel/register": "^7.17.7",
+ "eslint": "^8.57.1",
+ "eslint-config-prettier": "^10.1.5",
+ "eslint-plugin-prettier": "^5.5.1",
+ "prettier": "^3.6.2",
"sequelize-cli": "^6.4.1",
"standard": "^17.1.0"
}
@@ -2820,9 +2824,9 @@
}
},
"node_modules/@eslint-community/regexpp": {
- "version": "4.11.0",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.0.tgz",
- "integrity": "sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==",
+ "version": "4.12.1",
+ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
+ "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2883,9 +2887,9 @@
}
},
"node_modules/@eslint/js": {
- "version": "8.57.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz",
- "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==",
+ "version": "8.57.1",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz",
+ "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2981,14 +2985,14 @@
"license": "MIT"
},
"node_modules/@humanwhocodes/config-array": {
- "version": "0.11.14",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz",
- "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==",
+ "version": "0.13.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz",
+ "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==",
"deprecated": "Use @eslint/config-array instead",
"dev": true,
"license": "Apache-2.0",
"dependencies": {
- "@humanwhocodes/object-schema": "^2.0.2",
+ "@humanwhocodes/object-schema": "^2.0.3",
"debug": "^4.3.1",
"minimatch": "^3.0.5"
},
@@ -3361,6 +3365,19 @@
"node": ">=14"
}
},
+ "node_modules/@pkgr/core": {
+ "version": "0.2.7",
+ "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.7.tgz",
+ "integrity": "sha512-YLT9Zo3oNPJoBjBc4q8G2mjU4tqIbf5CEOORbUUr48dCD9q3umJ3IPlVqOqDakPfd2HuwccBaqlGhN4Gmr5OWg==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "^12.20.0 || ^14.18.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/pkgr"
+ }
+ },
"node_modules/@rc-component/async-validator": {
"version": "5.0.4",
"resolved": "https://registry.npmjs.org/@rc-component/async-validator/-/async-validator-5.0.4.tgz",
@@ -3510,6 +3527,13 @@
"react-dom": ">=16.9.0"
}
},
+ "node_modules/@rtsao/scc": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz",
+ "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/@simplewebauthn/server": {
"version": "10.0.0",
"resolved": "https://registry.npmjs.org/@simplewebauthn/server/-/server-10.0.0.tgz",
@@ -4327,9 +4351,9 @@
}
},
"node_modules/@ungap/structured-clone": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz",
- "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==",
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz",
+ "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==",
"dev": true,
"license": "ISC"
},
@@ -4353,9 +4377,9 @@
}
},
"node_modules/acorn": {
- "version": "8.12.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz",
- "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==",
+ "version": "8.15.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
+ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"dev": true,
"license": "MIT",
"bin": {
@@ -4577,14 +4601,14 @@
}
},
"node_modules/array-buffer-byte-length": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz",
- "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz",
+ "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.5",
- "is-array-buffer": "^3.0.4"
+ "call-bound": "^1.0.3",
+ "is-array-buffer": "^3.0.5"
},
"engines": {
"node": ">= 0.4"
@@ -4600,18 +4624,20 @@
"license": "MIT"
},
"node_modules/array-includes": {
- "version": "3.1.8",
- "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz",
- "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==",
+ "version": "3.1.9",
+ "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz",
+ "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.4",
"define-properties": "^1.2.1",
- "es-abstract": "^1.23.2",
- "es-object-atoms": "^1.0.0",
- "get-intrinsic": "^1.2.4",
- "is-string": "^1.0.7"
+ "es-abstract": "^1.24.0",
+ "es-object-atoms": "^1.1.1",
+ "get-intrinsic": "^1.3.0",
+ "is-string": "^1.1.1",
+ "math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
@@ -4648,18 +4674,19 @@
}
},
"node_modules/array.prototype.findlastindex": {
- "version": "1.2.5",
- "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz",
- "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==",
+ "version": "1.2.6",
+ "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz",
+ "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.4",
"define-properties": "^1.2.1",
- "es-abstract": "^1.23.2",
+ "es-abstract": "^1.23.9",
"es-errors": "^1.3.0",
- "es-object-atoms": "^1.0.0",
- "es-shim-unscopables": "^1.0.2"
+ "es-object-atoms": "^1.1.1",
+ "es-shim-unscopables": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
@@ -4669,16 +4696,16 @@
}
},
"node_modules/array.prototype.flat": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz",
- "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==",
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz",
+ "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2",
- "define-properties": "^1.2.0",
- "es-abstract": "^1.22.1",
- "es-shim-unscopables": "^1.0.0"
+ "call-bind": "^1.0.8",
+ "define-properties": "^1.2.1",
+ "es-abstract": "^1.23.5",
+ "es-shim-unscopables": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -4688,16 +4715,16 @@
}
},
"node_modules/array.prototype.flatmap": {
- "version": "1.3.2",
- "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz",
- "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==",
+ "version": "1.3.3",
+ "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz",
+ "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2",
- "define-properties": "^1.2.0",
- "es-abstract": "^1.22.1",
- "es-shim-unscopables": "^1.0.0"
+ "call-bind": "^1.0.8",
+ "define-properties": "^1.2.1",
+ "es-abstract": "^1.23.5",
+ "es-shim-unscopables": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -4746,20 +4773,19 @@
}
},
"node_modules/arraybuffer.prototype.slice": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz",
- "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==",
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz",
+ "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"array-buffer-byte-length": "^1.0.1",
- "call-bind": "^1.0.5",
+ "call-bind": "^1.0.8",
"define-properties": "^1.2.1",
- "es-abstract": "^1.22.3",
- "es-errors": "^1.2.1",
- "get-intrinsic": "^1.2.3",
- "is-array-buffer": "^3.0.4",
- "is-shared-array-buffer": "^1.0.2"
+ "es-abstract": "^1.23.5",
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.6",
+ "is-array-buffer": "^3.0.4"
},
"engines": {
"node": ">= 0.4"
@@ -4813,6 +4839,16 @@
"integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==",
"license": "MIT"
},
+ "node_modules/async-function": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz",
+ "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
@@ -5188,16 +5224,16 @@
}
},
"node_modules/call-bind": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
- "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
+ "version": "1.0.8",
+ "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz",
+ "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==",
+ "dev": true,
"license": "MIT",
"dependencies": {
+ "call-bind-apply-helpers": "^1.0.0",
"es-define-property": "^1.0.0",
- "es-errors": "^1.3.0",
- "function-bind": "^1.1.2",
"get-intrinsic": "^1.2.4",
- "set-function-length": "^1.2.1"
+ "set-function-length": "^1.2.2"
},
"engines": {
"node": ">= 0.4"
@@ -5219,6 +5255,22 @@
"node": ">= 0.4"
}
},
+ "node_modules/call-bound": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
+ "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bind-apply-helpers": "^1.0.2",
+ "get-intrinsic": "^1.3.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -5609,9 +5661,9 @@
}
},
"node_modules/cross-spawn": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
- "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
+ "version": "7.0.6",
+ "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+ "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
"license": "MIT",
"dependencies": {
"path-key": "^3.1.0",
@@ -5787,15 +5839,15 @@
}
},
"node_modules/data-view-buffer": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz",
- "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz",
+ "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.6",
+ "call-bound": "^1.0.3",
"es-errors": "^1.3.0",
- "is-data-view": "^1.0.1"
+ "is-data-view": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -5805,31 +5857,31 @@
}
},
"node_modules/data-view-byte-length": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz",
- "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz",
+ "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bound": "^1.0.3",
"es-errors": "^1.3.0",
- "is-data-view": "^1.0.1"
+ "is-data-view": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
- "url": "https://github.com/sponsors/ljharb"
+ "url": "https://github.com/sponsors/inspect-js"
}
},
"node_modules/data-view-byte-offset": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz",
- "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==",
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz",
+ "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.6",
+ "call-bound": "^1.0.2",
"es-errors": "^1.3.0",
"is-data-view": "^1.0.1"
},
@@ -5899,6 +5951,7 @@
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
"integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"es-define-property": "^1.0.0",
@@ -6213,58 +6266,66 @@
}
},
"node_modules/es-abstract": {
- "version": "1.23.3",
- "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz",
- "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==",
+ "version": "1.24.0",
+ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz",
+ "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "array-buffer-byte-length": "^1.0.1",
- "arraybuffer.prototype.slice": "^1.0.3",
+ "array-buffer-byte-length": "^1.0.2",
+ "arraybuffer.prototype.slice": "^1.0.4",
"available-typed-arrays": "^1.0.7",
- "call-bind": "^1.0.7",
- "data-view-buffer": "^1.0.1",
- "data-view-byte-length": "^1.0.1",
- "data-view-byte-offset": "^1.0.0",
- "es-define-property": "^1.0.0",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.4",
+ "data-view-buffer": "^1.0.2",
+ "data-view-byte-length": "^1.0.2",
+ "data-view-byte-offset": "^1.0.1",
+ "es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
- "es-object-atoms": "^1.0.0",
- "es-set-tostringtag": "^2.0.3",
- "es-to-primitive": "^1.2.1",
- "function.prototype.name": "^1.1.6",
- "get-intrinsic": "^1.2.4",
- "get-symbol-description": "^1.0.2",
- "globalthis": "^1.0.3",
- "gopd": "^1.0.1",
+ "es-object-atoms": "^1.1.1",
+ "es-set-tostringtag": "^2.1.0",
+ "es-to-primitive": "^1.3.0",
+ "function.prototype.name": "^1.1.8",
+ "get-intrinsic": "^1.3.0",
+ "get-proto": "^1.0.1",
+ "get-symbol-description": "^1.1.0",
+ "globalthis": "^1.0.4",
+ "gopd": "^1.2.0",
"has-property-descriptors": "^1.0.2",
- "has-proto": "^1.0.3",
- "has-symbols": "^1.0.3",
+ "has-proto": "^1.2.0",
+ "has-symbols": "^1.1.0",
"hasown": "^2.0.2",
- "internal-slot": "^1.0.7",
- "is-array-buffer": "^3.0.4",
+ "internal-slot": "^1.1.0",
+ "is-array-buffer": "^3.0.5",
"is-callable": "^1.2.7",
- "is-data-view": "^1.0.1",
+ "is-data-view": "^1.0.2",
"is-negative-zero": "^2.0.3",
- "is-regex": "^1.1.4",
- "is-shared-array-buffer": "^1.0.3",
- "is-string": "^1.0.7",
- "is-typed-array": "^1.1.13",
- "is-weakref": "^1.0.2",
- "object-inspect": "^1.13.1",
+ "is-regex": "^1.2.1",
+ "is-set": "^2.0.3",
+ "is-shared-array-buffer": "^1.0.4",
+ "is-string": "^1.1.1",
+ "is-typed-array": "^1.1.15",
+ "is-weakref": "^1.1.1",
+ "math-intrinsics": "^1.1.0",
+ "object-inspect": "^1.13.4",
"object-keys": "^1.1.1",
- "object.assign": "^4.1.5",
- "regexp.prototype.flags": "^1.5.2",
- "safe-array-concat": "^1.1.2",
- "safe-regex-test": "^1.0.3",
- "string.prototype.trim": "^1.2.9",
- "string.prototype.trimend": "^1.0.8",
+ "object.assign": "^4.1.7",
+ "own-keys": "^1.0.1",
+ "regexp.prototype.flags": "^1.5.4",
+ "safe-array-concat": "^1.1.3",
+ "safe-push-apply": "^1.0.0",
+ "safe-regex-test": "^1.1.0",
+ "set-proto": "^1.0.0",
+ "stop-iteration-iterator": "^1.1.0",
+ "string.prototype.trim": "^1.2.10",
+ "string.prototype.trimend": "^1.0.9",
"string.prototype.trimstart": "^1.0.8",
- "typed-array-buffer": "^1.0.2",
- "typed-array-byte-length": "^1.0.1",
- "typed-array-byte-offset": "^1.0.2",
- "typed-array-length": "^1.0.6",
- "unbox-primitive": "^1.0.2",
- "which-typed-array": "^1.1.15"
+ "typed-array-buffer": "^1.0.3",
+ "typed-array-byte-length": "^1.0.3",
+ "typed-array-byte-offset": "^1.0.4",
+ "typed-array-length": "^1.0.7",
+ "unbox-primitive": "^1.1.0",
+ "which-typed-array": "^1.1.19"
},
"engines": {
"node": ">= 0.4"
@@ -6352,25 +6413,28 @@
}
},
"node_modules/es-shim-unscopables": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz",
- "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz",
+ "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "hasown": "^2.0.0"
+ "hasown": "^2.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
}
},
"node_modules/es-to-primitive": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
- "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz",
+ "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==",
"dev": true,
"license": "MIT",
"dependencies": {
- "is-callable": "^1.1.4",
- "is-date-object": "^1.0.1",
- "is-symbol": "^1.0.2"
+ "is-callable": "^1.2.7",
+ "is-date-object": "^1.0.5",
+ "is-symbol": "^1.0.4"
},
"engines": {
"node": ">= 0.4"
@@ -6484,17 +6548,18 @@
}
},
"node_modules/eslint": {
- "version": "8.57.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz",
- "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==",
+ "version": "8.57.1",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz",
+ "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==",
+ "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
"@eslint/eslintrc": "^2.1.4",
- "@eslint/js": "8.57.0",
- "@humanwhocodes/config-array": "^0.11.14",
+ "@eslint/js": "8.57.1",
+ "@humanwhocodes/config-array": "^0.13.0",
"@humanwhocodes/module-importer": "^1.0.1",
"@nodelib/fs.walk": "^1.2.8",
"@ungap/structured-clone": "^1.2.0",
@@ -6539,6 +6604,22 @@
"url": "https://opencollective.com/eslint"
}
},
+ "node_modules/eslint-config-prettier": {
+ "version": "10.1.5",
+ "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.5.tgz",
+ "integrity": "sha512-zc1UmCpNltmVY34vuLRV61r1K27sWuX39E+uyUnY8xS2Bex88VV9cugG+UZbRSRGtGyFboj+D8JODyme1plMpw==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "eslint-config-prettier": "bin/cli.js"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint-config-prettier"
+ },
+ "peerDependencies": {
+ "eslint": ">=7.0.0"
+ }
+ },
"node_modules/eslint-config-standard": {
"version": "17.1.0",
"resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.1.0.tgz",
@@ -6617,9 +6698,9 @@
}
},
"node_modules/eslint-module-utils": {
- "version": "2.8.1",
- "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz",
- "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==",
+ "version": "2.12.1",
+ "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz",
+ "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6691,35 +6772,37 @@
}
},
"node_modules/eslint-plugin-import": {
- "version": "2.29.1",
- "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz",
- "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==",
+ "version": "2.32.0",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz",
+ "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "array-includes": "^3.1.7",
- "array.prototype.findlastindex": "^1.2.3",
- "array.prototype.flat": "^1.3.2",
- "array.prototype.flatmap": "^1.3.2",
+ "@rtsao/scc": "^1.1.0",
+ "array-includes": "^3.1.9",
+ "array.prototype.findlastindex": "^1.2.6",
+ "array.prototype.flat": "^1.3.3",
+ "array.prototype.flatmap": "^1.3.3",
"debug": "^3.2.7",
"doctrine": "^2.1.0",
"eslint-import-resolver-node": "^0.3.9",
- "eslint-module-utils": "^2.8.0",
- "hasown": "^2.0.0",
- "is-core-module": "^2.13.1",
+ "eslint-module-utils": "^2.12.1",
+ "hasown": "^2.0.2",
+ "is-core-module": "^2.16.1",
"is-glob": "^4.0.3",
"minimatch": "^3.1.2",
- "object.fromentries": "^2.0.7",
- "object.groupby": "^1.0.1",
- "object.values": "^1.1.7",
+ "object.fromentries": "^2.0.8",
+ "object.groupby": "^1.0.3",
+ "object.values": "^1.2.1",
"semver": "^6.3.1",
+ "string.prototype.trimend": "^1.0.9",
"tsconfig-paths": "^3.15.0"
},
"engines": {
"node": ">=4"
},
"peerDependencies": {
- "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8"
+ "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9"
}
},
"node_modules/eslint-plugin-import/node_modules/debug": {
@@ -6784,6 +6867,37 @@
"node": ">=10"
}
},
+ "node_modules/eslint-plugin-prettier": {
+ "version": "5.5.1",
+ "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.5.1.tgz",
+ "integrity": "sha512-dobTkHT6XaEVOo8IO90Q4DOSxnm3Y151QxPJlM/vKC0bVy+d6cVWQZLlFiuZPP0wS6vZwSKeJgKkcS+KfMBlRw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "prettier-linter-helpers": "^1.0.0",
+ "synckit": "^0.11.7"
+ },
+ "engines": {
+ "node": "^14.18.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/eslint-plugin-prettier"
+ },
+ "peerDependencies": {
+ "@types/eslint": ">=8.0.0",
+ "eslint": ">=8.0.0",
+ "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0",
+ "prettier": ">=3.0.0"
+ },
+ "peerDependenciesMeta": {
+ "@types/eslint": {
+ "optional": true
+ },
+ "eslint-config-prettier": {
+ "optional": true
+ }
+ }
+ },
"node_modules/eslint-plugin-promise": {
"version": "6.6.0",
"resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz",
@@ -7384,6 +7498,13 @@
"dev": true,
"license": "MIT"
},
+ "node_modules/fast-diff": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.3.0.tgz",
+ "integrity": "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==",
+ "dev": true,
+ "license": "Apache-2.0"
+ },
"node_modules/fast-equals": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.0.1.tgz",
@@ -7594,13 +7715,19 @@
}
},
"node_modules/for-each": {
- "version": "0.3.3",
- "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
- "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==",
+ "version": "0.3.5",
+ "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz",
+ "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "is-callable": "^1.1.3"
+ "is-callable": "^1.2.7"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/foreground-child": {
@@ -7747,16 +7874,18 @@
}
},
"node_modules/function.prototype.name": {
- "version": "1.1.6",
- "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz",
- "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==",
+ "version": "1.1.8",
+ "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz",
+ "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2",
- "define-properties": "^1.2.0",
- "es-abstract": "^1.22.1",
- "functions-have-names": "^1.2.3"
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.3",
+ "define-properties": "^1.2.1",
+ "functions-have-names": "^1.2.3",
+ "hasown": "^2.0.2",
+ "is-callable": "^1.2.7"
},
"engines": {
"node": ">= 0.4"
@@ -7892,15 +8021,15 @@
}
},
"node_modules/get-symbol-description": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz",
- "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz",
+ "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.5",
+ "call-bound": "^1.0.3",
"es-errors": "^1.3.0",
- "get-intrinsic": "^1.2.4"
+ "get-intrinsic": "^1.2.6"
},
"engines": {
"node": ">= 0.4"
@@ -8032,6 +8161,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
"integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"es-define-property": "^1.0.0"
@@ -8041,11 +8171,14 @@
}
},
"node_modules/has-proto": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
- "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz",
+ "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==",
"dev": true,
"license": "MIT",
+ "dependencies": {
+ "dunder-proto": "^1.0.0"
+ },
"engines": {
"node": ">= 0.4"
},
@@ -8242,9 +8375,9 @@
"license": "ISC"
},
"node_modules/import-fresh": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz",
- "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==",
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",
+ "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -8302,15 +8435,15 @@
"license": "ISC"
},
"node_modules/internal-slot": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz",
- "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz",
+ "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
- "hasown": "^2.0.0",
- "side-channel": "^1.0.4"
+ "hasown": "^2.0.2",
+ "side-channel": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
@@ -8358,14 +8491,15 @@
}
},
"node_modules/is-array-buffer": {
- "version": "3.0.4",
- "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz",
- "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==",
+ "version": "3.0.5",
+ "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz",
+ "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2",
- "get-intrinsic": "^1.2.1"
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.3",
+ "get-intrinsic": "^1.2.6"
},
"engines": {
"node": ">= 0.4"
@@ -8382,13 +8516,17 @@
"license": "MIT"
},
"node_modules/is-async-function": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.0.0.tgz",
- "integrity": "sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==",
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz",
+ "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "has-tostringtag": "^1.0.0"
+ "async-function": "^1.0.0",
+ "call-bound": "^1.0.3",
+ "get-proto": "^1.0.1",
+ "has-tostringtag": "^1.0.2",
+ "safe-regex-test": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
@@ -8398,13 +8536,16 @@
}
},
"node_modules/is-bigint": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz",
- "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz",
+ "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "has-bigints": "^1.0.1"
+ "has-bigints": "^1.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -8423,14 +8564,14 @@
}
},
"node_modules/is-boolean-object": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz",
- "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==",
+ "version": "1.2.2",
+ "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz",
+ "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2",
- "has-tostringtag": "^1.0.0"
+ "call-bound": "^1.0.3",
+ "has-tostringtag": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -8453,9 +8594,9 @@
}
},
"node_modules/is-core-module": {
- "version": "2.14.0",
- "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.14.0.tgz",
- "integrity": "sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==",
+ "version": "2.16.1",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
+ "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
"license": "MIT",
"dependencies": {
"hasown": "^2.0.2"
@@ -8468,12 +8609,14 @@
}
},
"node_modules/is-data-view": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz",
- "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==",
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz",
+ "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==",
"dev": true,
"license": "MIT",
"dependencies": {
+ "call-bound": "^1.0.2",
+ "get-intrinsic": "^1.2.6",
"is-typed-array": "^1.1.13"
},
"engines": {
@@ -8484,13 +8627,14 @@
}
},
"node_modules/is-date-object": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz",
- "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz",
+ "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "has-tostringtag": "^1.0.0"
+ "call-bound": "^1.0.2",
+ "has-tostringtag": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -8509,13 +8653,16 @@
}
},
"node_modules/is-finalizationregistry": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz",
- "integrity": "sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz",
+ "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2"
+ "call-bound": "^1.0.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -8531,13 +8678,16 @@
}
},
"node_modules/is-generator-function": {
- "version": "1.0.10",
- "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz",
- "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
+ "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "has-tostringtag": "^1.0.0"
+ "call-bound": "^1.0.3",
+ "get-proto": "^1.0.0",
+ "has-tostringtag": "^1.0.2",
+ "safe-regex-test": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
@@ -8594,13 +8744,14 @@
}
},
"node_modules/is-number-object": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz",
- "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz",
+ "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "has-tostringtag": "^1.0.0"
+ "call-bound": "^1.0.3",
+ "has-tostringtag": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -8646,14 +8797,16 @@
"license": "MIT"
},
"node_modules/is-regex": {
- "version": "1.1.4",
- "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz",
- "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==",
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
+ "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2",
- "has-tostringtag": "^1.0.0"
+ "call-bound": "^1.0.2",
+ "gopd": "^1.2.0",
+ "has-tostringtag": "^1.0.2",
+ "hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -8676,13 +8829,13 @@
}
},
"node_modules/is-shared-array-buffer": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz",
- "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==",
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz",
+ "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7"
+ "call-bound": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
@@ -8692,13 +8845,14 @@
}
},
"node_modules/is-string": {
- "version": "1.0.7",
- "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz",
- "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz",
+ "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "has-tostringtag": "^1.0.0"
+ "call-bound": "^1.0.3",
+ "has-tostringtag": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -8708,13 +8862,15 @@
}
},
"node_modules/is-symbol": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz",
- "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz",
+ "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==",
"dev": true,
"license": "MIT",
"dependencies": {
- "has-symbols": "^1.0.2"
+ "call-bound": "^1.0.2",
+ "has-symbols": "^1.1.0",
+ "safe-regex-test": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
@@ -8724,13 +8880,13 @@
}
},
"node_modules/is-typed-array": {
- "version": "1.1.13",
- "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz",
- "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==",
+ "version": "1.1.15",
+ "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz",
+ "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "which-typed-array": "^1.1.14"
+ "which-typed-array": "^1.1.16"
},
"engines": {
"node": ">= 0.4"
@@ -8760,27 +8916,30 @@
}
},
"node_modules/is-weakref": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz",
- "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz",
+ "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2"
+ "call-bound": "^1.0.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-weakset": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.3.tgz",
- "integrity": "sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==",
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz",
+ "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
- "get-intrinsic": "^1.2.4"
+ "call-bound": "^1.0.3",
+ "get-intrinsic": "^1.2.6"
},
"engines": {
"node": ">= 0.4"
@@ -10236,9 +10395,9 @@
}
},
"node_modules/object-inspect": {
- "version": "1.13.2",
- "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz",
- "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==",
+ "version": "1.13.4",
+ "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
+ "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -10258,15 +10417,17 @@
}
},
"node_modules/object.assign": {
- "version": "4.1.5",
- "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz",
- "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==",
+ "version": "4.1.7",
+ "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz",
+ "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.5",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.3",
"define-properties": "^1.2.1",
- "has-symbols": "^1.0.3",
+ "es-object-atoms": "^1.0.0",
+ "has-symbols": "^1.1.0",
"object-keys": "^1.1.1"
},
"engines": {
@@ -10348,13 +10509,14 @@
}
},
"node_modules/object.values": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz",
- "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==",
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz",
+ "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.3",
"define-properties": "^1.2.1",
"es-object-atoms": "^1.0.0"
},
@@ -10413,6 +10575,24 @@
"node": ">= 0.8.0"
}
},
+ "node_modules/own-keys": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz",
+ "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "get-intrinsic": "^1.2.6",
+ "object-keys": "^1.1.1",
+ "safe-push-apply": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
"node_modules/p-limit": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
@@ -11052,6 +11232,35 @@
"node": ">= 0.8.0"
}
},
+ "node_modules/prettier": {
+ "version": "3.6.2",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz",
+ "integrity": "sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "prettier": "bin/prettier.cjs"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/prettier/prettier?sponsor=1"
+ }
+ },
+ "node_modules/prettier-linter-helpers": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz",
+ "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "fast-diff": "^1.1.2"
+ },
+ "engines": {
+ "node": ">=6.0.0"
+ }
+ },
"node_modules/process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@@ -12059,19 +12268,20 @@
"license": "MIT"
},
"node_modules/reflect.getprototypeof": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz",
- "integrity": "sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==",
+ "version": "1.0.10",
+ "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz",
+ "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
"define-properties": "^1.2.1",
- "es-abstract": "^1.23.1",
+ "es-abstract": "^1.23.9",
"es-errors": "^1.3.0",
- "get-intrinsic": "^1.2.4",
- "globalthis": "^1.0.3",
- "which-builtin-type": "^1.1.3"
+ "es-object-atoms": "^1.0.0",
+ "get-intrinsic": "^1.2.7",
+ "get-proto": "^1.0.1",
+ "which-builtin-type": "^1.2.1"
},
"engines": {
"node": ">= 0.4"
@@ -12117,16 +12327,18 @@
}
},
"node_modules/regexp.prototype.flags": {
- "version": "1.5.2",
- "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz",
- "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==",
+ "version": "1.5.4",
+ "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz",
+ "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.6",
+ "call-bind": "^1.0.8",
"define-properties": "^1.2.1",
"es-errors": "^1.3.0",
- "set-function-name": "^2.0.1"
+ "get-proto": "^1.0.1",
+ "gopd": "^1.2.0",
+ "set-function-name": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -12293,15 +12505,16 @@
}
},
"node_modules/safe-array-concat": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz",
- "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==",
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz",
+ "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
- "get-intrinsic": "^1.2.4",
- "has-symbols": "^1.0.3",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.2",
+ "get-intrinsic": "^1.2.6",
+ "has-symbols": "^1.1.0",
"isarray": "^2.0.5"
},
"engines": {
@@ -12338,16 +12551,40 @@
],
"license": "MIT"
},
- "node_modules/safe-regex-test": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz",
- "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==",
+ "node_modules/safe-push-apply": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz",
+ "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.6",
"es-errors": "^1.3.0",
- "is-regex": "^1.1.4"
+ "isarray": "^2.0.5"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/safe-push-apply/node_modules/isarray": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
+ "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/safe-regex-test": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz",
+ "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "call-bound": "^1.0.2",
+ "es-errors": "^1.3.0",
+ "is-regex": "^1.2.1"
},
"engines": {
"node": ">= 0.4"
@@ -12572,6 +12809,7 @@
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
"integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
+ "dev": true,
"license": "MIT",
"dependencies": {
"define-data-property": "^1.1.4",
@@ -12601,6 +12839,21 @@
"node": ">= 0.4"
}
},
+ "node_modules/set-proto": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz",
+ "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "dunder-proto": "^1.0.1",
+ "es-errors": "^1.3.0",
+ "es-object-atoms": "^1.0.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
@@ -12642,15 +12895,69 @@
}
},
"node_modules/side-channel": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
- "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
+ "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
"es-errors": "^1.3.0",
- "get-intrinsic": "^1.2.4",
- "object-inspect": "^1.13.1"
+ "object-inspect": "^1.13.3",
+ "side-channel-list": "^1.0.0",
+ "side-channel-map": "^1.0.1",
+ "side-channel-weakmap": "^1.0.2"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/side-channel-list": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
+ "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "object-inspect": "^1.13.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/side-channel-map": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
+ "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bound": "^1.0.2",
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.5",
+ "object-inspect": "^1.13.3"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/ljharb"
+ }
+ },
+ "node_modules/side-channel-weakmap": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
+ "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
+ "license": "MIT",
+ "dependencies": {
+ "call-bound": "^1.0.2",
+ "es-errors": "^1.3.0",
+ "get-intrinsic": "^1.2.5",
+ "object-inspect": "^1.13.3",
+ "side-channel-map": "^1.0.1"
},
"engines": {
"node": ">= 0.4"
@@ -12892,6 +13199,20 @@
"node": ">= 0.8"
}
},
+ "node_modules/stop-iteration-iterator": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz",
+ "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "es-errors": "^1.3.0",
+ "internal-slot": "^1.1.0"
+ },
+ "engines": {
+ "node": ">= 0.4"
+ }
+ },
"node_modules/streamroller": {
"version": "3.1.5",
"resolved": "https://registry.npmjs.org/streamroller/-/streamroller-3.1.5.tgz",
@@ -13035,16 +13356,19 @@
}
},
"node_modules/string.prototype.trim": {
- "version": "1.2.9",
- "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz",
- "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==",
+ "version": "1.2.10",
+ "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz",
+ "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.2",
+ "define-data-property": "^1.1.4",
"define-properties": "^1.2.1",
- "es-abstract": "^1.23.0",
- "es-object-atoms": "^1.0.0"
+ "es-abstract": "^1.23.5",
+ "es-object-atoms": "^1.0.0",
+ "has-property-descriptors": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -13054,16 +13378,20 @@
}
},
"node_modules/string.prototype.trimend": {
- "version": "1.0.8",
- "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz",
- "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==",
+ "version": "1.0.9",
+ "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz",
+ "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.2",
"define-properties": "^1.2.1",
"es-object-atoms": "^1.0.0"
},
+ "engines": {
+ "node": ">= 0.4"
+ },
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -13261,6 +13589,22 @@
"url": "https://github.com/sponsors/ljharb"
}
},
+ "node_modules/synckit": {
+ "version": "0.11.8",
+ "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.8.tgz",
+ "integrity": "sha512-+XZ+r1XGIJGeQk3VvXhT6xx/VpbHsRzsTkGgF6E5RX9TTXD0118l87puaEBZ566FhqblC6U0d4XnubznJDm30A==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@pkgr/core": "^0.2.4"
+ },
+ "engines": {
+ "node": "^14.18.0 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://opencollective.com/synckit"
+ }
+ },
"node_modules/tabbable": {
"version": "6.2.0",
"resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz",
@@ -13586,32 +13930,32 @@
}
},
"node_modules/typed-array-buffer": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz",
- "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==",
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz",
+ "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bound": "^1.0.3",
"es-errors": "^1.3.0",
- "is-typed-array": "^1.1.13"
+ "is-typed-array": "^1.1.14"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/typed-array-byte-length": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz",
- "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==",
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz",
+ "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
"for-each": "^0.3.3",
- "gopd": "^1.0.1",
- "has-proto": "^1.0.3",
- "is-typed-array": "^1.1.13"
+ "gopd": "^1.2.0",
+ "has-proto": "^1.2.0",
+ "is-typed-array": "^1.1.14"
},
"engines": {
"node": ">= 0.4"
@@ -13621,18 +13965,19 @@
}
},
"node_modules/typed-array-byte-offset": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz",
- "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==",
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz",
+ "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"available-typed-arrays": "^1.0.7",
- "call-bind": "^1.0.7",
+ "call-bind": "^1.0.8",
"for-each": "^0.3.3",
- "gopd": "^1.0.1",
- "has-proto": "^1.0.3",
- "is-typed-array": "^1.1.13"
+ "gopd": "^1.2.0",
+ "has-proto": "^1.2.0",
+ "is-typed-array": "^1.1.15",
+ "reflect.getprototypeof": "^1.0.9"
},
"engines": {
"node": ">= 0.4"
@@ -13642,18 +13987,18 @@
}
},
"node_modules/typed-array-length": {
- "version": "1.0.6",
- "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz",
- "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==",
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz",
+ "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind": "^1.0.7",
"for-each": "^0.3.3",
"gopd": "^1.0.1",
- "has-proto": "^1.0.3",
"is-typed-array": "^1.1.13",
- "possible-typed-array-names": "^1.0.0"
+ "possible-typed-array-names": "^1.0.0",
+ "reflect.getprototypeof": "^1.0.6"
},
"engines": {
"node": ">= 0.4"
@@ -13694,16 +14039,19 @@
}
},
"node_modules/unbox-primitive": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz",
- "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==",
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz",
+ "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==",
"dev": true,
"license": "MIT",
"dependencies": {
- "call-bind": "^1.0.2",
+ "call-bound": "^1.0.3",
"has-bigints": "^1.0.2",
- "has-symbols": "^1.0.3",
- "which-boxed-primitive": "^1.0.2"
+ "has-symbols": "^1.1.0",
+ "which-boxed-primitive": "^1.1.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -13950,41 +14298,45 @@
}
},
"node_modules/which-boxed-primitive": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz",
- "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==",
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz",
+ "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==",
"dev": true,
"license": "MIT",
"dependencies": {
- "is-bigint": "^1.0.1",
- "is-boolean-object": "^1.1.0",
- "is-number-object": "^1.0.4",
- "is-string": "^1.0.5",
- "is-symbol": "^1.0.3"
+ "is-bigint": "^1.1.0",
+ "is-boolean-object": "^1.2.1",
+ "is-number-object": "^1.1.1",
+ "is-string": "^1.1.1",
+ "is-symbol": "^1.1.1"
+ },
+ "engines": {
+ "node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/which-builtin-type": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.3.tgz",
- "integrity": "sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw==",
+ "version": "1.2.1",
+ "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz",
+ "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==",
"dev": true,
"license": "MIT",
"dependencies": {
- "function.prototype.name": "^1.1.5",
- "has-tostringtag": "^1.0.0",
+ "call-bound": "^1.0.2",
+ "function.prototype.name": "^1.1.6",
+ "has-tostringtag": "^1.0.2",
"is-async-function": "^2.0.0",
- "is-date-object": "^1.0.5",
- "is-finalizationregistry": "^1.0.2",
+ "is-date-object": "^1.1.0",
+ "is-finalizationregistry": "^1.1.0",
"is-generator-function": "^1.0.10",
- "is-regex": "^1.1.4",
+ "is-regex": "^1.2.1",
"is-weakref": "^1.0.2",
"isarray": "^2.0.5",
- "which-boxed-primitive": "^1.0.2",
- "which-collection": "^1.0.1",
- "which-typed-array": "^1.1.9"
+ "which-boxed-primitive": "^1.1.0",
+ "which-collection": "^1.0.2",
+ "which-typed-array": "^1.1.16"
},
"engines": {
"node": ">= 0.4"
@@ -14020,16 +14372,18 @@
}
},
"node_modules/which-typed-array": {
- "version": "1.1.15",
- "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz",
- "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==",
+ "version": "1.1.19",
+ "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz",
+ "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==",
"dev": true,
"license": "MIT",
"dependencies": {
"available-typed-arrays": "^1.0.7",
- "call-bind": "^1.0.7",
- "for-each": "^0.3.3",
- "gopd": "^1.0.1",
+ "call-bind": "^1.0.8",
+ "call-bound": "^1.0.4",
+ "for-each": "^0.3.5",
+ "get-proto": "^1.0.1",
+ "gopd": "^1.2.0",
"has-tostringtag": "^1.0.2"
},
"engines": {
diff --git a/package.json b/package.json
index d2fc9e1..3b5c061 100644
--- a/package.json
+++ b/package.json
@@ -44,6 +44,10 @@
"@babel/plugin-proposal-object-rest-spread": "^7.18.0",
"@babel/preset-env": "^7.18.2",
"@babel/register": "^7.17.7",
+ "eslint": "^8.57.1",
+ "eslint-config-prettier": "^10.1.5",
+ "eslint-plugin-prettier": "^5.5.1",
+ "prettier": "^3.6.2",
"sequelize-cli": "^6.4.1",
"standard": "^17.1.0"
},
diff --git a/src/mongo/ReseedAction.js b/src/database/ReseedAction.js
similarity index 53%
rename from src/mongo/ReseedAction.js
rename to src/database/ReseedAction.js
index 551d360..cc572f7 100644
--- a/src/mongo/ReseedAction.js
+++ b/src/database/ReseedAction.js
@@ -1,33 +1,33 @@
-import mongoose from "mongoose";
-import bcrypt from "bcrypt";
-import { userModel } from "../schemas/management/user.schema.js";
-import { dbConnect } from "./index.js";
+import mongoose from 'mongoose';
+import bcrypt from 'bcrypt';
+import { userModel } from '../schemas/management/user.schema.js';
+import { dbConnect } from './mongo.js';
const ReseedAction = () => {
async function clear() {
dbConnect();
await userModel.deleteMany({});
- console.log("DB cleared");
+ console.log('DB cleared');
}
async function seedDB() {
await clear();
const salt = await bcrypt.genSalt(10);
- const hashPassword = await bcrypt.hash("secret", salt);
+ const hashPassword = await bcrypt.hash('secret', salt);
const user = {
_id: mongoose.Types.ObjectId(1),
- name: "Admin",
- email: "admin@jsonapi.com",
+ name: 'Admin',
+ email: 'admin@jsonapi.com',
password: hashPassword,
createdAt: new Date(),
- profile_image: "../../images/admin.jpg",
+ profile_image: '../../images/admin.jpg',
};
const admin = new userModel(user);
await admin.save();
- console.log("DB seeded");
+ console.log('DB seeded');
}
seedDB();
diff --git a/src/mongo/clearDbs.js b/src/database/clearDbs.js
similarity index 100%
rename from src/mongo/clearDbs.js
rename to src/database/clearDbs.js
diff --git a/src/mongo/index.js b/src/database/mongo.js
similarity index 54%
rename from src/mongo/index.js
rename to src/database/mongo.js
index 3276d90..b0bead3 100644
--- a/src/mongo/index.js
+++ b/src/database/mongo.js
@@ -1,8 +1,8 @@
-import mongoose from "mongoose";
-import dotenv from "dotenv";
-import log4js from "log4js";
+import mongoose from 'mongoose';
+import dotenv from 'dotenv';
+import log4js from 'log4js';
-const logger = log4js.getLogger("MongoDB");
+const logger = log4js.getLogger('MongoDB');
logger.level = process.env.LOG_LEVEL;
dotenv.config();
@@ -11,11 +11,11 @@ dotenv.config();
mongoose.set('strictQuery', false);
function dbConnect() {
- mongoose.connection.once("open", () => logger.info("Database connected."));
+ mongoose.connection.once('open', () => logger.info('Database connected.'));
return mongoose.connect(
`mongodb://${process.env.DB_LINK}/farmcontrol?retryWrites=true&w=majority`,
- { }
+ {}
);
}
-export { dbConnect };
\ No newline at end of file
+export { dbConnect };
diff --git a/src/mongo/seedData.js b/src/database/seedData.js
similarity index 100%
rename from src/mongo/seedData.js
rename to src/database/seedData.js
diff --git a/src/index.js b/src/index.js
index 27468cd..dce3203 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,9 +1,9 @@
-import express from "express";
-import bodyParser from "body-parser";
-import cors from "cors";
-import dotenv from "dotenv";
-import { expressSession, keycloak } from "./keycloak.js";
-import { dbConnect } from "./mongo/index.js";
+import express from 'express';
+import bodyParser from 'body-parser';
+import cors from 'cors';
+import dotenv from 'dotenv';
+import { expressSession, keycloak } from './keycloak.js';
+import { dbConnect } from './database/mongo.js';
import {
authRoutes,
userRoutes,
@@ -22,24 +22,24 @@ import {
stockEventRoutes,
auditLogRoutes,
noteTypeRoutes,
- noteRoutes
-} from "./routes/index.js";
-import path from "path";
-import * as fs from "fs";
-import cron from "node-cron";
-import ReseedAction from "./mongo/ReseedAction.js";
-import log4js from "log4js";
-import { populateUserMiddleware } from "./services/misc/auth.js";
+ noteRoutes,
+} from './routes/index.js';
+import path from 'path';
+import * as fs from 'fs';
+import cron from 'node-cron';
+import ReseedAction from './database/ReseedAction.js';
+import log4js from 'log4js';
+import { populateUserMiddleware } from './services/misc/auth.js';
dotenv.config();
const PORT = process.env.PORT || 8080;
const app = express();
-const logger = log4js.getLogger("App");
+const logger = log4js.getLogger('App');
logger.level = process.env.LOG_LEVEL;
-app.use(log4js.connectLogger(logger, { level: "trace" }));
+app.use(log4js.connectLogger(logger, { level: 'trace' }));
const whitelist = [process.env.APP_URL_CLIENT];
const corsOptions = {
@@ -47,7 +47,7 @@ const corsOptions = {
if (!origin || whitelist.indexOf(origin) !== -1) {
callback(null, true);
} else {
- callback(new Error("Not allowed by CORS"));
+ callback(new Error('Not allowed by CORS'));
}
},
credentials: true,
@@ -56,37 +56,35 @@ const corsOptions = {
dbConnect();
app.use(cors(corsOptions));
-app.use(
- bodyParser.json({ type: "application/json", strict: false, limit: "50mb" }),
-);
+app.use(bodyParser.json({ type: 'application/json', strict: false, limit: '50mb' }));
app.use(express.json());
app.use(expressSession);
app.use(keycloak.middleware());
app.use(populateUserMiddleware);
-app.get("/", function (req, res) {
- const __dirname = fs.realpathSync(".");
- res.sendFile(path.join(__dirname, "/src/landing/index.html"));
+app.get('/', function (req, res) {
+ const __dirname = fs.realpathSync('.');
+ res.sendFile(path.join(__dirname, '/src/landing/index.html'));
});
-app.use("/auth", authRoutes);
-app.use("/users", userRoutes)
-app.use("/spotlight", spotlightRoutes);
-app.use("/printers", printerRoutes);
-app.use("/jobs", jobRoutes);
-app.use("/gcodefiles", gcodeFileRoutes);
-app.use("/filaments", filamentRoutes);
-app.use("/parts", partRoutes);
-app.use("/products", productRoutes);
-app.use("/vendors", vendorRoutes);
-app.use("/materials", materialRoutes);
-app.use("/partstocks", partStockRoutes);
-app.use("/filamentstocks", filamentStockRoutes);
-app.use("/stockevents", stockEventRoutes);
-app.use("/stockaudits", stockAuditRoutes);
-app.use("/auditlogs", auditLogRoutes);
-app.use("/notetypes", noteTypeRoutes);
-app.use("/notes", noteRoutes)
+app.use('/auth', authRoutes);
+app.use('/users', userRoutes);
+app.use('/spotlight', spotlightRoutes);
+app.use('/printers', printerRoutes);
+app.use('/jobs', jobRoutes);
+app.use('/gcodefiles', gcodeFileRoutes);
+app.use('/filaments', filamentRoutes);
+app.use('/parts', partRoutes);
+app.use('/products', productRoutes);
+app.use('/vendors', vendorRoutes);
+app.use('/materials', materialRoutes);
+app.use('/partstocks', partStockRoutes);
+app.use('/filamentstocks', filamentStockRoutes);
+app.use('/stockevents', stockEventRoutes);
+app.use('/stockaudits', stockAuditRoutes);
+app.use('/auditlogs', auditLogRoutes);
+app.use('/notetypes', noteTypeRoutes);
+app.use('/notes', noteRoutes);
if (process.env.SCHEDULE_HOUR) {
cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {
diff --git a/src/keycloak.js b/src/keycloak.js
index 02a6811..2c8a3f7 100644
--- a/src/keycloak.js
+++ b/src/keycloak.js
@@ -1,26 +1,25 @@
-import Keycloak from "keycloak-connect";
-import session from "express-session";
-import dotenv from "dotenv";
-import axios from "axios";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
+import Keycloak from 'keycloak-connect';
+import session from 'express-session';
+import dotenv from 'dotenv';
+import axios from 'axios';
+import jwt from 'jsonwebtoken';
+import log4js from 'log4js';
dotenv.config();
-
-const logger = log4js.getLogger("Keycloak");
-logger.level = process.env.LOG_LEVEL || "info";
+const logger = log4js.getLogger('Keycloak');
+logger.level = process.env.LOG_LEVEL || 'info';
// Initialize Keycloak
const keycloakConfig = {
- realm: process.env.KEYCLOAK_REALM || "farm-control",
- "auth-server-url": process.env.KEYCLOAK_URL || "http://localhost:8080/auth",
- "ssl-required": process.env.NODE_ENV === "production" ? "external" : "none",
- resource: process.env.KEYCLOAK_CLIENT_ID || "farmcontrol-client",
- "confidential-port": 0,
- "bearer-only": true,
- "public-client": false,
- "use-resource-role-mappings": true,
- "verify-token-audience": true,
+ realm: process.env.KEYCLOAK_REALM || 'farm-control',
+ 'auth-server-url': process.env.KEYCLOAK_URL || 'http://localhost:8080/auth',
+ 'ssl-required': process.env.NODE_ENV === 'production' ? 'external' : 'none',
+ resource: process.env.KEYCLOAK_CLIENT_ID || 'farmcontrol-client',
+ 'confidential-port': 0,
+ 'bearer-only': true,
+ 'public-client': false,
+ 'use-resource-role-mappings': true,
+ 'verify-token-audience': true,
credentials: {
secret: process.env.KEYCLOAK_CLIENT_SECRET,
},
@@ -29,7 +28,7 @@ const keycloakConfig = {
const memoryStore = new session.MemoryStore();
var expressSession = session({
- secret: process.env.SESSION_SECRET || "n00Dl3s23!",
+ secret: process.env.SESSION_SECRET || 'n00Dl3s23!',
resave: false,
saveUninitialized: true, // Set this to true to ensure session is initialized
store: memoryStore,
@@ -43,12 +42,12 @@ var keycloak = new Keycloak({ store: memoryStore }, keycloakConfig);
// Custom middleware to check if the user is authenticated
const isAuthenticated = async (req, res, next) => {
let token = null;
-
+
// Try to get token from Authorization header
const authHeader = req.headers.authorization;
if (authHeader && authHeader.startsWith('Bearer ')) {
token = authHeader.substring(7);
-
+
try {
// Verify token with Keycloak introspection endpoint
const response = await axios.post(
@@ -60,15 +59,15 @@ const isAuthenticated = async (req, res, next) => {
}),
{
headers: {
- "Content-Type": "application/x-www-form-urlencoded",
+ 'Content-Type': 'application/x-www-form-urlencoded',
},
}
);
const introspection = response.data;
if (!introspection.active) {
- logger.info("Token is not active");
- return res.status(401).json({ error: "Not authenticated" });
+ logger.info('Token is not active');
+ return res.status(401).json({ error: 'Not authenticated' });
}
// Parse token to extract user info
@@ -83,20 +82,20 @@ const isAuthenticated = async (req, res, next) => {
return next();
} catch (error) {
- logger.error("Token verification error:", error.message);
- return res.status(401).json({ error: "Not authenticated" });
+ logger.error('Token verification error:', error.message);
+ return res.status(401).json({ error: 'Not authenticated' });
}
}
-
+
// Fallback to session-based authentication
- if (req.session && req.session["keycloak-token"]) {
- const sessionToken = req.session["keycloak-token"];
+ if (req.session && req.session['keycloak-token']) {
+ const sessionToken = req.session['keycloak-token'];
if (sessionToken.expires_at > new Date().getTime()) {
return next();
}
}
-
- return res.status(401).json({ error: "Not authenticated" });
+
+ return res.status(401).json({ error: 'Not authenticated' });
};
// Helper function to extract roles from token
@@ -112,11 +111,7 @@ function extractRoles(token) {
if (token.resource_access) {
for (const client in token.resource_access) {
if (token.resource_access[client].roles) {
- roles.push(
- ...token.resource_access[client].roles.map(
- (role) => `${client}:${role}`
- )
- );
+ roles.push(...token.resource_access[client].roles.map((role) => `${client}:${role}`));
}
}
}
diff --git a/src/landing/index.html b/src/landing/index.html
deleted file mode 100644
index 0616b06..0000000
--- a/src/landing/index.html
+++ /dev/null
@@ -1,77 +0,0 @@
-
-
-
-
-
-
- Node.js API FREE by Creative Tim & UPDIVISION
-
-
-
-
-
-
-
Headless CMS with ExpressJS API:FREE
-
-
-
-
-
-
diff --git a/src/passport.js b/src/passport.js
deleted file mode 100644
index f8066e7..0000000
--- a/src/passport.js
+++ /dev/null
@@ -1,40 +0,0 @@
-import { ExtractJwt } from "passport-jwt";
-import passportJWT from "passport-jwt";
-import dotenv from "dotenv";
-import passport from "passport";
-
-import { userModel } from "./schemas/user.schema.js";
-import { hostModel } from "./schemas/host.schema.js";
-
-const JWTStrategy = passportJWT.Strategy;
-dotenv.config();
-
-passport.use(
- new JWTStrategy(
- {
- jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
- secretOrKey: process.env.JWT_SECRET,
- },
- function (jwtPayload, done) {
- if (jwtPayload.hostId) {
- return hostModel
- .findOne({ hostId: jwtPayload.hostId })
- .then((host) => {
- return done(null, host);
- })
- .catch((err) => {
- return done(err);
- });
- } else {
- return userModel
- .findOne({ _id: jwtPayload.id })
- .then((user) => {
- return done(null, user);
- })
- .catch((err) => {
- return done(err);
- });
- }
- }
- )
-);
diff --git a/src/routes/inventory/filamentstocks.js b/src/routes/inventory/filamentstocks.js
index e60a6b6..b91c8d1 100644
--- a/src/routes/inventory/filamentstocks.js
+++ b/src/routes/inventory/filamentstocks.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
getFilamentStockRouteHandler,
editFilamentStockRouteHandler,
newFilamentStockRouteHandler,
-} from "../../services/inventory/filamentstocks.js";
+} from '../../services/inventory/filamentstocks.js';
// list of filamentStocks
-router.get("/", isAuthenticated, (req, res) => {
- const { page, limit, property, sort, order } = req.query;
+router.get('/', isAuthenticated, (req, res) => {
+ const { page, limit, property, sort, order } = req.query;
- const allowedFilters = ["country"];
+ const allowedFilters = ['country'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listFilamentStocksRouteHandler(req, res, page, limit, property, filter, sort, order);
});
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newFilamentStockRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getFilamentStockRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editFilamentStockRouteHandler(req, res);
});
diff --git a/src/routes/inventory/partstocks.js b/src/routes/inventory/partstocks.js
index e5c62e9..0dcf9d5 100644
--- a/src/routes/inventory/partstocks.js
+++ b/src/routes/inventory/partstocks.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
getPartStockRouteHandler,
editPartStockRouteHandler,
newPartStockRouteHandler,
-} from "../../services/inventory/partstocks.js";
+} from '../../services/inventory/partstocks.js';
// list of partStocks
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
- const allowedFilters = ["country"];
+ const allowedFilters = ['country'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listPartStocksRouteHandler(req, res, page, limit, property, filter);
});
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newPartStockRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getPartStockRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editPartStockRouteHandler(req, res);
});
diff --git a/src/routes/inventory/stockaudits.js b/src/routes/inventory/stockaudits.js
index 5ac87fa..1c0dd9e 100644
--- a/src/routes/inventory/stockaudits.js
+++ b/src/routes/inventory/stockaudits.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -9,21 +9,21 @@ import {
newStockAuditRouteHandler,
updateStockAuditRouteHandler,
deleteStockAuditRouteHandler,
-} from "../../services/inventory/stockaudits.js";
+} from '../../services/inventory/stockaudits.js';
// List stock audits
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
- const allowedFilters = ["status", "type", "createdBy"];
+ const allowedFilters = ['status', 'type', 'createdBy'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -32,23 +32,23 @@ router.get("/", isAuthenticated, (req, res) => {
});
// Create new stock audit
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newStockAuditRouteHandler(req, res);
});
// Get specific stock audit
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getStockAuditRouteHandler(req, res);
});
// Update stock audit
-router.put("/:id", isAuthenticated, (req, res) => {
+router.put('/:id', isAuthenticated, (req, res) => {
updateStockAuditRouteHandler(req, res);
});
// Delete stock audit
-router.delete("/:id", isAuthenticated, (req, res) => {
+router.delete('/:id', isAuthenticated, (req, res) => {
deleteStockAuditRouteHandler(req, res);
});
-export default router;
\ No newline at end of file
+export default router;
diff --git a/src/routes/inventory/stockevents.js b/src/routes/inventory/stockevents.js
index e44857c..2e9731c 100644
--- a/src/routes/inventory/stockevents.js
+++ b/src/routes/inventory/stockevents.js
@@ -1,27 +1,27 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
listStockEventsRouteHandler,
getStockEventRouteHandler,
newStockEventRouteHandler,
-} from "../../services/inventory/stockevents.js";
+} from '../../services/inventory/stockevents.js';
// List stock events
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, sort, order } = req.query;
- const allowedFilters = ["type", "filamentStock"];
+ const allowedFilters = ['type', 'filamentStock'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -29,13 +29,13 @@ router.get("/", isAuthenticated, (req, res) => {
});
// Create new stock event
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newStockEventRouteHandler(req, res);
});
// Get specific stock event
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getStockEventRouteHandler(req, res);
});
-export default router;
\ No newline at end of file
+export default router;
diff --git a/src/routes/management/materials.js b/src/routes/management/materials.js
index b7266e5..dfbbe5f 100644
--- a/src/routes/management/materials.js
+++ b/src/routes/management/materials.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
getMaterialRouteHandler,
editMaterialRouteHandler,
newMaterialRouteHandler,
-} from "../../services/management/materials.js";
+} from '../../services/management/materials.js';
// list of materials
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
- const allowedFilters = ["type", "brand", "diameter", "color"];
+ const allowedFilters = ['type', 'brand', 'diameter', 'color'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listMaterialsRouteHandler(req, res, page, limit, property, filter);
});
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newMaterialRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getMaterialRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editMaterialRouteHandler(req, res);
});
diff --git a/src/routes/management/notetypes.js b/src/routes/management/notetypes.js
index fe8e59a..6589452 100644
--- a/src/routes/management/notetypes.js
+++ b/src/routes/management/notetypes.js
@@ -1,55 +1,42 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
import {
listNoteTypesRouteHandler,
getNoteTypeRouteHandler,
editNoteTypeRouteHandler,
newNoteTypeRouteHandler,
-} from "../../services/management/notetypes.js";
-import { parseFilter } from "../../util/index.js";
+} from '../../services/management/notetypes.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
// List note types
-router.get("/", isAuthenticated, async (req, res) => {
- const { page, limit, property, sort, order } = req.query;
+router.get('/', isAuthenticated, async (req, res) => {
+ const { page, limit, property, sort, order } = req.query;
- const allowedFilters = ["name", "active"];
+ const allowedFilters = ['name', 'active'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
- listNoteTypesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
- }
-);
+ listNoteTypesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
+});
// Get single note type
-router.get(
- "/:id",
- isAuthenticated,
- getNoteTypeRouteHandler
-);
+router.get('/:id', isAuthenticated, getNoteTypeRouteHandler);
// Edit note type
-router.put(
- "/:id",
- isAuthenticated,
- editNoteTypeRouteHandler
-);
+router.put('/:id', isAuthenticated, editNoteTypeRouteHandler);
// Create new note type
-router.post(
- "/",
- isAuthenticated,
- newNoteTypeRouteHandler
-);
+router.post('/', isAuthenticated, newNoteTypeRouteHandler);
-export default router;
\ No newline at end of file
+export default router;
diff --git a/src/routes/management/parts.js b/src/routes/management/parts.js
index 0424760..17bd180 100644
--- a/src/routes/management/parts.js
+++ b/src/routes/management/parts.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -10,46 +10,46 @@ import {
newPartRouteHandler,
uploadPartFileContentRouteHandler,
getPartFileContentRouteHandler,
-} from "../../services/management/parts.js";
+} from '../../services/management/parts.js';
// list of parts
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, sort, order } = req.query;
- const allowedFilters = ["products", "name"];
+ const allowedFilters = ['products', 'name'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
- listPartsRouteHandler(req, res, page, limit, property, filter, "", sort, order);
+ listPartsRouteHandler(req, res, page, limit, property, filter, '', sort, order);
});
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newPartRouteHandler(req, res);
});
-router.post("/:id/content", isAuthenticated, (req, res) => {
+router.post('/:id/content', isAuthenticated, (req, res) => {
uploadPartFileContentRouteHandler(req, res);
});
-router.get("/:id/content", isAuthenticated, (req, res) => {
+router.get('/:id/content', isAuthenticated, (req, res) => {
getPartFileContentRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getPartRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editPartRouteHandler(req, res);
});
diff --git a/src/routes/management/products.js b/src/routes/management/products.js
index d3df011..672294a 100644
--- a/src/routes/management/products.js
+++ b/src/routes/management/products.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
getProductRouteHandler,
editProductRouteHandler,
newProductRouteHandler,
-} from "../../services/management/products.js";
+} from '../../services/management/products.js';
// list of products
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
- const allowedFilters = ["type", "brand", "diameter", "color"];
+ const allowedFilters = ['type', 'brand', 'diameter', 'color'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listProductsRouteHandler(req, res, page, limit, property, filter);
});
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newProductRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getProductRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editProductRouteHandler(req, res);
});
diff --git a/src/routes/management/users.js b/src/routes/management/users.js
index c82bfb6..bfd1b19 100644
--- a/src/routes/management/users.js
+++ b/src/routes/management/users.js
@@ -1,27 +1,27 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
listUsersRouteHandler,
getUserRouteHandler,
editUserRouteHandler,
-} from "../../services/management/users.js";
+} from '../../services/management/users.js';
// list of users
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
- const allowedFilters = ["username", "name", "firstName", "lastName"];
+ const allowedFilters = ['username', 'name', 'firstName', 'lastName'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -29,12 +29,12 @@ router.get("/", isAuthenticated, (req, res) => {
listUsersRouteHandler(req, res, page, limit, property, filter);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getUserRouteHandler(req, res);
});
// update user info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editUserRouteHandler(req, res);
});
diff --git a/src/routes/management/vendors.js b/src/routes/management/vendors.js
index 079c456..752d445 100644
--- a/src/routes/management/vendors.js
+++ b/src/routes/management/vendors.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -8,21 +8,21 @@ import {
getVendorRouteHandler,
editVendorRouteHandler,
newVendorRouteHandler,
-} from "../../services/management/vendors.js";
+} from '../../services/management/vendors.js';
// list of vendors
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
- const allowedFilters = ["country"];
+ const allowedFilters = ['country'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -30,16 +30,16 @@ router.get("/", isAuthenticated, (req, res) => {
listVendorsRouteHandler(req, res, page, limit, property, filter);
});
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newVendorRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getVendorRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editVendorRouteHandler(req, res);
});
diff --git a/src/routes/misc/notes.js b/src/routes/misc/notes.js
index 6046e64..0c77892 100644
--- a/src/routes/misc/notes.js
+++ b/src/routes/misc/notes.js
@@ -1,21 +1,21 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
import {
listNotesRouteHandler,
getNoteRouteHandler,
editNoteRouteHandler,
newNoteRouteHandler,
- deleteNoteRouteHandler
-} from "../../services/misc/notes.js";
-import { parseFilter } from "../../util/index.js";
+ deleteNoteRouteHandler,
+} from '../../services/misc/notes.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
// List notes
-router.get("/", isAuthenticated, async (req, res) => {
+router.get('/', isAuthenticated, async (req, res) => {
const { page, limit, property, sort, order } = req.query;
- const allowedFilters = ["parent", "user._id"];
+ const allowedFilters = ['parent', 'user._id'];
var filter = {};
@@ -23,41 +23,24 @@ router.get("/", isAuthenticated, async (req, res) => {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
const filterObject = parseFilter(key, value);
- filter = {...filter, ...filterObject}
+ filter = { ...filter, ...filterObject };
}
}
}
- listNotesRouteHandler(req, res, page, limit, property, filter, "", sort, order);
- }
-);
+ listNotesRouteHandler(req, res, page, limit, property, filter, '', sort, order);
+});
// Get single note
-router.get(
- "/:id",
- isAuthenticated,
- getNoteRouteHandler
-);
+router.get('/:id', isAuthenticated, getNoteRouteHandler);
// Edit note
-router.put(
- "/:id",
- isAuthenticated,
- editNoteRouteHandler
-);
+router.put('/:id', isAuthenticated, editNoteRouteHandler);
// Delete note
-router.delete(
- "/:id",
- isAuthenticated,
- deleteNoteRouteHandler
-);
+router.delete('/:id', isAuthenticated, deleteNoteRouteHandler);
// Create new note
-router.post(
- "/",
- isAuthenticated,
- newNoteRouteHandler
-);
+router.post('/', isAuthenticated, newNoteRouteHandler);
-export default router;
\ No newline at end of file
+export default router;
diff --git a/src/routes/production/filaments.js b/src/routes/production/filaments.js
index 41b5c9a..1d7a7aa 100644
--- a/src/routes/production/filaments.js
+++ b/src/routes/production/filaments.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -8,20 +8,20 @@ import {
getFilamentRouteHandler,
editFilamentRouteHandler,
newFilamentRouteHandler,
-} from "../../services/management/filaments.js";
+} from '../../services/management/filaments.js';
// list of filaments
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property } = req.query;
- const allowedFilters = ["type", "vendor.name", "diameter", "color"];
+ const allowedFilters = ['type', 'vendor.name', 'diameter', 'color'];
var filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- filter = {...filter, ...parseFilter(key, value)};
+ filter = { ...filter, ...parseFilter(key, value) };
}
}
}
@@ -29,16 +29,16 @@ router.get("/", isAuthenticated, (req, res) => {
listFilamentsRouteHandler(req, res, page, limit, property, filter);
});
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newFilamentRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getFilamentRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editFilamentRouteHandler(req, res);
});
diff --git a/src/routes/production/gcodefiles.js b/src/routes/production/gcodefiles.js
index 28f1c30..979d891 100644
--- a/src/routes/production/gcodefiles.js
+++ b/src/routes/production/gcodefiles.js
@@ -1,6 +1,6 @@
-import express from "express";
-import { isAuthenticated } from "../../keycloak.js";
-import { parseFilter } from "../../util/index.js";
+import express from 'express';
+import { isAuthenticated } from '../../keycloak.js';
+import { parseFilter } from '../../utils.js';
const router = express.Router();
import {
@@ -11,17 +11,17 @@ import {
parseGCodeFileHandler,
uploadGCodeFileContentRouteHandler,
getGCodeFileContentRouteHandler,
-} from "../../services/production/gcodefiles.js";
+} from '../../services/production/gcodefiles.js';
// list of printers
-router.get("/", isAuthenticated, (req, res) => {
+router.get('/', isAuthenticated, (req, res) => {
const { page, limit, property, search, sort, order } = req.query;
const allowedFilters = [
- "filament.type",
- "filament.vendor.name",
- "filament.diameter",
- "filament.color",
+ 'filament.type',
+ 'filament.vendor.name',
+ 'filament.diameter',
+ 'filament.color',
];
var filter = {};
@@ -29,8 +29,8 @@ router.get("/", isAuthenticated, (req, res) => {
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
- const parsedFilter = parseFilter(key, value)
- filter = {...filter, ...parsedFilter};
+ const parsedFilter = parseFilter(key, value);
+ filter = { ...filter, ...parsedFilter };
}
}
}
@@ -39,28 +39,28 @@ router.get("/", isAuthenticated, (req, res) => {
});
// new pritner
-router.post("/", isAuthenticated, (req, res) => {
+router.post('/', isAuthenticated, (req, res) => {
newGCodeFileRouteHandler(req, res);
});
-router.get("/:id", isAuthenticated, (req, res) => {
+router.get('/:id', isAuthenticated, (req, res) => {
getGCodeFileRouteHandler(req, res);
});
// update printer info
-router.put("/:id", isAuthenticated, async (req, res) => {
+router.put('/:id', isAuthenticated, async (req, res) => {
editGCodeFileRouteHandler(req, res);
});
-router.post("/:id/content", isAuthenticated, (req, res) => {
+router.post('/:id/content', isAuthenticated, (req, res) => {
uploadGCodeFileContentRouteHandler(req, res);
});
-router.post("/content", isAuthenticated, (req, res) => {
+router.post('/content', isAuthenticated, (req, res) => {
parseGCodeFileHandler(req, res);
});
-router.get("/:id/content", isAuthenticated, (req, res) => {
+router.get('/:id/content', isAuthenticated, (req, res) => {
getGCodeFileContentRouteHandler(req, res);
});
diff --git a/src/schemas/management/material.schema.js b/src/schemas/management/material.schema.js
index ff09acc..8c6e554 100644
--- a/src/schemas/management/material.schema.js
+++ b/src/schemas/management/material.schema.js
@@ -1,4 +1,4 @@
-import mongoose from "mongoose";
+import mongoose from 'mongoose';
const materialSchema = new mongoose.Schema({
name: { required: true, type: String },
@@ -7,10 +7,10 @@ const materialSchema = new mongoose.Schema({
tags: [{ type: String }],
});
-materialSchema.virtual("id").get(function () {
+materialSchema.virtual('id').get(function () {
return this._id.toHexString();
});
-materialSchema.set("toJSON", { virtuals: true });
+materialSchema.set('toJSON', { virtuals: true });
-export const materialModel = mongoose.model("Material", materialSchema);
+export const materialModel = mongoose.model('Material', materialSchema);
diff --git a/src/services/inventory/filamentstocks.js b/src/services/inventory/filamentstocks.js
index 6b764f8..f96ae39 100644
--- a/src/services/inventory/filamentstocks.js
+++ b/src/services/inventory/filamentstocks.js
@@ -1,15 +1,14 @@
-import dotenv from "dotenv";
-import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Filament Stocks");
+const logger = log4js.getLogger('Filament Stocks');
logger.level = process.env.LOG_LEVEL;
export const listFilamentStocksRouteHandler = async (
@@ -17,10 +16,10 @@ export const listFilamentStocksRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
+ property = '',
filter = {},
- sort = "",
- order = "ascend"
+ sort = '',
+ order = 'ascend'
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -31,30 +30,30 @@ export const listFilamentStocksRouteHandler = async (
aggregateCommand.push({
$lookup: {
- from: "filaments", // The collection name (usually lowercase plural)
- localField: "filament", // The field in your current model
- foreignField: "_id", // The field in the products collection
- as: "filament", // The output field name
+ from: 'filaments', // The collection name (usually lowercase plural)
+ localField: 'filament', // The field in your current model
+ foreignField: '_id', // The field in the products collection
+ as: 'filament', // The output field name
},
});
- aggregateCommand.push({ $unwind: "$filament" });
+ aggregateCommand.push({ $unwind: '$filament' });
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
- // Add sorting if sort parameter is provided
+ // Add sorting if sort parameter is provided
if (sort) {
- const sortOrder = order === "descend" ? -1 : 1;
+ const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@@ -67,11 +66,11 @@ export const listFilamentStocksRouteHandler = async (
logger.trace(
`List of filamentStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
- filamentStock,
+ filamentStock
);
res.send(filamentStock);
} catch (error) {
- logger.error("Error listing filament stocks:", error);
+ logger.error('Error listing filament stocks:', error);
res.status(500).send({ error: error });
}
};
@@ -85,35 +84,37 @@ export const getFilamentStockRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("filament")
+ .populate('filament')
.populate({
path: 'stockEvents',
populate: [
{
path: 'subJob',
- select: 'number'
+ select: 'number',
},
{
path: 'job',
- select: 'startedAt'
- }
- ]
+ select: 'startedAt',
+ },
+ ],
});
if (!filamentStock) {
logger.warn(`Filament stock not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...filamentStock._doc, auditLogs: auditLogs});
+ res.send({ ...filamentStock._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching filament stock:", error);
+ logger.error('Error fetching filament stock:', error);
res.status(500).send({ error: error.message });
}
};
@@ -128,7 +129,7 @@ export const editFilamentStockRouteHandler = async (req, res) => {
if (!filamentStock) {
// Error handling
logger.warn(`Filament stock not found with supplied id.`);
- return res.status(404).send({ error: "Filament stock not found." });
+ return res.status(404).send({ error: 'Filament stock not found.' });
}
logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
@@ -144,21 +145,18 @@ export const editFilamentStockRouteHandler = async (req, res) => {
email: req.body.email,
};
- const result = await filamentStockModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await filamentStockModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No filament stock updated.");
- res.status(500).send({ error: "No filament stocks updated." });
+ logger.error('No filament stock updated.');
+ res.status(500).send({ error: 'No filament stocks updated.' });
}
} catch (updateError) {
- logger.error("Error updating filament stock:", updateError);
+ logger.error('Error updating filament stock:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching filament stock:", fetchError);
+ logger.error('Error fetching filament stock:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -176,12 +174,12 @@ export const newFilamentStockRouteHandler = async (req, res) => {
if (!filament) {
logger.warn(`Filament not found with supplied id.`);
- return res.status(404).send({ error: "Filament not found." });
+ return res.status(404).send({ error: 'Filament not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
} catch (error) {
- logger.error("Error fetching filament:", error);
+ logger.error('Error fetching filament:', error);
return res.status(500).send({ error: error.message });
}
@@ -195,22 +193,22 @@ export const newFilamentStockRouteHandler = async (req, res) => {
currentNetWeight: startingGrossWeight - filament.emptySpoolWeight,
filament: req.body.filament._id,
state: {
- type: "unconsumed",
+ type: 'unconsumed',
percent: 0,
},
};
const result = await filamentStockModel.create(newFilamentStock);
if (result.nCreated === 0) {
- logger.error("No filament stock created.");
- return res.status(500).send({ error: "No filament stock created." });
+ logger.error('No filament stock created.');
+ return res.status(500).send({ error: 'No filament stock created.' });
}
// Create initial stock event
const stockEvent = {
- type: "initial",
- value: startingNetWeight,
- unit: "g",
+ type: 'initial',
+ value: startingGrossWeight - filament.emptySpoolWeight,
+ unit: 'g',
filamentStock: result._id,
createdAt: new Date(),
updatedAt: new Date(),
@@ -218,8 +216,8 @@ export const newFilamentStockRouteHandler = async (req, res) => {
const eventResult = await stockEventModel.create(stockEvent);
if (!eventResult) {
- logger.error("Failed to create initial stock event.");
- return res.status(500).send({ error: "Failed to create initial stock event." });
+ logger.error('Failed to create initial stock event.');
+ return res.status(500).send({ error: 'Failed to create initial stock event.' });
}
// Update the filament stock with the stock event reference
@@ -228,9 +226,9 @@ export const newFilamentStockRouteHandler = async (req, res) => {
{ $push: { stockEvents: eventResult._id } }
);
- return res.send({ status: "ok" });
+ return res.send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error adding filament stock:", updateError);
+ logger.error('Error adding filament stock:', updateError);
return res.status(500).send({ error: updateError.message });
}
};
diff --git a/src/services/inventory/partstocks.js b/src/services/inventory/partstocks.js
index d3c6ba6..e77ac33 100644
--- a/src/services/inventory/partstocks.js
+++ b/src/services/inventory/partstocks.js
@@ -1,11 +1,11 @@
-import dotenv from "dotenv";
-import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
+import dotenv from 'dotenv';
+import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
dotenv.config();
-const logger = log4js.getLogger("PartStocks");
+const logger = log4js.getLogger('PartStocks');
logger.level = process.env.LOG_LEVEL;
export const listPartStocksRouteHandler = async (
@@ -13,8 +13,8 @@ export const listPartStocksRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -28,9 +28,9 @@ export const listPartStocksRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@@ -44,11 +44,11 @@ export const listPartStocksRouteHandler = async (
logger.trace(
`List of partStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
- partStock,
+ partStock
);
res.send(partStock);
} catch (error) {
- logger.error("Error listing partStocks:", error);
+ logger.error('Error listing partStocks:', error);
res.status(500).send({ error: error });
}
};
@@ -64,13 +64,13 @@ export const getPartStockRouteHandler = async (req, res) => {
if (!partStock) {
logger.warn(`PartStock not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`PartStock with ID: ${id}:`, partStock);
res.send(partStock);
} catch (error) {
- logger.error("Error fetching PartStock:", error);
+ logger.error('Error fetching PartStock:', error);
res.status(500).send({ error: error.message });
}
};
@@ -85,7 +85,7 @@ export const editPartStockRouteHandler = async (req, res) => {
if (!partStock) {
// Error handling
logger.warn(`PartStock not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`PartStock with ID: ${id}:`, partStock);
@@ -101,21 +101,18 @@ export const editPartStockRouteHandler = async (req, res) => {
email: req.body.email,
};
- const result = await partStockModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await partStockModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No PartStock updated.");
- res.status(500).send({ error: "No partStocks updated." });
+ logger.error('No PartStock updated.');
+ res.status(500).send({ error: 'No partStocks updated.' });
}
} catch (updateError) {
- logger.error("Error updating partStock:", updateError);
+ logger.error('Error updating partStock:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching partStock:", fetchError);
+ logger.error('Error fetching partStock:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -131,12 +128,12 @@ export const newPartStockRouteHandler = async (req, res) => {
const result = await partStockModel.create(newPartStock);
if (result.nCreated === 0) {
- logger.error("No partStock created.");
- res.status(500).send({ error: "No partStock created." });
+ logger.error('No partStock created.');
+ res.status(500).send({ error: 'No partStock created.' });
}
- res.status(200).send({ status: "ok" });
+ res.status(200).send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error updating partStock:", updateError);
+ logger.error('Error updating partStock:', updateError);
res.status(500).send({ error: updateError.message });
}
};
diff --git a/src/services/inventory/stockaudits.js b/src/services/inventory/stockaudits.js
index ed4fd06..5eecff4 100644
--- a/src/services/inventory/stockaudits.js
+++ b/src/services/inventory/stockaudits.js
@@ -1,12 +1,12 @@
-import dotenv from "dotenv";
-import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Stock Audits");
+const logger = log4js.getLogger('Stock Audits');
logger.level = process.env.LOG_LEVEL;
export const listStockAuditsRouteHandler = async (
@@ -14,8 +14,8 @@ export const listStockAuditsRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
const skip = (page - 1) * limit;
@@ -25,22 +25,22 @@ export const listStockAuditsRouteHandler = async (
// Lookup createdBy user
aggregateCommand.push({
$lookup: {
- from: "users",
- localField: "createdBy",
- foreignField: "_id",
- as: "createdBy",
+ from: 'users',
+ localField: 'createdBy',
+ foreignField: '_id',
+ as: 'createdBy',
},
});
- aggregateCommand.push({ $unwind: "$createdBy" });
+ aggregateCommand.push({ $unwind: '$createdBy' });
if (filter != {}) {
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
aggregateCommand.push({ $skip: skip });
@@ -50,11 +50,11 @@ export const listStockAuditsRouteHandler = async (
logger.trace(
`List of stock audits (Page ${page}, Limit ${limit}, Property ${property}):`,
- stockAudits,
+ stockAudits
);
res.send(stockAudits);
} catch (error) {
- logger.error("Error listing stock audits:", error);
+ logger.error('Error listing stock audits:', error);
res.status(500).send({ error: error });
}
};
@@ -66,24 +66,26 @@ export const getStockAuditRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("createdBy")
- .populate("items.filamentStock")
- .populate("items.partStock");
+ .populate('createdBy')
+ .populate('items.filamentStock')
+ .populate('items.partStock');
if (!stockAudit) {
logger.warn(`Stock audit not found with supplied id.`);
- return res.status(404).send({ error: "Stock audit not found." });
+ return res.status(404).send({ error: 'Stock audit not found.' });
}
logger.trace(`Stock audit with ID: ${id}:`, stockAudit);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...stockAudit._doc, auditLogs: auditLogs});
+ res.send({ ...stockAudit._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching stock audit:", error);
+ logger.error('Error fetching stock audit:', error);
res.status(500).send({ error: error.message });
}
};
@@ -92,29 +94,30 @@ export const newStockAuditRouteHandler = async (req, res) => {
try {
const newStockAudit = {
type: req.body.type,
- status: req.body.status || "pending",
+ status: req.body.status || 'pending',
notes: req.body.notes,
- items: req.body.items.map(item => ({
+ items: req.body.items.map((item) => ({
type: item.type,
- stock: item.type === "filament"
- ? new mongoose.Types.ObjectId(item.filamentStock)
- : new mongoose.Types.ObjectId(item.partStock),
+ stock:
+ item.type === 'filament'
+ ? new mongoose.Types.ObjectId(item.filamentStock)
+ : new mongoose.Types.ObjectId(item.partStock),
expectedQuantity: item.expectedQuantity,
actualQuantity: item.actualQuantity,
- notes: item.notes
+ notes: item.notes,
})),
createdBy: new mongoose.Types.ObjectId(req.body.createdBy),
- completedAt: req.body.status === "completed" ? new Date() : null
+ completedAt: req.body.status === 'completed' ? new Date() : null,
};
const result = await stockAuditModel.create(newStockAudit);
if (!result) {
- logger.error("No stock audit created.");
- return res.status(500).send({ error: "No stock audit created." });
+ logger.error('No stock audit created.');
+ return res.status(500).send({ error: 'No stock audit created.' });
}
- return res.send({ status: "ok", id: result._id });
+ return res.send({ status: 'ok', id: result._id });
} catch (error) {
- logger.error("Error adding stock audit:", error);
+ logger.error('Error adding stock audit:', error);
return res.status(500).send({ error: error.message });
}
};
@@ -124,33 +127,30 @@ export const updateStockAuditRouteHandler = async (req, res) => {
const id = new mongoose.Types.ObjectId(req.params.id);
const updateData = {
...req.body,
- items: req.body.items?.map(item => ({
+ items: req.body.items?.map((item) => ({
type: item.type,
- stock: item.type === "filament"
- ? new mongoose.Types.ObjectId(item.filamentStock)
- : new mongoose.Types.ObjectId(item.partStock),
+ stock:
+ item.type === 'filament'
+ ? new mongoose.Types.ObjectId(item.filamentStock)
+ : new mongoose.Types.ObjectId(item.partStock),
expectedQuantity: item.expectedQuantity,
actualQuantity: item.actualQuantity,
- notes: item.notes
+ notes: item.notes,
})),
- completedAt: req.body.status === "completed" ? new Date() : null
+ completedAt: req.body.status === 'completed' ? new Date() : null,
};
- const result = await stockAuditModel.findByIdAndUpdate(
- id,
- { $set: updateData },
- { new: true }
- );
+ const result = await stockAuditModel.findByIdAndUpdate(id, { $set: updateData }, { new: true });
if (!result) {
logger.warn(`Stock audit not found with supplied id.`);
- return res.status(404).send({ error: "Stock audit not found." });
+ return res.status(404).send({ error: 'Stock audit not found.' });
}
logger.trace(`Updated stock audit with ID: ${id}:`, result);
res.send(result);
} catch (error) {
- logger.error("Error updating stock audit:", error);
+ logger.error('Error updating stock audit:', error);
res.status(500).send({ error: error.message });
}
};
@@ -162,13 +162,13 @@ export const deleteStockAuditRouteHandler = async (req, res) => {
if (!result) {
logger.warn(`Stock audit not found with supplied id.`);
- return res.status(404).send({ error: "Stock audit not found." });
+ return res.status(404).send({ error: 'Stock audit not found.' });
}
logger.trace(`Deleted stock audit with ID: ${id}`);
- res.send({ status: "ok" });
+ res.send({ status: 'ok' });
} catch (error) {
- logger.error("Error deleting stock audit:", error);
+ logger.error('Error deleting stock audit:', error);
res.status(500).send({ error: error.message });
}
-};
\ No newline at end of file
+};
diff --git a/src/services/inventory/stockevents.js b/src/services/inventory/stockevents.js
index da8ebe1..1e3e6d5 100644
--- a/src/services/inventory/stockevents.js
+++ b/src/services/inventory/stockevents.js
@@ -1,11 +1,10 @@
-import dotenv from "dotenv";
-import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-
+import dotenv from 'dotenv';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
dotenv.config();
-const logger = log4js.getLogger("Stock Events");
+const logger = log4js.getLogger('Stock Events');
logger.level = process.env.LOG_LEVEL;
export const listStockEventsRouteHandler = async (
@@ -13,10 +12,10 @@ export const listStockEventsRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
+ property = '',
filter = {},
- sort = "",
- order = "ascend"
+ sort = '',
+ order = 'ascend'
) => {
try {
const skip = (page - 1) * limit;
@@ -26,22 +25,22 @@ export const listStockEventsRouteHandler = async (
// Lookup filamentStock
aggregateCommand.push({
$lookup: {
- from: "filamentstocks",
- localField: "filamentStock",
- foreignField: "_id",
- as: "filamentStock",
+ from: 'filamentstocks',
+ localField: 'filamentStock',
+ foreignField: '_id',
+ as: 'filamentStock',
},
});
- aggregateCommand.push({ $unwind: "$filamentStock" });
+ aggregateCommand.push({ $unwind: '$filamentStock' });
// Conditionally lookup subJob only if it exists
aggregateCommand.push({
$lookup: {
- from: "subjobs",
- localField: "subJob",
- foreignField: "_id",
- as: "subJob",
+ from: 'subjobs',
+ localField: 'subJob',
+ foreignField: '_id',
+ as: 'subJob',
},
});
@@ -49,26 +48,26 @@ export const listStockEventsRouteHandler = async (
$addFields: {
subJob: {
$cond: {
- if: { $eq: [{ $size: "$subJob" }, 0] },
+ if: { $eq: [{ $size: '$subJob' }, 0] },
then: null,
- else: { $arrayElemAt: ["$subJob", 0] }
- }
- }
- }
+ else: { $arrayElemAt: ['$subJob', 0] },
+ },
+ },
+ },
});
if (filter != {}) {
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
- // Add sorting if sort parameter is provided
+ // Add sorting if sort parameter is provided
if (sort) {
- const sortOrder = order === "descend" ? -1 : 1;
+ const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@@ -82,11 +81,11 @@ export const listStockEventsRouteHandler = async (
logger.trace(
`List of stock events (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
- stockEvents,
+ stockEvents
);
res.send(stockEvents);
} catch (error) {
- logger.error("Error listing stock events:", error);
+ logger.error('Error listing stock events:', error);
res.status(500).send({ error: error });
}
};
@@ -98,19 +97,19 @@ export const getStockEventRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("filamentStock")
- .populate("subJob")
- .populate("job");
+ .populate('filamentStock')
+ .populate('subJob')
+ .populate('job');
if (!stockEvent) {
logger.warn(`Stock event not found with supplied id.`);
- return res.status(404).send({ error: "Stock event not found." });
+ return res.status(404).send({ error: 'Stock event not found.' });
}
logger.trace(`Stock event with ID: ${id}:`, stockEvent);
res.send(stockEvent);
} catch (error) {
- logger.error("Error fetching stock event:", error);
+ logger.error('Error fetching stock event:', error);
res.status(500).send({ error: error.message });
}
};
@@ -123,17 +122,17 @@ export const newStockEventRouteHandler = async (req, res) => {
subJob: req.body.subJob ? new mongoose.Types.ObjectId(req.body.subJob) : null,
job: req.body.job ? new mongoose.Types.ObjectId(req.body.job) : null,
filamentStock: new mongoose.Types.ObjectId(req.body.filamentStock),
- timestamp: new Date()
+ timestamp: new Date(),
};
const result = await stockEventModel.create(newStockEvent);
if (!result) {
- logger.error("No stock event created.");
- return res.status(500).send({ error: "No stock event created." });
+ logger.error('No stock event created.');
+ return res.status(500).send({ error: 'No stock event created.' });
}
- return res.send({ status: "ok", id: result._id });
+ return res.send({ status: 'ok', id: result._id });
} catch (error) {
- logger.error("Error adding stock event:", error);
+ logger.error('Error adding stock event:', error);
return res.status(500).send({ error: error.message });
}
-};
\ No newline at end of file
+};
diff --git a/src/services/management/auditlogs.js b/src/services/management/auditlogs.js
index d701fca..8164451 100644
--- a/src/services/management/auditlogs.js
+++ b/src/services/management/auditlogs.js
@@ -1,21 +1,13 @@
-import dotenv from "dotenv";
+import dotenv from 'dotenv';
import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
-import log4js from "log4js";
-import mongoose from "mongoose";
+import log4js from 'log4js';
+import mongoose from 'mongoose';
dotenv.config();
-
-const logger = log4js.getLogger("AuditLogs");
+const logger = log4js.getLogger('AuditLogs');
logger.level = process.env.LOG_LEVEL;
-export const listAuditLogsRouteHandler = async (
- req,
- res,
- page = 1,
- limit = 25,
- property = "",
- filter = {},
-) => {
+export const listAuditLogsRouteHandler = async (req, res, page = 1, limit = 25, filter = {}) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
@@ -26,15 +18,12 @@ export const listAuditLogsRouteHandler = async (
.skip(skip)
.limit(Number(limit))
.sort({ createdAt: -1 })
- .populate('owner', 'name _id')
+ .populate('owner', 'name _id');
- logger.trace(
- `List of audit logs (Page ${page}, Limit ${limit}):`,
- auditLogs,
- );
+ logger.trace(`List of audit logs (Page ${page}, Limit ${limit}):`, auditLogs);
res.send(auditLogs);
} catch (error) {
- logger.error("Error listing audit logs:", error);
+ logger.error('Error listing audit logs:', error);
res.status(500).send({ error: error });
}
};
@@ -44,19 +33,23 @@ export const getAuditLogRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the audit log with the given ID
- const auditLog = await auditLogModel.findOne({
- _id: id,
- }).populate('printer').populate('owner').populate('target');
+ const auditLog = await auditLogModel
+ .findOne({
+ _id: id,
+ })
+ .populate('printer')
+ .populate('owner')
+ .populate('target');
if (!auditLog) {
logger.warn(`Audit log not found with supplied id.`);
- return res.status(404).send({ error: "Audit log not found." });
+ return res.status(404).send({ error: 'Audit log not found.' });
}
logger.trace(`Audit log with ID: ${id}:`, auditLog);
res.send(auditLog);
} catch (error) {
- logger.error("Error fetching audit log:", error);
+ logger.error('Error fetching audit log:', error);
res.status(500).send({ error: error.message });
}
};
diff --git a/src/services/management/filaments.js b/src/services/management/filaments.js
index 450c817..aab0b5c 100644
--- a/src/services/management/filaments.js
+++ b/src/services/management/filaments.js
@@ -1,14 +1,12 @@
-import dotenv from "dotenv";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-
-const logger = log4js.getLogger("Filaments");
+const logger = log4js.getLogger('Filaments');
logger.level = process.env.LOG_LEVEL;
export const listFilamentsRouteHandler = async (
@@ -16,8 +14,8 @@ export const listFilamentsRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -28,23 +26,23 @@ export const listFilamentsRouteHandler = async (
aggregateCommand.push({
$lookup: {
- from: "vendors", // The collection name (usually lowercase plural)
- localField: "vendor", // The field in your current model
- foreignField: "_id", // The field in the products collection
- as: "vendor", // The output field name
+ from: 'vendors', // The collection name (usually lowercase plural)
+ localField: 'vendor', // The field in your current model
+ foreignField: '_id', // The field in the products collection
+ as: 'vendor', // The output field name
},
});
- aggregateCommand.push({ $unwind: "$vendor" });
+ aggregateCommand.push({ $unwind: '$vendor' });
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@@ -58,11 +56,11 @@ export const listFilamentsRouteHandler = async (
logger.trace(
`List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`,
- filament,
+ filament
);
res.send(filament);
} catch (error) {
- logger.error("Error listing filaments:", error);
+ logger.error('Error listing filaments:', error);
res.status(500).send({ error: error });
}
};
@@ -76,22 +74,24 @@ export const getFilamentRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("vendor");
+ .populate('vendor');
if (!filament) {
logger.warn(`Filament not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...filament._doc, auditLogs: auditLogs});
+ res.send({ ...filament._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching Filament:", error);
+ logger.error('Error fetching Filament:', error);
res.status(500).send({ error: error.message });
}
};
@@ -106,7 +106,7 @@ export const editFilamentRouteHandler = async (req, res) => {
if (!filament) {
// Error handling
logger.warn(`Filament not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
@@ -128,30 +128,20 @@ export const editFilamentRouteHandler = async (req, res) => {
};
// Create audit log before updating
- await newAuditLog(
- filament.toObject(),
- updateData,
- id,
- 'Filament',
- req.user._id,
- 'User'
- );
+ await newAuditLog(filament.toObject(), updateData, id, 'Filament', req.user._id, 'User');
- const result = await filamentModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await filamentModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No Filament updated.");
- return res.status(500).send({ error: "No filaments updated." });
+ logger.error('No Filament updated.');
+ return res.status(500).send({ error: 'No filaments updated.' });
}
} catch (updateError) {
- logger.error("Error updating filament:", updateError);
+ logger.error('Error updating filament:', updateError);
return res.status(500).send({ error: updateError.message });
}
- return res.send("OK");
+ return res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching filament:", fetchError);
+ logger.error('Error fetching filament:', fetchError);
return res.status(500).send({ error: fetchError.message });
}
};
@@ -177,23 +167,16 @@ export const newFilamentRouteHandler = async (req, res) => {
const result = await filamentModel.create(newFilament);
if (result.nCreated === 0) {
- logger.error("No filament created.");
- res.status(500).send({ error: "No filament created." });
+ logger.error('No filament created.');
+ res.status(500).send({ error: 'No filament created.' });
}
// Create audit log for new filament
- await newAuditLog(
- {},
- newFilament,
- result._id,
- 'Filament',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newFilament, result._id, 'Filament', req.user._id, 'User');
- res.status(200).send({ status: "ok" });
+ res.status(200).send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error updating filament:", updateError);
+ logger.error('Error updating filament:', updateError);
res.status(500).send({ error: updateError.message });
}
};
diff --git a/src/services/management/materials.js b/src/services/management/materials.js
index dd6891e..7fc847d 100644
--- a/src/services/management/materials.js
+++ b/src/services/management/materials.js
@@ -1,12 +1,10 @@
-import dotenv from "dotenv";
-import { materialModel } from "../../schemas/management/material.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-
+import dotenv from 'dotenv';
+import { materialModel } from '../../schemas/management/material.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
dotenv.config();
-const logger = log4js.getLogger("Materials");
+const logger = log4js.getLogger('Materials');
logger.level = process.env.LOG_LEVEL;
export const listMaterialsRouteHandler = async (
@@ -14,8 +12,8 @@ export const listMaterialsRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -29,9 +27,9 @@ export const listMaterialsRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@@ -45,11 +43,11 @@ export const listMaterialsRouteHandler = async (
logger.trace(
`List of materials (Page ${page}, Limit ${limit}, Property ${property}):`,
- material,
+ material
);
res.send(material);
} catch (error) {
- logger.error("Error listing materials:", error);
+ logger.error('Error listing materials:', error);
res.status(500).send({ error: error });
}
};
@@ -65,13 +63,13 @@ export const getMaterialRouteHandler = async (req, res) => {
if (!material) {
logger.warn(`Material not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Material with ID: ${id}:`, material);
res.send(material);
} catch (error) {
- logger.error("Error fetching Material:", error);
+ logger.error('Error fetching Material:', error);
res.status(500).send({ error: error.message });
}
};
@@ -86,30 +84,26 @@ export const editMaterialRouteHandler = async (req, res) => {
if (!material) {
// Error handling
logger.warn(`Material not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Material with ID: ${id}:`, material);
try {
- const { createdAt, updatedAt, started_at, status, ...updateData } =
- req.body;
+ const updateData = req.body;
- const result = await materialModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await materialModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No Material updated.");
- res.status(500).send({ error: "No materials updated." });
+ logger.error('No Material updated.');
+ res.status(500).send({ error: 'No materials updated.' });
}
} catch (updateError) {
- logger.error("Error updating material:", updateError);
+ logger.error('Error updating material:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching material:", fetchError);
+ logger.error('Error fetching material:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -125,12 +119,12 @@ export const newMaterialRouteHandler = async (req, res) => {
const result = await materialModel.create(newMaterial);
if (result.nCreated === 0) {
- logger.error("No material created.");
- res.status(500).send({ error: "No material created." });
+ logger.error('No material created.');
+ res.status(500).send({ error: 'No material created.' });
}
- res.status(200).send({ status: "ok" });
+ res.status(200).send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error updating material:", updateError);
+ logger.error('Error updating material:', updateError);
res.status(500).send({ error: updateError.message });
}
};
diff --git a/src/services/management/notetypes.js b/src/services/management/notetypes.js
index 1a264e6..f642752 100644
--- a/src/services/management/notetypes.js
+++ b/src/services/management/notetypes.js
@@ -1,13 +1,13 @@
-import dotenv from "dotenv";
-import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("NoteTypes");
+const logger = log4js.getLogger('NoteTypes');
logger.level = process.env.LOG_LEVEL;
export const listNoteTypesRouteHandler = async (
@@ -15,8 +15,8 @@ export const listNoteTypesRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
const skip = (page - 1) * limit;
@@ -27,26 +27,25 @@ export const listNoteTypesRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } });
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } });
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } });
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
- console.log(aggregateCommand)
-
+ console.log(aggregateCommand);
noteTypes = await noteTypeModel.aggregate(aggregateCommand);
logger.trace(
`List of note types (Page ${page}, Limit ${limit}, Property ${property}):`,
- noteTypes,
+ noteTypes
);
res.send(noteTypes);
} catch (error) {
- logger.error("Error listing note types:", error);
+ logger.error('Error listing note types:', error);
res.status(500).send({ error: error });
}
};
@@ -60,18 +59,20 @@ export const getNoteTypeRouteHandler = async (req, res) => {
if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
- return res.status(404).send({ error: "Note type not found." });
+ return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...noteType._doc, auditLogs: auditLogs});
+ res.send({ ...noteType._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching note type:", error);
+ logger.error('Error fetching note type:', error);
res.status(500).send({ error: error.message });
}
};
@@ -83,7 +84,7 @@ export const editNoteTypeRouteHandler = async (req, res) => {
if (!noteType) {
logger.warn(`Note type not found with supplied id.`);
- return res.status(404).send({ error: "Note type not found." });
+ return res.status(404).send({ error: 'Note type not found.' });
}
logger.trace(`Note type with ID: ${id}:`, noteType);
@@ -97,30 +98,20 @@ export const editNoteTypeRouteHandler = async (req, res) => {
};
// Create audit log before updating
- await newAuditLog(
- noteType.toObject(),
- updateData,
- id,
- 'NoteType',
- req.user._id,
- 'User'
- );
+ await newAuditLog(noteType.toObject(), updateData, id, 'NoteType', req.user._id, 'User');
- const result = await noteTypeModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await noteTypeModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No note type updated.");
- res.status(500).send({ error: "No note types updated." });
+ logger.error('No note type updated.');
+ res.status(500).send({ error: 'No note types updated.' });
}
} catch (updateError) {
- logger.error("Error updating note type:", updateError);
+ logger.error('Error updating note type:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching note type:", fetchError);
+ logger.error('Error fetching note type:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -132,23 +123,16 @@ export const newNoteTypeRouteHandler = async (req, res) => {
const result = await noteTypeModel.create(newNoteType);
if (result.nCreated === 0) {
- logger.error("No note type created.");
- res.status(500).send({ error: "No note type created." });
+ logger.error('No note type created.');
+ res.status(500).send({ error: 'No note type created.' });
}
// Create audit log for new note type
- await newAuditLog(
- {},
- newNoteType,
- result._id,
- 'NoteType',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newNoteType, result._id, 'NoteType', req.user._id, 'User');
- res.status(200).send({ status: "ok" });
+ res.status(200).send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error creating note type:", updateError);
+ logger.error('Error creating note type:', updateError);
res.status(500).send({ error: updateError.message });
}
-};
\ No newline at end of file
+};
diff --git a/src/services/management/parts.js b/src/services/management/parts.js
index e5d4f99..cbe4d9d 100644
--- a/src/services/management/parts.js
+++ b/src/services/management/parts.js
@@ -1,16 +1,15 @@
-import dotenv from "dotenv";
-import { partModel } from "../../schemas/management/part.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import multer from "multer";
-import fs from "fs";
-import path from "path";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-
+import dotenv from 'dotenv';
+import { partModel } from '../../schemas/management/part.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import multer from 'multer';
+import fs from 'fs';
+import path from 'path';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Parts");
+const logger = log4js.getLogger('Parts');
logger.level = process.env.LOG_LEVEL;
// Set storage engine
@@ -18,7 +17,7 @@ const partsStorage = multer.diskStorage({
destination: process.env.PART_STORAGE,
filename: async function (req, file, cb) {
// Retrieve custom file name from request body
- const customFileName = req.params.id || "default"; // Default to 'default' if not provided
+ const customFileName = req.params.id || 'default'; // Default to 'default' if not provided
// Create the final filename ensuring it ends with .g
const finalFilename = `${customFileName}.stl`;
@@ -34,7 +33,7 @@ const partUpload = multer({
fileFilter: function (req, file, cb) {
checkFileType(file, cb);
},
-}).single("partFile"); // The name attribute of the file input in the HTML form
+}).single('partFile'); // The name attribute of the file input in the HTML form
// Check file type
function checkFileType(file, cb) {
@@ -47,7 +46,7 @@ function checkFileType(file, cb) {
console.log(file);
return cb(null, true);
} else {
- cb("Error: .stl files only!");
+ cb('Error: .stl files only!');
}
}
@@ -56,11 +55,11 @@ export const listPartsRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
+ property = '',
filter = {},
- search = "",
- sort = "",
- order = "ascend"
+ search = '',
+ sort = '',
+ order = 'ascend'
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -69,40 +68,51 @@ export const listPartsRouteHandler = async (
let part;
let aggregateCommand = [];
+ if (search) {
+ // Add a text search match stage for name and brand fields
+ aggregateCommand.push({
+ $match: {
+ $text: {
+ $search: search,
+ },
+ },
+ });
+ }
+
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
logger.error(property);
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({
$lookup: {
- from: "products", // The collection name (usually lowercase plural)
- localField: "product", // The field in your current model
- foreignField: "_id", // The field in the products collection
- as: "product", // The output field name
+ from: 'products', // The collection name (usually lowercase plural)
+ localField: 'product', // The field in your current model
+ foreignField: '_id', // The field in the products collection
+ as: 'product', // The output field name
},
});
- aggregateCommand.push({ $unwind: "$product" });
+ aggregateCommand.push({ $unwind: '$product' });
aggregateCommand.push({
$project: {
name: 1,
_id: 1,
createdAt: 1,
updatedAt: 1,
- "product._id": 1,
- "product.name": 1,
+ 'product._id': 1,
+ 'product.name': 1,
},
});
}
// Add sorting if sort parameter is provided
if (sort) {
- const sortOrder = order === "descend" ? -1 : 1;
+ const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@@ -115,11 +125,11 @@ export const listPartsRouteHandler = async (
logger.trace(
`List of parts (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
- part,
+ part
);
res.send(part);
} catch (error) {
- logger.error("Error listing parts:", error);
+ logger.error('Error listing parts:', error);
res.status(500).send({ error: error });
}
};
@@ -133,22 +143,24 @@ export const getPartRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("product");
+ .populate('product');
if (!part) {
logger.warn(`Part not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Part with ID: ${id}:`, part);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...part._doc, auditLogs: auditLogs});
+ res.send({ ...part._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching Part:", error);
+ logger.error('Error fetching Part:', error);
res.status(500).send({ error: error.message });
}
};
@@ -163,40 +175,29 @@ export const editPartRouteHandler = async (req, res) => {
if (!part) {
// Error handling
logger.warn(`Part not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Part with ID: ${id}:`, part);
try {
- const { createdAt, updatedAt, started_at, status, ...updateData } =
- req.body;
+ const updateData = req.body;
// Create audit log before updating
- await newAuditLog(
- part.toObject(),
- updateData,
- id,
- 'Part',
- req.user._id,
- 'User'
- );
+ await newAuditLog(part.toObject(), updateData, id, 'Part', req.user._id, 'User');
- const result = await partModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await partModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No Part updated.");
- res.status(500).send({ error: "No parts updated." });
+ logger.error('No Part updated.');
+ res.status(500).send({ error: 'No parts updated.' });
}
} catch (updateError) {
- logger.error("Error updating part:", updateError);
+ logger.error('Error updating part:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching part:", fetchError);
+ logger.error('Error fetching part:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -215,20 +216,13 @@ export const newPartRouteHandler = async (req, res) => {
const results = await partModel.insertMany(partsToCreate);
if (!results.length) {
- logger.error("No parts created.");
- return res.status(500).send({ error: "No parts created." });
+ logger.error('No parts created.');
+ return res.status(500).send({ error: 'No parts created.' });
}
// Create audit logs for each new part
for (const result of results) {
- await newAuditLog(
- {},
- result.toObject(),
- result._id,
- 'Part',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, result.toObject(), result._id, 'Part', req.user._id, 'User');
}
return res.status(200).send(results);
@@ -244,19 +238,12 @@ export const newPartRouteHandler = async (req, res) => {
const result = await partModel.create(newPart);
// Create audit log for new part
- await newAuditLog(
- {},
- newPart,
- result._id,
- 'Part',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newPart, result._id, 'Part', req.user._id, 'User');
return res.status(200).send(result);
}
} catch (error) {
- logger.error("Error creating part(s):", error);
+ logger.error('Error creating part(s):', error);
return res.status(500).send({ error: error.message });
}
};
@@ -270,7 +257,7 @@ export const uploadPartFileContentRouteHandler = async (req, res) => {
if (!part) {
// Error handling
logger.warn(`Part not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Part with ID: ${id}`);
try {
@@ -282,22 +269,22 @@ export const uploadPartFileContentRouteHandler = async (req, res) => {
} else {
if (req.file == undefined) {
res.send({
- message: "No file selected!",
+ message: 'No file selected!',
});
} else {
res.send({
- status: "OK",
+ status: 'OK',
file: `${req.file.filename}`,
});
}
}
});
} catch (updateError) {
- logger.error("Error updating part:", updateError);
+ logger.error('Error updating part:', updateError);
res.status(500).send({ error: updateError.message });
}
} catch (fetchError) {
- logger.error("Error fetching part:", fetchError);
+ logger.error('Error fetching part:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -313,22 +300,22 @@ export const getPartFileContentRouteHandler = async (req, res) => {
if (!part) {
logger.warn(`Part not found with supplied id.`);
- return res.status(404).send({ error: "Part not found." });
+ return res.status(404).send({ error: 'Part not found.' });
}
logger.trace(`Returning part file contents with ID: ${id}:`);
- const filePath = path.join(process.env.PART_STORAGE, id + ".stl");
+ const filePath = path.join(process.env.PART_STORAGE, id + '.stl');
// Read the file
- fs.readFile(filePath, "utf8", (err, data) => {
+ fs.readFile(filePath, 'utf8', (err, data) => {
if (err) {
- if (err.code === "ENOENT") {
+ if (err.code === 'ENOENT') {
// File not found
- return res.status(404).send({ error: "File not found!" });
+ return res.status(404).send({ error: 'File not found!' });
} else {
// Other errors
- return res.status(500).send({ error: "Error reading file." });
+ return res.status(500).send({ error: 'Error reading file.' });
}
}
@@ -336,7 +323,7 @@ export const getPartFileContentRouteHandler = async (req, res) => {
res.send(data);
});
} catch (error) {
- logger.error("Error fetching Part:", error);
+ logger.error('Error fetching Part:', error);
res.status(500).send({ error: error.message });
}
};
diff --git a/src/services/management/products.js b/src/services/management/products.js
index cafc2c2..8d205f3 100644
--- a/src/services/management/products.js
+++ b/src/services/management/products.js
@@ -1,14 +1,13 @@
-import dotenv from "dotenv";
-import { productModel } from "../../schemas/management/product.schema.js";
-import { partModel } from "../../schemas/management/part.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-
+import dotenv from 'dotenv';
+import { productModel } from '../../schemas/management/product.schema.js';
+import { partModel } from '../../schemas/management/part.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Products");
+const logger = log4js.getLogger('Products');
logger.level = process.env.LOG_LEVEL;
export const listProductsRouteHandler = async (
@@ -16,8 +15,8 @@ export const listProductsRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -31,13 +30,13 @@ export const listProductsRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
// Match documents where the specified property is either null, undefined, empty string, empty array or empty object
aggregateCommand.push({
$match: {
$or: [
{ [property]: null },
- { [property]: "" },
+ { [property]: '' },
{ [property]: [] },
{ [property]: {} },
{ [property]: { $exists: false } },
@@ -56,13 +55,10 @@ export const listProductsRouteHandler = async (
product = await productModel.aggregate(aggregateCommand);
- logger.trace(
- `List of products (Page ${page}, Limit ${limit}, Property ${property}):`,
- product,
- );
+ logger.trace(`List of products (Page ${page}, Limit ${limit}, Property ${property}):`, product);
res.send(product);
} catch (error) {
- logger.error("Error listing products:", error);
+ logger.error('Error listing products:', error);
res.status(500).send({ error: error });
}
};
@@ -76,23 +72,25 @@ export const getProductRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("vendor")
- .populate("parts");
+ .populate('vendor')
+ .populate('parts');
if (!product) {
logger.warn(`Product not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Product not found.' });
}
logger.trace(`Product with ID: ${id}:`, product);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...product._doc, auditLogs: auditLogs});
+ res.send({ ...product._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching Product:", error);
+ logger.error('Error fetching Product:', error);
res.status(500).send({ error: error.message });
}
};
@@ -109,12 +107,12 @@ export const editProductRouteHandler = async (req, res) => {
if (!product) {
// Error handling
logger.warn(`Product not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Product not found.' });
}
logger.trace(`Product with ID: ${id}:`, product);
} catch (fetchError) {
- logger.error("Error fetching product:", fetchError);
+ logger.error('Error fetching product:', fetchError);
res.status(500).send({ error: fetchError.message });
}
@@ -132,28 +130,18 @@ export const editProductRouteHandler = async (req, res) => {
};
// Create audit log before updating
- await newAuditLog(
- product.toObject(),
- updateData,
- id,
- 'Product',
- req.user._id,
- 'User'
- );
+ await newAuditLog(product.toObject(), updateData, id, 'Product', req.user._id, 'User');
- const result = await productModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await productModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No Product updated.");
- res.status(500).send({ error: "No products updated." });
+ logger.error('No Product updated.');
+ res.status(500).send({ error: 'No products updated.' });
}
} catch (updateError) {
- logger.error("Error updating product:", updateError);
+ logger.error('Error updating product:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
};
export const newProductRouteHandler = async (req, res) => {
@@ -172,19 +160,12 @@ export const newProductRouteHandler = async (req, res) => {
const newProductResult = await productModel.create(newProduct);
if (newProductResult.nCreated === 0) {
- logger.error("No product created.");
- res.status(500).send({ error: "No product created." });
+ logger.error('No product created.');
+ res.status(500).send({ error: 'No product created.' });
}
// Create audit log for new product
- await newAuditLog(
- {},
- newProduct,
- newProductResult._id,
- 'Product',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newProduct, newProductResult._id, 'Product', req.user._id, 'User');
const parts = req.body.parts || [];
const productId = newProductResult._id;
@@ -201,35 +182,28 @@ export const newProductRouteHandler = async (req, res) => {
const newPartResult = await partModel.create(newPart);
if (newPartResult.nCreated === 0) {
- logger.error("No parts created.");
- res.status(500).send({ error: "No parts created." });
+ logger.error('No parts created.');
+ res.status(500).send({ error: 'No parts created.' });
}
partIds.push(newPartResult._id);
// Create audit log for each new part
- await newAuditLog(
- {},
- newPart,
- newPartResult._id,
- 'Part',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newPart, newPartResult._id, 'Part', req.user._id, 'User');
}
const editProductResult = await productModel.updateOne(
{ _id: productId },
- { $set: { parts: partIds } },
+ { $set: { parts: partIds } }
);
if (editProductResult.nModified === 0) {
- logger.error("No product updated.");
- res.status(500).send({ error: "No products updated." });
+ logger.error('No product updated.');
+ res.status(500).send({ error: 'No products updated.' });
}
res.status(200).send({ ...newProductResult, parts: partIds });
} catch (updateError) {
- logger.error("Error updating product:", updateError);
+ logger.error('Error updating product:', updateError);
res.status(500).send({ error: updateError.message });
}
};
diff --git a/src/services/management/spotlight.js b/src/services/management/spotlight.js
index bd98c91..dee714f 100644
--- a/src/services/management/spotlight.js
+++ b/src/services/management/spotlight.js
@@ -1,26 +1,25 @@
-import dotenv from "dotenv";
-import { jobModel } from "../../schemas/production/job.schema.js";
-import { subJobModel } from "../../schemas/production/subjob.schema.js";
-import log4js from "log4js";
-import { printerModel } from "../../schemas/production/printer.schema.js";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
-import { partModel } from "../../schemas/management/part.schema.js";
-import { productModel } from "../../schemas/management/product.schema.js";
-import { vendorModel } from "../../schemas/management/vendor.schema.js";
-import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
-import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
-import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
-import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-import { userModel } from "../../schemas/management/user.schema.js";
-import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
-import { noteModel } from "../../schemas/misc/note.schema.js";
-import mongoose from "mongoose";
-
+import dotenv from 'dotenv';
+import { jobModel } from '../../schemas/production/job.schema.js';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
+import log4js from 'log4js';
+import { printerModel } from '../../schemas/production/printer.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
+import { partModel } from '../../schemas/management/part.schema.js';
+import { productModel } from '../../schemas/management/product.schema.js';
+import { vendorModel } from '../../schemas/management/vendor.schema.js';
+import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
+import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
+import { userModel } from '../../schemas/management/user.schema.js';
+import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
+import { noteModel } from '../../schemas/misc/note.schema.js';
+import mongoose from 'mongoose';
dotenv.config();
-const logger = log4js.getLogger("Jobs");
+const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
// Map prefixes to models and id fields
@@ -48,11 +47,11 @@ const PREFIX_MODEL_MAP = {
// Helper function to build search filter from query parameters
const buildSearchFilter = (params) => {
const filter = {};
-
+
for (const [key, value] of Object.entries(params)) {
// Skip pagination and limit parameters as they're not search filters
if (key === 'limit' || key === 'page') continue;
-
+
// Handle different field types
if (key === 'name') {
filter.name = { $regex: value, $options: 'i' }; // Case-insensitive search
@@ -72,21 +71,21 @@ const buildSearchFilter = (params) => {
filter[key] = { $regex: value, $options: 'i' };
}
}
-
+
return filter;
};
const trimSpotlightObject = (object) => {
-return {
- _id: object._id,
- name: object.name || undefined,
- state: object.state && object?.state.type? { type: object.state.type } : undefined,
- tags: object.tags || undefined,
- email: object.email || undefined,
- color: object.color || undefined,
- updatedAt: object.updatedAt || undefined,
- };
-}
+ return {
+ _id: object._id,
+ name: object.name || undefined,
+ state: object.state && object?.state.type ? { type: object.state.type } : undefined,
+ tags: object.tags || undefined,
+ email: object.email || undefined,
+ color: object.color || undefined,
+ updatedAt: object.updatedAt || undefined,
+ };
+};
export const getSpotlightRouteHandler = async (req, res) => {
try {
@@ -100,20 +99,20 @@ export const getSpotlightRouteHandler = async (req, res) => {
const delimiter = query.substring(3, 4);
const suffix = query.substring(4);
- if (delimiter == ":") {
+ if (delimiter == ':') {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
if (!prefixEntry || !prefixEntry.model) {
- res.status(400).send({ error: "Invalid or unsupported prefix" });
+ res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model, idField } = prefixEntry;
-
+
// Validate ObjectId if the idField is '_id'
if (idField === '_id' && !mongoose.Types.ObjectId.isValid(suffix)) {
res.status(404).send({ error: `${prefix} not found` });
return;
}
-
+
// Find the object by the correct field
const queryObj = {};
queryObj[idField] = suffix.toLowerCase();
@@ -123,47 +122,44 @@ export const getSpotlightRouteHandler = async (req, res) => {
return;
}
// Build the response with only the required fields
- const response = trimSpotlightObject(doc)
+ const response = trimSpotlightObject(doc);
res.status(200).send(response);
return;
}
- console.log(queryParams)
+ console.log(queryParams);
if (Object.keys(queryParams).length > 0) {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
- console.log(prefixEntry)
+ console.log(prefixEntry);
if (!prefixEntry || !prefixEntry.model) {
- res.status(400).send({ error: "Invalid or unsupported prefix" });
+ res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model } = prefixEntry;
-
+
// Use req.query for search parameters
-
+
if (Object.keys(queryParams).length === 0) {
- res.status(400).send({ error: "No search parameters provided" });
+ res.status(400).send({ error: 'No search parameters provided' });
return;
}
-
+
// Build search filter
const searchFilter = buildSearchFilter(queryParams);
-
+
// Perform search with limit
const limit = parseInt(req.query.limit) || 10;
- const docs = await model.find(searchFilter)
- .limit(limit)
- .sort({ updatedAt: -1 })
- .lean();
-
+ const docs = await model.find(searchFilter).limit(limit).sort({ updatedAt: -1 }).lean();
+
// Format response
- const response = docs.map(doc => (trimSpotlightObject(doc)));
-
+ const response = docs.map((doc) => trimSpotlightObject(doc));
+
res.status(200).send(response);
return;
}
} catch (error) {
- logger.error("Error in spotlight lookup:", error);
+ logger.error('Error in spotlight lookup:', error);
res.status(500).send({ error: error });
}
};
diff --git a/src/services/management/users.js b/src/services/management/users.js
index c08dc19..41d2eb1 100644
--- a/src/services/management/users.js
+++ b/src/services/management/users.js
@@ -1,14 +1,12 @@
-import dotenv from "dotenv";
-import { userModel } from "../../schemas/management/user.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-
+import dotenv from 'dotenv';
+import { userModel } from '../../schemas/management/user.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Users");
+const logger = log4js.getLogger('Users');
logger.level = process.env.LOG_LEVEL;
export const listUsersRouteHandler = async (
@@ -16,8 +14,8 @@ export const listUsersRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -31,10 +29,9 @@ export const listUsersRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
-
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
}
aggregateCommand.push({ $skip: skip });
@@ -44,13 +41,10 @@ export const listUsersRouteHandler = async (
user = await userModel.aggregate(aggregateCommand);
- logger.trace(
- `List of users (Page ${page}, Limit ${limit}, Property ${property}):`,
- user,
- );
+ logger.trace(`List of users (Page ${page}, Limit ${limit}, Property ${property}):`, user);
res.send(user);
} catch (error) {
- logger.error("Error listing users:", error);
+ logger.error('Error listing users:', error);
res.status(500).send({ error: error });
}
};
@@ -66,18 +60,20 @@ export const getUserRouteHandler = async (req, res) => {
if (!user) {
logger.warn(`User not found with supplied id.`);
- return res.status(404).send({ error: "User not found." });
+ return res.status(404).send({ error: 'User not found.' });
}
logger.trace(`User with ID: ${id}:`, user);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...user._doc, auditLogs: auditLogs});
+ res.send({ ...user._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching User:", error);
+ logger.error('Error fetching User:', error);
res.status(500).send({ error: error.message });
}
};
@@ -92,7 +88,7 @@ export const editUserRouteHandler = async (req, res) => {
if (!user) {
// Error handling
logger.warn(`User not found with supplied id.`);
- return res.status(404).send({ error: "User not found." });
+ return res.status(404).send({ error: 'User not found.' });
}
logger.trace(`User with ID: ${id}:`, user);
@@ -107,33 +103,23 @@ export const editUserRouteHandler = async (req, res) => {
email: req.body.email,
};
- console.log(req.user)
+ console.log(req.user);
// Create audit log before updating
- await newAuditLog(
- user.toObject(),
- updateData,
- id,
- 'User',
- req.user._id,
- 'User'
- );
+ await newAuditLog(user.toObject(), updateData, id, 'User', req.user._id, 'User');
- const result = await userModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await userModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No User updated.");
- res.status(500).send({ error: "No users updated." });
+ logger.error('No User updated.');
+ res.status(500).send({ error: 'No users updated.' });
}
} catch (updateError) {
- logger.error("Error updating user:", updateError);
+ logger.error('Error updating user:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching user:", fetchError);
+ logger.error('Error fetching user:', fetchError);
res.status(500).send({ error: fetchError.message });
}
-};
\ No newline at end of file
+};
diff --git a/src/services/management/vendors.js b/src/services/management/vendors.js
index d262b8b..52db923 100644
--- a/src/services/management/vendors.js
+++ b/src/services/management/vendors.js
@@ -1,14 +1,12 @@
-import dotenv from "dotenv";
-import { vendorModel } from "../../schemas/management/vendor.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-
+import dotenv from 'dotenv';
+import { vendorModel } from '../../schemas/management/vendor.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Vendors");
+const logger = log4js.getLogger('Vendors');
logger.level = process.env.LOG_LEVEL;
export const listVendorsRouteHandler = async (
@@ -16,8 +14,8 @@ export const listVendorsRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -31,9 +29,9 @@ export const listVendorsRouteHandler = async (
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 } });
}
@@ -45,13 +43,10 @@ export const listVendorsRouteHandler = async (
vendor = await vendorModel.aggregate(aggregateCommand);
- logger.trace(
- `List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`,
- vendor,
- );
+ logger.trace(`List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`, vendor);
res.send(vendor);
} catch (error) {
- logger.error("Error listing vendors:", error);
+ logger.error('Error listing vendors:', error);
res.status(500).send({ error: error });
}
};
@@ -67,18 +62,20 @@ export const getVendorRouteHandler = async (req, res) => {
if (!vendor) {
logger.warn(`Vendor not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Vendor not found.' });
}
logger.trace(`Vendor with ID: ${id}:`, vendor);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...vendor._doc, auditLogs: auditLogs});
+ res.send({ ...vendor._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching Vendor:", error);
+ logger.error('Error fetching Vendor:', error);
res.status(500).send({ error: error.message });
}
};
@@ -93,7 +90,7 @@ export const editVendorRouteHandler = async (req, res) => {
if (!vendor) {
// Error handling
logger.warn(`Vendor not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Vendor not found.' });
}
logger.trace(`Vendor with ID: ${id}:`, vendor);
@@ -109,33 +106,23 @@ export const editVendorRouteHandler = async (req, res) => {
email: req.body.email,
};
- console.log(req.user)
+ console.log(req.user);
// Create audit log before updating
- await newAuditLog(
- vendor.toObject(),
- updateData,
- id,
- 'Vendor',
- req.user._id,
- 'User'
- );
+ await newAuditLog(vendor.toObject(), updateData, id, 'Vendor', req.user._id, 'User');
- const result = await vendorModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await vendorModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No Vendor updated.");
- res.status(500).send({ error: "No vendors updated." });
+ logger.error('No Vendor updated.');
+ res.status(500).send({ error: 'No vendors updated.' });
}
} catch (updateError) {
- logger.error("Error updating vendor:", updateError);
+ logger.error('Error updating vendor:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching vendor:", fetchError);
+ logger.error('Error fetching vendor:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -147,8 +134,8 @@ export const newVendorRouteHandler = async (req, res) => {
const result = await vendorModel.create(newVendor);
if (result.nCreated === 0) {
- logger.error("No vendor created.");
- res.status(500).send({ error: "No vendor created." });
+ logger.error('No vendor created.');
+ res.status(500).send({ error: 'No vendor created.' });
}
// Create audit log for new vendor
@@ -161,9 +148,9 @@ export const newVendorRouteHandler = async (req, res) => {
'User'
);
- res.status(200).send({ status: "ok" });
+ res.status(200).send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error updating vendor:", updateError);
+ logger.error('Error updating vendor:', updateError);
res.status(500).send({ error: updateError.message });
}
};
diff --git a/src/services/misc/auth.js b/src/services/misc/auth.js
index 8175577..6ea5f5d 100644
--- a/src/services/misc/auth.js
+++ b/src/services/misc/auth.js
@@ -1,37 +1,34 @@
-import dotenv from "dotenv";
-import { keycloak } from "../../keycloak.js";
-import log4js from "log4js";
-import axios from "axios";
-import { userModel } from "../../schemas/management/user.schema.js";
-
+import dotenv from 'dotenv';
+import { keycloak } from '../../keycloak.js';
+import log4js from 'log4js';
+import axios from 'axios';
+import { userModel } from '../../schemas/management/user.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Auth");
+const logger = log4js.getLogger('Auth');
logger.level = process.env.LOG_LEVEL;
// Login handler
export const loginRouteHandler = (req, res) => {
// Get the redirect URL from form data or default to production overview
- const redirectUrl = req.query.redirect_uri || "/production/overview";
+ const redirectUrl = req.query.redirect_uri || '/production/overview';
// Store the original URL to redirect after login
const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
- const callbackUrl = encodeURIComponent(
- `${process.env.APP_URL_API}/auth/callback`,
- );
+ const callbackUrl = encodeURIComponent(`${process.env.APP_URL_API}/auth/callback`);
const state = encodeURIComponent(redirectUrl);
logger.warn(req.query.redirect_uri);
res.redirect(
- `${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`,
+ `${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`
);
};
// Function to fetch user from Keycloak and store in database and session
const fetchAndStoreUser = async (req, token) => {
const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;
-
+
try {
const response = await axios.post(
userInfoUrl,
@@ -60,13 +57,13 @@ const fetchAndStoreUser = async (req, token) => {
// Create or update user in database
const user = await createOrUpdateUser(userInfo);
const fullUserInfo = { ...userInfo, _id: user._id };
-
+
// Store user info in session
req.session.user = fullUserInfo;
-
+
return fullUserInfo;
} catch (error) {
- logger.error("Error fetching and storing user:", error);
+ logger.error('Error fetching and storing user:', error);
throw error;
}
};
@@ -77,22 +74,22 @@ export const loginCallbackRouteHandler = (req, res) => {
// Extract the code and state from the query parameters
const code = req.query.code;
- const state = req.query.state || "/production/overview";
+ const state = req.query.state || '/production/overview';
if (!code) {
- return res.status(400).send("Authorization code missing");
+ return res.status(400).send('Authorization code missing');
}
// Exchange the code for tokens manually
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
- const redirectUri = `${process.env.APP_URL_API || "http://localhost:8080"}/auth/callback`;
+ const redirectUri = `${process.env.APP_URL_API || 'http://localhost:8080'}/auth/callback`;
// Make a POST request to exchange the code for tokens
axios
.post(
tokenUrl,
new URLSearchParams({
- grant_type: "authorization_code",
+ grant_type: 'authorization_code',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
code: code,
@@ -100,9 +97,9 @@ export const loginCallbackRouteHandler = (req, res) => {
}).toString(),
{
headers: {
- "Content-Type": "application/x-www-form-urlencoded",
+ 'Content-Type': 'application/x-www-form-urlencoded',
},
- },
+ }
)
.then(async (response) => {
// Store tokens in session
@@ -112,29 +109,24 @@ export const loginCallbackRouteHandler = (req, res) => {
id_token: response.data.id_token,
expires_at: new Date().getTime() + response.data.expires_in * 1000,
};
- req.session["keycloak-token"] = tokenData;
+ req.session['keycloak-token'] = tokenData;
try {
// Fetch and store user data
await fetchAndStoreUser(req, tokenData);
-
+
// Save session and redirect to the original URL
req.session.save(() => {
- res.redirect(
- (process.env.APP_URL_CLIENT || "http://localhost:3000") + state,
- );
+ res.redirect((process.env.APP_URL_CLIENT || 'http://localhost:3000') + state);
});
} catch (error) {
- logger.error("Error during user setup:", error);
- res.status(500).send("Error setting up user session");
+ logger.error('Error during user setup:', error);
+ res.status(500).send('Error setting up user session');
}
})
.catch((error) => {
- console.error(
- "Token exchange error:",
- error.response?.data || error.message,
- );
- res.status(500).send("Authentication failed");
+ console.error('Token exchange error:', error.response?.data || error.message);
+ res.status(500).send('Authentication failed');
});
};
@@ -142,18 +134,18 @@ export const loginCallbackRouteHandler = (req, res) => {
const createOrUpdateUser = async (userInfo) => {
try {
const { username, email, name, firstName, lastName } = userInfo;
-
+
// Find existing user by username
const existingUser = await userModel.findOne({ username });
-
+
if (existingUser) {
// Check if any values have changed
- const hasChanges =
+ const hasChanges =
existingUser.email !== email ||
existingUser.name !== name ||
existingUser.firstName !== firstName ||
existingUser.lastName !== lastName;
-
+
if (hasChanges) {
// Update existing user only if there are changes
const updateData = {
@@ -161,18 +153,15 @@ const createOrUpdateUser = async (userInfo) => {
name,
firstName,
lastName,
- updatedAt: new Date()
+ updatedAt: new Date(),
};
-
- await userModel.updateOne(
- { username },
- { $set: updateData }
- );
-
+
+ await userModel.updateOne({ username }, { $set: updateData });
+
// Fetch the updated user to return
return await userModel.findOne({ username });
}
-
+
return existingUser;
} else {
// Create new user
@@ -181,14 +170,14 @@ const createOrUpdateUser = async (userInfo) => {
email,
name,
firstName,
- lastName
+ lastName,
});
-
+
await newUser.save();
return newUser;
}
} catch (error) {
- logger.error("Error creating/updating user:", error);
+ logger.error('Error creating/updating user:', error);
throw error;
}
};
@@ -197,31 +186,29 @@ export const userRouteHandler = (req, res) => {
if (req.session && req.session.user) {
res.json(req.session.user);
} else {
- res.status(401).json({ error: "Not authenticated" });
+ res.status(401).json({ error: 'Not authenticated' });
}
};
// Logout handler
export const logoutRouteHandler = (req, res) => {
// Get the redirect URL from query or default to login page
- const redirectUrl = req.query.redirect_uri || "/login";
+ const redirectUrl = req.query.redirect_uri || '/login';
// Destroy the session
req.session.destroy((err) => {
if (err) {
- logger.error("Error destroying session:", err);
- return res.status(500).json({ error: "Failed to logout" });
+ logger.error('Error destroying session:', err);
+ return res.status(500).json({ error: 'Failed to logout' });
}
// Construct the Keycloak logout URL with the redirect URI
const logoutUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/logout`;
- const encodedRedirectUri = encodeURIComponent(
- `${process.env.APP_URL_CLIENT}${redirectUrl}`,
- );
+ const encodedRedirectUri = encodeURIComponent(`${process.env.APP_URL_CLIENT}${redirectUrl}`);
// Redirect to Keycloak logout with the redirect URI
res.redirect(
- `${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`,
+ `${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`
);
});
};
@@ -245,36 +232,32 @@ export const getUserInfoHandler = (req, res) => {
email: token.content.email,
name:
token.content.name ||
- `${token.content.given_name || ""} ${token.content.family_name || ""}`.trim(),
+ `${token.content.given_name || ''} ${token.content.family_name || ''}`.trim(),
roles: token.content.realm_access?.roles || [],
};
return res.json(userInfo);
}
- return res.status(401).json({ error: "Not authenticated" });
+ return res.status(401).json({ error: 'Not authenticated' });
};
// Register route - Since we're using Keycloak, registration should be handled there
// This endpoint will redirect to Keycloak's registration page
export const registerRouteHandler = (req, res) => {
const registrationUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/registrations`;
- const redirectUri = encodeURIComponent(
- process.env.APP_URL_CLIENT + "/auth/login",
- );
+ const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
res.redirect(
- `${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
+ `${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
);
};
// Forgot password handler - redirect to Keycloak's reset password page
export const forgotPasswordRouteHandler = (req, res) => {
const resetUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/login-actions/reset-credentials`;
- const redirectUri = encodeURIComponent(
- process.env.APP_URL_CLIENT + "/auth/login",
- );
+ const redirectUri = encodeURIComponent(process.env.APP_URL_CLIENT + '/auth/login');
res.redirect(
- `${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
+ `${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`
);
};
@@ -282,34 +265,34 @@ export const forgotPasswordRouteHandler = (req, res) => {
export const refreshTokenRouteHandler = (req, res) => {
if (
!req.session ||
- !req.session["keycloak-token"] ||
- !req.session["keycloak-token"].refresh_token
+ !req.session['keycloak-token'] ||
+ !req.session['keycloak-token'].refresh_token
) {
- return res.status(401).json({ error: "No refresh token available" });
+ return res.status(401).json({ error: 'No refresh token available' });
}
- const refreshToken = req.session["keycloak-token"].refresh_token;
+ const refreshToken = req.session['keycloak-token'].refresh_token;
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
axios
.post(
tokenUrl,
new URLSearchParams({
- grant_type: "refresh_token",
+ grant_type: 'refresh_token',
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
refresh_token: refreshToken,
}).toString(),
{
headers: {
- "Content-Type": "application/x-www-form-urlencoded",
+ 'Content-Type': 'application/x-www-form-urlencoded',
},
- },
+ }
)
.then((response) => {
// Update session with new tokens
- req.session["keycloak-token"] = {
- ...req.session["keycloak-token"],
+ req.session['keycloak-token'] = {
+ ...req.session['keycloak-token'],
access_token: response.data.access_token,
refresh_token: response.data.refresh_token,
expires_at: new Date().getTime() + response.data.expires_in * 1000,
@@ -319,22 +302,19 @@ export const refreshTokenRouteHandler = (req, res) => {
req.session.save(() => {
res.json({
access_token: response.data.access_token,
- expires_at: req.session["keycloak-token"].expires_at,
+ expires_at: req.session['keycloak-token'].expires_at,
});
});
})
.catch((error) => {
- logger.error(
- "Token refresh error:",
- error.response?.data || error.message,
- );
+ logger.error('Token refresh error:', error.response?.data || error.message);
// If refresh token is invalid, clear the session
if (error.response?.status === 400) {
req.session.destroy();
}
- res.status(500).json({ error: "Failed to refresh token" });
+ res.status(500).json({ error: 'Failed to refresh token' });
});
};
diff --git a/src/services/misc/notes.js b/src/services/misc/notes.js
index 0a54f60..da877f8 100644
--- a/src/services/misc/notes.js
+++ b/src/services/misc/notes.js
@@ -1,13 +1,12 @@
-import dotenv from "dotenv";
-import { noteModel } from "../../schemas/misc/note.schema.js";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-
+import dotenv from 'dotenv';
+import { noteModel } from '../../schemas/misc/note.schema.js';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Notes");
+const logger = log4js.getLogger('Notes');
logger.level = process.env.LOG_LEVEL;
export const listNotesRouteHandler = async (
@@ -15,8 +14,8 @@ export const listNotesRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
- filter = {},
+ property = '',
+ filter = {}
) => {
try {
const skip = (page - 1) * limit;
@@ -24,56 +23,52 @@ export const listNotesRouteHandler = async (
let aggregateCommand = [];
if (Object.keys(filter).length > 0) {
- aggregateCommand.push({ $match: filter });
- }
+ aggregateCommand.push({ $match: filter });
+ }
aggregateCommand.push({
- $lookup: {
- from: "users", // The collection name (usually lowercase plural)
- localField: "user", // The field in your current model
- foreignField: "_id", // The field in the users collection
- as: "user", // The output field name
- },
- });
- aggregateCommand.push({ $unwind: "$user" });
- aggregateCommand.push({
- $lookup: {
- from: "notetypes", // The collection name (usually lowercase plural)
- localField: "noteType", // The field in your current model
- foreignField: "_id", // The field in the users collection
- as: "noteType", // The output field name
- },
- });
- aggregateCommand.push({ $unwind: "$noteType" });
- aggregateCommand.push({
- $project: {
- name: 1,
- _id: 1,
- createdAt: 1,
- updatedAt: 1,
- "noteType._id": 1,
- "noteType.name": 1,
- "noteType.color": 1,
- "user._id": 1,
- "user.name": 1,
- content: 1,
- parent: 1
- },
- });
+ $lookup: {
+ from: 'users', // The collection name (usually lowercase plural)
+ localField: 'user', // The field in your current model
+ foreignField: '_id', // The field in the users collection
+ as: 'user', // The output field name
+ },
+ });
+ aggregateCommand.push({ $unwind: '$user' });
+ aggregateCommand.push({
+ $lookup: {
+ from: 'notetypes', // The collection name (usually lowercase plural)
+ localField: 'noteType', // The field in your current model
+        foreignField: '_id', // The field in the notetypes collection
+ as: 'noteType', // The output field name
+ },
+ });
+ aggregateCommand.push({ $unwind: '$noteType' });
+ aggregateCommand.push({
+ $project: {
+ name: 1,
+ _id: 1,
+ createdAt: 1,
+ updatedAt: 1,
+ 'noteType._id': 1,
+ 'noteType.name': 1,
+ 'noteType.color': 1,
+ 'user._id': 1,
+ 'user.name': 1,
+ content: 1,
+ parent: 1,
+ },
+ });
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
notes = await noteModel.aggregate(aggregateCommand);
-
- logger.trace(
- `List of notes (Page ${page}, Limit ${limit}, Property ${property}):`,
- notes,
- );
+ logger.trace(`List of notes (Page ${page}, Limit ${limit}, Property ${property}):`, notes);
res.send(notes);
} catch (error) {
- logger.error("Error listing notes:", error);
+ logger.error('Error listing notes:', error);
res.status(500).send({ error: error });
}
};
@@ -87,18 +82,20 @@ export const getNoteRouteHandler = async (req, res) => {
if (!note) {
logger.warn(`Note not found with supplied id.`);
- return res.status(404).send({ error: "Note not found." });
+ return res.status(404).send({ error: 'Note not found.' });
}
logger.trace(`Note with ID: ${id}:`, note);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...note._doc, auditLogs: auditLogs});
+ res.send({ ...note._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching note:", error);
+ logger.error('Error fetching note:', error);
res.status(500).send({ error: error.message });
}
};
@@ -110,7 +107,7 @@ export const editNoteRouteHandler = async (req, res) => {
if (!note) {
logger.warn(`Note not found with supplied id.`);
- return res.status(404).send({ error: "Note not found." });
+ return res.status(404).send({ error: 'Note not found.' });
}
logger.trace(`Note with ID: ${id}:`, note);
@@ -124,30 +121,20 @@ export const editNoteRouteHandler = async (req, res) => {
};
// Create audit log before updating
- await newAuditLog(
- note.toObject(),
- updateData,
- id,
- 'Note',
- req.user._id,
- 'User'
- );
+ await newAuditLog(note.toObject(), updateData, id, 'Note', req.user._id, 'User');
- const result = await noteModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await noteModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No note updated.");
- res.status(500).send({ error: "No notes updated." });
+ logger.error('No note updated.');
+ res.status(500).send({ error: 'No notes updated.' });
}
} catch (updateError) {
- logger.error("Error updating note:", updateError);
+ logger.error('Error updating note:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching note:", fetchError);
+ logger.error('Error fetching note:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -159,23 +146,16 @@ export const newNoteRouteHandler = async (req, res) => {
const result = await noteModel.create(newNote);
if (result.nCreated === 0) {
- logger.error("No note created.");
- res.status(500).send({ error: "No note created." });
+ logger.error('No note created.');
+ res.status(500).send({ error: 'No note created.' });
}
// Create audit log for new note
- await newAuditLog(
- {},
- newNote,
- result._id,
- 'Note',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newNote, result._id, 'Note', req.user._id, 'User');
- res.status(200).send({ status: "ok" });
+ res.status(200).send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error creating note:", updateError);
+ logger.error('Error creating note:', updateError);
res.status(500).send({ error: updateError.message });
}
};
@@ -187,13 +167,13 @@ export const deleteNoteRouteHandler = async (req, res) => {
if (!note) {
logger.warn(`Note not found with supplied id.`);
- return res.status(404).send({ error: "Note not found." });
+ return res.status(404).send({ error: 'Note not found.' });
}
// Check if the current user owns this note
if (note.user.toString() !== req.user._id.toString()) {
logger.warn(`User ${req.user._id} attempted to delete note ${id} owned by user ${note.user}`);
- return res.status(403).send({ error: "You can only delete your own notes." });
+ return res.status(403).send({ error: 'You can only delete your own notes.' });
}
logger.trace(`Deleting note with ID: ${id} and all its children`);
@@ -202,25 +182,16 @@ export const deleteNoteRouteHandler = async (req, res) => {
const deletedNoteIds = await recursivelyDeleteNotes(id);
// Create audit log for the deletion
- await newAuditLog(
- note.toObject(),
- {},
- id,
- 'Note',
- req.user._id,
- 'User',
- 'DELETE'
- );
+ await newAuditLog(note.toObject(), {}, id, 'Note', req.user._id, 'User', 'DELETE');
logger.info(`Successfully deleted note ${id} and ${deletedNoteIds.length - 1} child notes`);
- res.send({
- status: "ok",
+ res.send({
+ status: 'ok',
deletedNoteIds: deletedNoteIds,
- message: `Deleted ${deletedNoteIds.length} notes`
+ message: `Deleted ${deletedNoteIds.length} notes`,
});
-
} catch (error) {
- logger.error("Error deleting note:", error);
+ logger.error('Error deleting note:', error);
res.status(500).send({ error: error.message });
}
};
@@ -228,19 +199,19 @@ export const deleteNoteRouteHandler = async (req, res) => {
// Helper function to recursively delete notes and their children
const recursivelyDeleteNotes = async (noteId) => {
const deletedIds = [];
-
+
// Find all notes that have this note as their parent
const childNotes = await noteModel.find({ parent: noteId });
-
+
// Recursively delete all children first
for (const childNote of childNotes) {
const childDeletedIds = await recursivelyDeleteNotes(childNote._id);
deletedIds.push(...childDeletedIds);
}
-
+
// Delete the current note
await noteModel.deleteOne({ _id: noteId });
deletedIds.push(noteId);
-
+
return deletedIds;
-};
\ No newline at end of file
+};
diff --git a/src/services/misc/spotlight.js b/src/services/misc/spotlight.js
index d20df4f..9acc97e 100644
--- a/src/services/misc/spotlight.js
+++ b/src/services/misc/spotlight.js
@@ -1,26 +1,25 @@
-import dotenv from "dotenv";
-import log4js from "log4js";
-import mongoose from "mongoose";
-import { jobModel } from "../../schemas/production/job.schema.js";
-import { subJobModel } from "../../schemas/production/subjob.schema.js";
-import { printerModel } from "../../schemas/production/printer.schema.js";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
-import { partModel } from "../../schemas/management/part.schema.js";
-import { productModel } from "../../schemas/management/product.schema.js";
-import { vendorModel } from "../../schemas/management/vendor.schema.js";
-import { filamentStockModel } from "../../schemas/inventory/filamentstock.schema.js";
-import { stockEventModel } from "../../schemas/inventory/stockevent.schema.js";
-import { stockAuditModel } from "../../schemas/inventory/stockaudit.schema.js";
-import { partStockModel } from "../../schemas/inventory/partstock.schema.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-import { userModel } from "../../schemas/management/user.schema.js";
-import { noteTypeModel } from "../../schemas/management/notetype.schema.js";
-import { noteModel } from "../../schemas/misc/note.schema.js";
-
+import dotenv from 'dotenv';
+import log4js from 'log4js';
+import mongoose from 'mongoose';
+import { jobModel } from '../../schemas/production/job.schema.js';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
+import { printerModel } from '../../schemas/production/printer.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
+import { partModel } from '../../schemas/management/part.schema.js';
+import { productModel } from '../../schemas/management/product.schema.js';
+import { vendorModel } from '../../schemas/management/vendor.schema.js';
+import { filamentStockModel } from '../../schemas/inventory/filamentstock.schema.js';
+import { stockEventModel } from '../../schemas/inventory/stockevent.schema.js';
+import { stockAuditModel } from '../../schemas/inventory/stockaudit.schema.js';
+import { partStockModel } from '../../schemas/inventory/partstock.schema.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
+import { userModel } from '../../schemas/management/user.schema.js';
+import { noteTypeModel } from '../../schemas/management/notetype.schema.js';
+import { noteModel } from '../../schemas/misc/note.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Jobs");
+const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
// Map prefixes to models and id fields
@@ -48,11 +47,11 @@ const PREFIX_MODEL_MAP = {
// Helper function to build search filter from query parameters
const buildSearchFilter = (params) => {
const filter = {};
-
+
for (const [key, value] of Object.entries(params)) {
// Skip pagination and limit parameters as they're not search filters
if (key === 'limit' || key === 'page') continue;
-
+
// Handle different field types
if (key === 'name') {
filter.name = { $regex: value, $options: 'i' }; // Case-insensitive search
@@ -72,21 +71,21 @@ const buildSearchFilter = (params) => {
filter[key] = { $regex: value, $options: 'i' };
}
}
-
+
return filter;
};
const trimSpotlightObject = (object) => {
-return {
- _id: object._id,
- name: object.name || undefined,
- state: object.state && object?.state.type? { type: object.state.type } : undefined,
- tags: object.tags || undefined,
- email: object.email || undefined,
- color: object.color || undefined,
- updatedAt: object.updatedAt || undefined,
- };
-}
+ return {
+ _id: object._id,
+ name: object.name || undefined,
+ state: object.state && object?.state.type ? { type: object.state.type } : undefined,
+ tags: object.tags || undefined,
+ email: object.email || undefined,
+ color: object.color || undefined,
+ updatedAt: object.updatedAt || undefined,
+ };
+};
export const getSpotlightRouteHandler = async (req, res) => {
try {
@@ -100,20 +99,20 @@ export const getSpotlightRouteHandler = async (req, res) => {
const delimiter = query.substring(3, 4);
const suffix = query.substring(4);
- if (delimiter == ":") {
+ if (delimiter == ':') {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
if (!prefixEntry || !prefixEntry.model) {
- res.status(400).send({ error: "Invalid or unsupported prefix" });
+ res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model, idField } = prefixEntry;
-
+
// Validate ObjectId if the idField is '_id'
if (idField === '_id' && !mongoose.Types.ObjectId.isValid(suffix)) {
res.status(404).send({ error: `${prefix} not found` });
return;
}
-
+
// Find the object by the correct field
const queryObj = {};
queryObj[idField] = suffix.toLowerCase();
@@ -123,47 +122,44 @@ export const getSpotlightRouteHandler = async (req, res) => {
return;
}
// Build the response with only the required fields
- const response = trimSpotlightObject(doc)
+ const response = trimSpotlightObject(doc);
res.status(200).send(response);
return;
}
- console.log(queryParams)
+ console.log(queryParams);
if (Object.keys(queryParams).length > 0) {
const prefixEntry = PREFIX_MODEL_MAP[prefix];
- console.log(prefixEntry)
+ console.log(prefixEntry);
if (!prefixEntry || !prefixEntry.model) {
- res.status(400).send({ error: "Invalid or unsupported prefix" });
+ res.status(400).send({ error: 'Invalid or unsupported prefix' });
return;
}
const { model } = prefixEntry;
-
+
// Use req.query for search parameters
-
+
if (Object.keys(queryParams).length === 0) {
- res.status(400).send({ error: "No search parameters provided" });
+ res.status(400).send({ error: 'No search parameters provided' });
return;
}
-
+
// Build search filter
const searchFilter = buildSearchFilter(queryParams);
-
+
// Perform search with limit
const limit = parseInt(req.query.limit) || 10;
- const docs = await model.find(searchFilter)
- .limit(limit)
- .sort({ updatedAt: -1 })
- .lean();
-
+ const docs = await model.find(searchFilter).limit(limit).sort({ updatedAt: -1 }).lean();
+
// Format response
- const response = docs.map(doc => (trimSpotlightObject(doc)));
-
+ const response = docs.map((doc) => trimSpotlightObject(doc));
+
res.status(200).send(response);
return;
}
} catch (error) {
- logger.error("Error in spotlight lookup:", error);
+ logger.error('Error in spotlight lookup:', error);
res.status(500).send({ error: error });
}
};
diff --git a/src/services/production/gcodefiles.js b/src/services/production/gcodefiles.js
index f4455e0..32deba1 100644
--- a/src/services/production/gcodefiles.js
+++ b/src/services/production/gcodefiles.js
@@ -1,21 +1,18 @@
-import dotenv from "dotenv";
-import { gcodeFileModel } from "../../schemas/production/gcodefile.schema.js";
-import { filamentModel } from "../../schemas/management/filament.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import multer from "multer";
-import crypto from "crypto";
-import path from "path";
-import fs from "fs";
-import mongoose from "mongoose";
-import { newAuditLog } from "../../util/index.js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
-
-import { extractConfigBlock } from "../../util/index.js";
+import dotenv from 'dotenv';
+import { gcodeFileModel } from '../../schemas/production/gcodefile.schema.js';
+import { filamentModel } from '../../schemas/management/filament.schema.js';
+import log4js from 'log4js';
+import multer from 'multer';
+import path from 'path';
+import fs from 'fs';
+import mongoose from 'mongoose';
+import { newAuditLog } from '../../utils.js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
+import { extractConfigBlock } from '../../utils.js';
dotenv.config();
-const logger = log4js.getLogger("GCodeFiles");
+const logger = log4js.getLogger('GCodeFiles');
logger.level = process.env.LOG_LEVEL;
// Set storage engine
@@ -23,7 +20,7 @@ const gcodeStorage = multer.diskStorage({
destination: process.env.GCODE_STORAGE,
filename: async function (req, file, cb) {
// Retrieve custom file name from request body
- const customFileName = req.params.id || "default"; // Default to 'default' if not provided
+ const customFileName = req.params.id || 'default'; // Default to 'default' if not provided
// Create the final filename ensuring it ends with .gcode
const finalFilename = `${customFileName}.gcode`;
@@ -40,7 +37,7 @@ const gcodeUpload = multer({
fileFilter: function (req, file, cb) {
checkFileType(file, cb);
},
-}).single("gcodeFile"); // The name attribute of the file input in the HTML form
+}).single('gcodeFile'); // The name attribute of the file input in the HTML form
// Check file type
function checkFileType(file, cb) {
@@ -53,7 +50,7 @@ function checkFileType(file, cb) {
console.log(file);
return cb(null, true);
} else {
- cb("Error: .g, .gco, and .gcode files only!");
+ cb('Error: .g, .gco, and .gcode files only!');
}
}
@@ -62,11 +59,11 @@ export const listGCodeFilesRouteHandler = async (
res,
page = 1,
limit = 25,
- property = "",
+ property = '',
filter = {},
- search = "",
- sort = "",
- order = "ascend"
+ search = '',
+ sort = '',
+ order = 'ascend'
) => {
try {
// Calculate the skip value based on the page number and limit
@@ -88,60 +85,60 @@ export const listGCodeFilesRouteHandler = async (
aggregateCommand.push({
$lookup: {
- from: "filaments", // The name of the Filament collection
- localField: "filament",
- foreignField: "_id",
- as: "filament",
+ from: 'filaments', // The name of the Filament collection
+ localField: 'filament',
+ foreignField: '_id',
+ as: 'filament',
},
});
aggregateCommand.push({
$unwind: {
- path: "$filament",
+ path: '$filament',
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
},
});
aggregateCommand.push({
$addFields: {
- filament: "$filament",
+ filament: '$filament',
},
});
aggregateCommand.push({
$lookup: {
- from: "vendors", // The collection name (usually lowercase plural)
- localField: "filament.vendor", // The field in your current model
- foreignField: "_id", // The field in the products collection
- as: "filament.vendor", // The output field name
+ from: 'vendors', // The collection name (usually lowercase plural)
+ localField: 'filament.vendor', // The field in your current model
+        foreignField: '_id', // The field in the vendors collection
+ as: 'filament.vendor', // The output field name
},
});
- aggregateCommand.push({ $unwind: "$filament.vendor" });
+ aggregateCommand.push({ $unwind: '$filament.vendor' });
if (filter != {}) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
- if (property != "") {
+ if (property != '') {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
- aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+ aggregateCommand.push({ $project: { _id: 0, [property]: '$_id' } }); // rename _id to the property name
} else {
aggregateCommand.push({
$project: {
- "filament.gcodeFileInfo.estimatedPrintingTimeNormalMode": 0,
+ 'filament.gcodeFileInfo.estimatedPrintingTimeNormalMode': 0,
url: 0,
- "filament.image": 0,
- "filament.createdAt": 0,
- "filament.updatedAt": 0,
+ 'filament.image': 0,
+ 'filament.createdAt': 0,
+ 'filament.updatedAt': 0,
},
});
}
// Add sorting if sort parameter is provided
if (sort) {
- const sortOrder = order === "descend" ? -1 : 1;
+ const sortOrder = order === 'descend' ? -1 : 1;
aggregateCommand.push({ $sort: { [sort]: sortOrder } });
}
@@ -154,11 +151,11 @@ export const listGCodeFilesRouteHandler = async (
logger.trace(
`List of gcode files (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
- gcodeFile,
+ gcodeFile
);
res.send(gcodeFile);
} catch (error) {
- logger.error("Error listing gcode files:", error);
+ logger.error('Error listing gcode files:', error);
res.status(500).send({ error: error });
}
};
@@ -174,25 +171,22 @@ export const getGCodeFileContentRouteHandler = async (req, res) => {
if (!gcodeFile) {
logger.warn(`GCodeFile not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Returning GCode File contents with ID: ${id}:`);
- const filePath = path.join(
- process.env.GCODE_STORAGE,
- gcodeFile.gcodeFileName,
- );
+ const filePath = path.join(process.env.GCODE_STORAGE, gcodeFile.gcodeFileName);
// Read the file
- fs.readFile(filePath, "utf8", (err, data) => {
+ fs.readFile(filePath, 'utf8', (err, data) => {
if (err) {
- if (err.code === "ENOENT") {
+ if (err.code === 'ENOENT') {
// File not found
- return res.status(404).send({ error: "File not found!" });
+ return res.status(404).send({ error: 'File not found!' });
} else {
// Other errors
- return res.status(500).send({ error: "Error reading file." });
+ return res.status(500).send({ error: 'Error reading file.' });
}
}
@@ -200,7 +194,7 @@ export const getGCodeFileContentRouteHandler = async (req, res) => {
res.send(data);
});
} catch (error) {
- logger.error("Error fetching GCodeFile:", error);
+ logger.error('Error fetching GCodeFile:', error);
res.status(500).send({ error: error.message });
}
};
@@ -215,7 +209,7 @@ export const editGCodeFileRouteHandler = async (req, res) => {
if (!gcodeFile) {
// Error handling
logger.warn(`GCodeFile not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
@@ -228,30 +222,20 @@ export const editGCodeFileRouteHandler = async (req, res) => {
};
// Create audit log before updating
- await newAuditLog(
- gcodeFile.toObject(),
- updateData,
- id,
- 'GCodeFile',
- req.user._id,
- 'User'
- );
+ await newAuditLog(gcodeFile.toObject(), updateData, id, 'GCodeFile', req.user._id, 'User');
- const result = await gcodeFileModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await gcodeFileModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No gcodeFile updated.");
- res.status(500).send({ error: "No gcodeFiles updated." });
+ logger.error('No gcodeFile updated.');
+ res.status(500).send({ error: 'No gcodeFiles updated.' });
}
} catch (updateError) {
- logger.error("Error updating gcodeFile:", updateError);
+ logger.error('Error updating gcodeFile:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching gcodeFile:", fetchError);
+ logger.error('Error fetching gcodeFile:', fetchError);
//res.status(500).send({ error: fetchError.message });
}
};
@@ -269,11 +253,11 @@ export const newGCodeFileRouteHandler = async (req, res) => {
if (!filament) {
logger.warn(`Filament not found with supplied id.`);
- return res.status(404).send({ error: "Filament not found." });
+ return res.status(404).send({ error: 'Filament not found.' });
}
logger.trace(`Filament with ID: ${id}:`, filament);
} catch (error) {
- logger.error("Error fetching filament:", error);
+ logger.error('Error fetching filament:', error);
return res.status(500).send({ error: error.message });
}
@@ -289,23 +273,16 @@ export const newGCodeFileRouteHandler = async (req, res) => {
const result = await gcodeFileModel.create(newGCodeFile);
if (result.nCreated === 0) {
- logger.error("No gcode file created.");
- res.status(500).send({ error: "No gcode file created." });
+ logger.error('No gcode file created.');
+ res.status(500).send({ error: 'No gcode file created.' });
}
// Create audit log for new gcodefile
- await newAuditLog(
- {},
- newGCodeFile,
- result._id,
- 'GCodeFile',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newGCodeFile, result._id, 'GCodeFile', req.user._id, 'User');
- res.status(200).send({ status: "ok" });
+ res.status(200).send({ status: 'ok' });
} catch (updateError) {
- logger.error("Error creating gcode file:", updateError);
+ logger.error('Error creating gcode file:', updateError);
res.status(500).send({ error: updateError.message });
}
};
@@ -322,7 +299,7 @@ export const parseGCodeFileHandler = async (req, res) => {
if (req.file == undefined) {
return res.send({
- message: "No file selected!",
+ message: 'No file selected!',
});
}
@@ -331,7 +308,7 @@ export const parseGCodeFileHandler = async (req, res) => {
const filePath = path.join(req.file.destination, req.file.filename);
// Read the file content
- const fileContent = fs.readFileSync(filePath, "utf8");
+ const fileContent = fs.readFileSync(filePath, 'utf8');
// Extract the config block
const configInfo = extractConfigBlock(fileContent);
@@ -342,12 +319,12 @@ export const parseGCodeFileHandler = async (req, res) => {
// Optionally clean up the file after processing if it's not needed
fs.unlinkSync(filePath);
} catch (parseError) {
- logger.error("Error parsing GCode file:", parseError);
+ logger.error('Error parsing GCode file:', parseError);
res.status(500).send({ error: parseError.message });
}
});
} catch (error) {
- logger.error("Error in parseGCodeFileHandler:", error);
+ logger.error('Error in parseGCodeFileHandler:', error);
res.status(500).send({ error: error.message });
}
};
@@ -361,7 +338,7 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
if (!gcodeFile) {
// Error handling
logger.warn(`GCodeFile not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`GCodeFile with ID: ${id}`);
try {
@@ -373,15 +350,9 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
} else {
if (req.file == undefined) {
res.send({
- message: "No file selected!",
+ message: 'No file selected!',
});
} else {
- // Get the path to the uploaded file
- const filePath = path.join(req.file.destination, req.file.filename);
-
- // Read the file content
- const fileContent = fs.readFileSync(filePath, "utf8");
-
// Update the gcodeFile document with the filename and the extracted config
const result = await gcodeFileModel.updateOne(
{ _id: id },
@@ -389,27 +360,27 @@ export const uploadGCodeFileContentRouteHandler = async (req, res) => {
$set: {
gcodeFileName: req.file.filename,
},
- },
+ }
);
if (result.nModified === 0) {
- logger.error("No gcodeFile updated.");
- res.status(500).send({ error: "No gcodeFiles updated." });
+ logger.error('No gcodeFile updated.');
+ res.status(500).send({ error: 'No gcodeFiles updated.' });
}
res.send({
- status: "OK",
+ status: 'OK',
file: `${req.file.filename}`,
});
}
}
});
} catch (updateError) {
- logger.error("Error updating gcodeFile:", updateError);
+ logger.error('Error updating gcodeFile:', updateError);
res.status(500).send({ error: updateError.message });
}
} catch (fetchError) {
- logger.error("Error fetching gcodeFile:", fetchError);
+ logger.error('Error fetching gcodeFile:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
@@ -423,22 +394,24 @@ export const getGCodeFileRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("filament");
+ .populate('filament');
if (!gcodeFile) {
logger.warn(`GCodeFile not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
- const auditLogs = await auditLogModel.find({
- target: id
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: id,
+ })
+ .populate('owner');
- res.send({...gcodeFile._doc, auditLogs: auditLogs});
+ res.send({ ...gcodeFile._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching GCodeFile:", error);
+ logger.error('Error fetching GCodeFile:', error);
res.status(500).send({ error: error.message });
}
};
diff --git a/src/services/production/jobs.js b/src/services/production/jobs.js
index 8c2f160..04f817d 100644
--- a/src/services/production/jobs.js
+++ b/src/services/production/jobs.js
@@ -1,23 +1,16 @@
-import dotenv from "dotenv";
-import mongoose from "mongoose";
-import { jobModel } from "../../schemas/production/job.schema.js";
-import { subJobModel } from "../../schemas/production/subjob.schema.js";
-import { noteModel } from "../../schemas/misc/note.schema.js";
-import jwt from "jsonwebtoken";
-import log4js from "log4js";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import mongoose from 'mongoose';
+import { jobModel } from '../../schemas/production/job.schema.js';
+import { subJobModel } from '../../schemas/production/subjob.schema.js';
+import log4js from 'log4js';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Jobs");
+const logger = log4js.getLogger('Jobs');
logger.level = process.env.LOG_LEVEL;
-export const listJobsRouteHandler = async (
- req,
- res,
- page = 1,
- limit = 25,
-) => {
+export const listJobsRouteHandler = async (req, res, page = 1, limit = 25) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
@@ -28,13 +21,13 @@ export const listJobsRouteHandler = async (
.sort({ createdAt: -1 })
.skip(skip)
.limit(limit)
- .populate("subJobs", "state")
- .populate("gcodeFile", "name");
+ .populate('subJobs', 'state')
+ .populate('gcodeFile', 'name');
logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
res.send(jobs);
} catch (error) {
- logger.error("Error listing print jobs:", error);
+ logger.error('Error listing print jobs:', error);
res.status(500).send({ error: error });
}
};
@@ -48,26 +41,28 @@ export const getJobRouteHandler = async (req, res) => {
.findOne({
_id: id,
})
- .populate("printers", "name state")
- .populate("gcodeFile")
- .populate("subJobs")
- .populate("notes");
+ .populate('printers', 'name state')
+ .populate('gcodeFile')
+ .populate('subJobs')
+ .populate('notes');
if (!job) {
logger.warn(`Job not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Job with ID: ${id}:`, job);
- const targetIds = [id, ...job.subJobs.map(subJob => subJob._id)];
- const auditLogs = await auditLogModel.find({
- target: { $in: targetIds.map(id => new mongoose.Types.ObjectId(id)) }
- }).populate('owner');
+ const targetIds = [id, ...job.subJobs.map((subJob) => subJob._id)];
+ const auditLogs = await auditLogModel
+ .find({
+ target: { $in: targetIds.map((id) => new mongoose.Types.ObjectId(id)) },
+ })
+ .populate('owner');
- res.send({...job._doc, auditLogs: auditLogs});
+ res.send({ ...job._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching job:", error);
+ logger.error('Error fetching job:', error);
res.status(500).send({ error: error.message });
}
};
@@ -82,27 +77,23 @@ export const editJobRouteHandler = async (req, res) => {
if (!job) {
logger.warn(`Job not found with supplied id.`);
- return res.status(404).send({ error: "Print job not found." });
+ return res.status(404).send({ error: 'Print job not found.' });
}
logger.trace(`Job with ID: ${id}:`, job);
- const { createdAt, updatedAt, started_at, status, ...updateData } =
- req.body;
+ const { createdAt, updatedAt, started_at, status, ...updateData } = req.body;
- const result = await jobModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await jobModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.warn("No jobs updated.");
- return res.status(400).send({ error: "No jobs updated." });
+ logger.warn('No jobs updated.');
+ return res.status(400).send({ error: 'No jobs updated.' });
}
- res.send({ message: "Print job updated successfully" });
+ res.send({ message: 'Print job updated successfully' });
} catch (error) {
- logger.error("Error updating job:", error);
+ logger.error('Error updating job:', error);
res.status(500).send({ error: error.message });
}
};
@@ -112,9 +103,7 @@ export const createJobRouteHandler = async (req, res) => {
const { gcodeFile, printers, quantity = 1 } = req.body;
if (!printers || printers.length === 0) {
- return res
- .status(400)
- .send({ error: "At least one printer must be specified" });
+ return res.status(400).send({ error: 'At least one printer must be specified' });
}
// Convert printer IDs to ObjectIds
@@ -122,14 +111,14 @@ export const createJobRouteHandler = async (req, res) => {
// Create new print job
const newJob = new jobModel({
- state: { type: "draft" },
+ state: { type: 'draft' },
printers: printerIds,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
quantity,
subJobs: [], // Initialize empty array for subjob references
createdAt: new Date(),
updatedAt: new Date(),
- startedAt: null
+ startedAt: null,
});
// Save the print job first to get its ID
@@ -143,25 +132,23 @@ export const createJobRouteHandler = async (req, res) => {
job: savedJob._id,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
subJobId: `subjob-${index + 1}`,
- state: { type: "draft" },
+ state: { type: 'draft' },
number: index + 1,
createdAt: new Date(),
updatedAt: new Date(),
});
return subJob.save();
- }),
+ })
);
// Update the print job with the subjob references
savedJob.subJobs = subJobs.map((subJob) => subJob._id);
await savedJob.save();
- logger.trace(
- `Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`,
- );
+ logger.trace(`Created new print job with ID: ${savedJob._id} and ${subJobs.length} subjobs`);
res.status(201).send({ job: savedJob, subJobs });
} catch (error) {
- logger.error("Error creating print job:", error);
+ logger.error('Error creating print job:', error);
res.status(500).send({ error: error.message });
}
};
@@ -171,10 +158,10 @@ export const getJobStatsRouteHandler = async (req, res) => {
const stats = await jobModel.aggregate([
{
$group: {
- _id: "$state.type",
- count: { $sum: 1 }
- }
- }
+ _id: '$state.type',
+ count: { $sum: 1 },
+ },
+ },
]);
// Transform the results into a more readable format
@@ -183,11 +170,10 @@ export const getJobStatsRouteHandler = async (req, res) => {
return acc;
}, {});
- logger.trace("Print job stats by state:", formattedStats);
+ logger.trace('Print job stats by state:', formattedStats);
res.send(formattedStats);
} catch (error) {
- logger.error("Error fetching print job stats:", error);
+ logger.error('Error fetching print job stats:', error);
res.status(500).send({ error: error.message });
}
};
-
diff --git a/src/services/production/printers.js b/src/services/production/printers.js
index 03f0e74..e281872 100644
--- a/src/services/production/printers.js
+++ b/src/services/production/printers.js
@@ -1,21 +1,16 @@
-import dotenv from "dotenv";
-import { printerModel } from "../../schemas/production/printer.schema.js";
-import log4js from "log4js";
-import { newAuditLog } from "../../util/index.js";
-import mongoose from "mongoose";
-import { auditLogModel } from "../../schemas/management/auditlog.schema.js";
+import dotenv from 'dotenv';
+import { printerModel } from '../../schemas/production/printer.schema.js';
+import log4js from 'log4js';
+import { newAuditLog } from '../../utils.js';
+import mongoose from 'mongoose';
+import { auditLogModel } from '../../schemas/management/auditlog.schema.js';
dotenv.config();
-const logger = log4js.getLogger("Printers");
+const logger = log4js.getLogger('Printers');
logger.level = process.env.LOG_LEVEL;
-export const listPrintersRouteHandler = async (
- req,
- res,
- page = 1,
- limit = 25,
-) => {
+export const listPrintersRouteHandler = async (req, res, page = 1, limit = 25) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
@@ -26,7 +21,7 @@ export const listPrintersRouteHandler = async (
logger.trace(`List of printers (Page ${page}, Limit ${limit}):`);
res.send(printers);
} catch (error) {
- logger.error("Error listing users:", error);
+ logger.error('Error listing printers:', error);
res.status(500).send({ error: error });
}
};
@@ -38,41 +33,45 @@ export const getPrinterRouteHandler = async (req, res) => {
// Fetch the printer with the given remote address
const printer = await printerModel
.findOne({ _id: id })
- .populate("subJobs")
- .populate("currentJob")
+ .populate('subJobs')
+ .populate('currentJob')
.populate({
- path: "currentJob",
+ path: 'currentJob',
populate: {
- path: "gcodeFile",
+ path: 'gcodeFile',
},
})
- .populate("currentSubJob")
+ .populate('currentSubJob')
.populate({
- path: "subJobs",
+ path: 'subJobs',
populate: {
- path: "job",
+ path: 'job',
},
})
- .populate("vendor")
- .populate({ path: "currentFilamentStock",
+ .populate('vendor')
+ .populate({
+ path: 'currentFilamentStock',
populate: {
- path: "filament",
- },})
+ path: 'filament',
+ },
+ });
if (!printer) {
logger.warn(`Printer with id ${id} not found.`);
- return res.status(404).send({ error: "Printer not found" });
+ return res.status(404).send({ error: 'Printer not found' });
}
logger.trace(`Printer with id ${id}:`, printer);
- const auditLogs = await auditLogModel.find({
- target: new mongoose.Types.ObjectId(id)
- }).populate('owner');
+ const auditLogs = await auditLogModel
+ .find({
+ target: new mongoose.Types.ObjectId(id),
+ })
+ .populate('owner');
- res.send({...printer._doc, auditLogs: auditLogs});
+ res.send({ ...printer._doc, auditLogs: auditLogs });
} catch (error) {
- logger.error("Error fetching printer:", error);
+ logger.error('Error fetching printer:', error);
res.status(500).send({ error: error.message });
}
};
@@ -82,10 +81,10 @@ export const editPrinterRouteHandler = async (req, res) => {
try {
// Fetch the printer first to get the old state
const printer = await printerModel.findOne({ _id: id });
-
+
if (!printer) {
logger.warn(`Printer not found with supplied id.`);
- return res.status(404).send({ error: "Printer not found." });
+ return res.status(404).send({ error: 'Printer not found.' });
}
try {
@@ -98,55 +97,41 @@ export const editPrinterRouteHandler = async (req, res) => {
};
// Create audit log before updating
- await newAuditLog(
- printer.toObject(),
- updateData,
- id,
- 'Printer',
- req.user._id,
- 'User'
- );
+ await newAuditLog(printer.toObject(), updateData, id, 'Printer', req.user._id, 'User');
- const result = await printerModel.updateOne(
- { _id: id },
- { $set: updateData },
- );
+ const result = await printerModel.updateOne({ _id: id }, { $set: updateData });
if (result.nModified === 0) {
- logger.error("No printers updated.");
- res.status(500).send({ error: "No printers updated." });
+ logger.error('No printers updated.');
+ return res.status(500).send({ error: 'No printers updated.' });
}
} catch (updateError) {
- logger.error("Error updating printer:", updateError);
+ logger.error('Error updating printer:', updateError);
res.status(500).send({ error: updateError.message });
}
- res.send("OK");
+ res.send('OK');
} catch (fetchError) {
- logger.error("Error fetching printer:", fetchError);
+ logger.error('Error fetching printer:', fetchError);
res.status(500).send({ error: fetchError.message });
}
};
export const createPrinterRouteHandler = async (req, res) => {
try {
- const { name, moonraker, tags = [], firmware = "n/a" } = req.body;
+ const { name, moonraker, tags = [], firmware = 'n/a' } = req.body;
// Validate required fields
if (!name || !moonraker) {
- logger.warn("Missing required fields in printer creation request");
+ logger.warn('Missing required fields in printer creation request');
return res.status(400).send({
- error:
- "Missing required fields. name and moonraker configuration are required.",
+ error: 'Missing required fields. name and moonraker configuration are required.',
});
}
// Validate moonraker configuration
if (!moonraker.host || !moonraker.port || !moonraker.protocol) {
- logger.warn(
- "Invalid moonraker configuration in printer creation request",
- );
+ logger.warn('Invalid moonraker configuration in printer creation request');
return res.status(400).send({
- error:
- "Invalid moonraker configuration. host, port, protocol are required.",
+ error: 'Invalid moonraker configuration. host, port, protocol are required.',
});
}
@@ -158,7 +143,7 @@ export const createPrinterRouteHandler = async (req, res) => {
firmware,
online: false,
state: {
- type: "offline",
+ type: 'offline',
},
});
@@ -166,19 +151,12 @@ export const createPrinterRouteHandler = async (req, res) => {
const savedPrinter = await newPrinter.save();
// Create audit log for new printer
- await newAuditLog(
- {},
- newPrinter.toObject(),
- savedPrinter._id,
- 'Printer',
- req.user._id,
- 'User'
- );
+ await newAuditLog({}, newPrinter.toObject(), savedPrinter._id, 'Printer', req.user._id, 'User');
logger.info(`Created new printer: ${name}`);
res.status(201).send(savedPrinter);
} catch (error) {
- logger.error("Error creating printer:", error);
+ logger.error('Error creating printer:', error);
res.status(500).send({ error: error.message });
}
};
@@ -188,10 +166,10 @@ export const getPrinterStatsRouteHandler = async (req, res) => {
const stats = await printerModel.aggregate([
{
$group: {
- _id: "$state.type",
- count: { $sum: 1 }
- }
- }
+ _id: '$state.type',
+ count: { $sum: 1 },
+ },
+ },
]);
// Transform the results into a more readable format
@@ -200,10 +178,10 @@ export const getPrinterStatsRouteHandler = async (req, res) => {
return acc;
}, {});
- logger.trace("Printer stats by state:", formattedStats);
+ logger.trace('Printer stats by state:', formattedStats);
res.send(formattedStats);
} catch (error) {
- logger.error("Error fetching printer stats:", error);
+ logger.error('Error fetching printer stats:', error);
res.status(500).send({ error: error.message });
}
};
diff --git a/src/util/index.js b/src/utils.js
similarity index 75%
rename from src/util/index.js
rename to src/utils.js
index ecb98ef..b2b728d 100644
--- a/src/util/index.js
+++ b/src/utils.js
@@ -1,12 +1,12 @@
-import { ObjectId } from "mongodb"; // Only needed in Node.js with MongoDB driver
+import { ObjectId } from 'mongodb'; // Only needed in Node.js with MongoDB driver
function parseFilter(property, value) {
- if (typeof value === "string") {
+ if (typeof value === 'string') {
const trimmed = value.trim();
// Handle booleans
- if (trimmed.toLowerCase() === "true") return { [property]: true };
- if (trimmed.toLowerCase() === "false") return { [property]: false };
+ if (trimmed.toLowerCase() === 'true') return { [property]: true };
+ if (trimmed.toLowerCase() === 'false') return { [property]: false };
// Handle ObjectId (24-char hex)
if (/^[a-f\d]{24}$/i.test(trimmed) && trimmed.length >= 24) {
@@ -22,8 +22,8 @@ function parseFilter(property, value) {
return {
[property]: {
$regex: trimmed,
- $options: "i"
- }
+ $options: 'i',
+ },
};
}
@@ -41,29 +41,25 @@ function convertToCamelCase(obj) {
// Convert the key to camelCase
let camelKey = key
// First handle special cases with spaces, brackets and other characters
- .replace(/\s*\[.*?\]\s*/g, "") // Remove brackets and their contents
- .replace(/\s+/g, " ") // Normalize spaces
+ .replace(/\s*\[.*?\]\s*/g, '') // Remove brackets and their contents
+ .replace(/\s+/g, ' ') // Normalize spaces
.trim()
// Split by common separators (space, underscore, hyphen)
.split(/[\s_-]/)
// Convert to camelCase
.map((word, index) => {
// Remove any non-alphanumeric characters
- word = word.replace(/[^a-zA-Z0-9]/g, "");
+ word = word.replace(/[^a-zA-Z0-9]/g, '');
// Lowercase first word, uppercase others
return index === 0
? word.toLowerCase()
: word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
})
- .join("");
+ .join('');
// Handle values that are objects recursively
- if (
- value !== null &&
- typeof value === "object" &&
- !Array.isArray(value)
- ) {
+ if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
result[camelKey] = convertToCamelCase(value);
} else {
result[camelKey] = value;
@@ -78,14 +74,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
const configObject = {};
// Extract header information
- const headerBlockRegex =
- /; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
+ const headerBlockRegex = /; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
const headerBlockMatch = fileContent.match(headerBlockRegex);
if (headerBlockMatch && headerBlockMatch[1]) {
- const headerLines = headerBlockMatch[1].split("\n");
+ const headerLines = headerBlockMatch[1].split('\n');
headerLines.forEach((line) => {
- // Match lines with info after semicolon
- const headerLineRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
const keyValueRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
const simpleValueRegex = /^\s*;\s*(.*?)\s*$/;
@@ -96,24 +89,22 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
let value = match[2].trim();
// Try to convert value to appropriate type
- if (!isNaN(value) && value !== "") {
+ if (!isNaN(value) && value !== '') {
value = Number(value);
}
configObject[key] = value;
} else {
// Try the simple format like "; generated by OrcaSlicer 2.1.1 on 2025-04-28 at 13:30:11"
match = line.match(simpleValueRegex);
- if (match && match[1] && !match[1].includes("HEADER_BLOCK")) {
+ if (match && match[1] && !match[1].includes('HEADER_BLOCK')) {
const text = match[1].trim();
// Extract slicer info
- const slicerMatch = text.match(
- /generated by (.*?) on (.*?) at (.*?)$/,
- );
+ const slicerMatch = text.match(/generated by (.*?) on (.*?) at (.*?)$/);
if (slicerMatch) {
- configObject["slicer"] = slicerMatch[1].trim();
- configObject["date"] = slicerMatch[2].trim();
- configObject["time"] = slicerMatch[3].trim();
+ configObject['slicer'] = slicerMatch[1].trim();
+ configObject['date'] = slicerMatch[2].trim();
+ configObject['time'] = slicerMatch[3].trim();
} else {
// Just add as a general header entry if it doesn't match any specific pattern
const key = `header_${Object.keys(configObject).length}`;
@@ -125,12 +116,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
}
// Extract thumbnail data
- const thumbnailBlockRegex =
- /; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
+ const thumbnailBlockRegex = /; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
const thumbnailBlockMatch = fileContent.match(thumbnailBlockRegex);
if (thumbnailBlockMatch && thumbnailBlockMatch[1]) {
- const thumbnailLines = thumbnailBlockMatch[1].split("\n");
- let base64Data = "";
+ const thumbnailLines = thumbnailBlockMatch[1].split('\n');
+ let base64Data = '';
let thumbnailInfo = {};
thumbnailLines.forEach((line) => {
@@ -142,13 +132,10 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
thumbnailInfo.width = parseInt(match[1], 10);
thumbnailInfo.height = parseInt(match[2], 10);
thumbnailInfo.size = parseInt(match[3], 10);
- } else if (
- line.trim().startsWith("; ") &&
- !line.includes("THUMBNAIL_BLOCK")
- ) {
+ } else if (line.trim().startsWith('; ') && !line.includes('THUMBNAIL_BLOCK')) {
// Collect base64 data (remove the leading semicolon and space and thumbnail end)
const dataLine = line.trim().substring(2);
- if (dataLine && dataLine != "thumbnail end") {
+ if (dataLine && dataLine != 'thumbnail end') {
base64Data += dataLine;
}
}
@@ -164,12 +151,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
}
// Extract CONFIG_BLOCK
- const configBlockRegex =
- /; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
+ const configBlockRegex = /; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
const configBlockMatch = fileContent.match(configBlockRegex);
if (configBlockMatch && configBlockMatch[1]) {
// Extract each config line
- const configLines = configBlockMatch[1].split("\n");
+ const configLines = configBlockMatch[1].split('\n');
// Process each line
configLines.forEach((line) => {
// Check if the line starts with a semicolon and has an equals sign
@@ -179,11 +165,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
- if (value === "true" || value === "false") {
- value = value === "true";
- } else if (!isNaN(value) && value !== "") {
+ if (value === 'true' || value === 'false') {
+ value = value === 'true';
+ } else if (!isNaN(value) && value !== '') {
// Check if it's a number (but not a percentage)
- if (!value.includes("%")) {
+ if (!value.includes('%')) {
value = Number(value);
}
}
@@ -197,31 +183,31 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
/; EXECUTABLE_BLOCK_(?:START|END)([\s\S]*?)(?:; CONFIG_BLOCK_START|$)/i;
const additionalVarsMatch = fileContent.match(additionalVarsRegex);
if (additionalVarsMatch && additionalVarsMatch[1]) {
- const additionalLines = additionalVarsMatch[1].split("\n");
+ const additionalLines = additionalVarsMatch[1].split('\n');
additionalLines.forEach((line) => {
// Match both standard format and the special case for "total filament cost"
const varRegex =
/^\s*;\s*((?:filament used|filament cost|total filament used|total filament cost|total layers count|estimated printing time)[^=]*?)\s*=\s*(.*?)\s*$/;
const match = line.match(varRegex);
if (match) {
- const key = match[1].replace(/\[([^\]]+)\]/g, "$1").trim();
+ const key = match[1].replace(/\[([^\]]+)\]/g, '$1').trim();
let value = match[2].trim();
// Clean up values - remove units in brackets and handle special cases
- if (key.includes("filament used")) {
+ if (key.includes('filament used')) {
// Extract just the numeric value, ignoring units in brackets
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
- } else if (key.includes("filament cost")) {
+ } else if (key.includes('filament cost')) {
// Extract just the numeric value
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
- } else if (key.includes("total layers count")) {
+ } else if (key.includes('total layers count')) {
value = parseInt(value, 10);
- } else if (key.includes("estimated printing time")) {
+ } else if (key.includes('estimated printing time')) {
// Keep as string but trim any additional whitespace
value = value.trim();
}
@@ -243,7 +229,7 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
const postConfigParams = /; CONFIG_BLOCK_END\s*\n([\s\S]*?)$/;
const postConfigMatch = fileContent.match(postConfigParams);
if (postConfigMatch && postConfigMatch[1]) {
- const postConfigLines = postConfigMatch[1].split("\n");
+ const postConfigLines = postConfigMatch[1].split('\n');
postConfigLines.forEach((line) => {
// Match lines with format "; parameter_name = value"
const paramRegex = /^\s*;\s*([^=]+?)\s*=\s*(.*?)\s*$/;
@@ -253,11 +239,11 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
let value = match[2].trim();
// Try to convert value to appropriate type
- if (value === "true" || value === "false") {
- value = value === "true";
- } else if (!isNaN(value) && value !== "") {
+ if (value === 'true' || value === 'false') {
+ value = value === 'true';
+ } else if (!isNaN(value) && value !== '') {
// Check if it's a number (but not a percentage)
- if (!value.includes("%")) {
+ if (!value.includes('%')) {
value = Number(value);
}
}
@@ -276,32 +262,32 @@ function extractConfigBlock(fileContent, useCamelCase = true) {
function getChangedValues(oldObj, newObj) {
const changes = {};
-
+
// Check all keys in the new object
for (const key in newObj) {
// Skip if the key is _id or timestamps
if (key === '_id' || key === 'createdAt' || key === 'updatedAt') continue;
-
+
// If the old value is different from the new value, include it
if (JSON.stringify(oldObj[key]) !== JSON.stringify(newObj[key])) {
changes[key] = newObj[key];
}
}
-
+
return changes;
}
async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, ownerModel) {
- const { auditLogModel } = await import('../schemas/management/auditlog.schema.js');
-
+ const { auditLogModel } = await import('./schemas/management/auditlog.schema.js');
+
// Get only the changed values
const changedValues = getChangedValues(oldValue, newValue);
-
+
// If no values changed, don't create an audit log
if (Object.keys(changedValues).length === 0) {
return;
}
-
+
const auditLog = new auditLogModel({
oldValue,
newValue: changedValues,
@@ -314,9 +300,4 @@ async function newAuditLog(oldValue, newValue, targetId, targetModel, ownerId, o
await auditLog.save();
}
-export {
- parseFilter,
- convertToCamelCase,
- extractConfigBlock,
- newAuditLog
-};
+export { parseFilter, convertToCamelCase, extractConfigBlock, newAuditLog };