diff --git a/.gitignore b/.gitignore
index eb61c54..94f603d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -136,5 +136,7 @@ dist
 
 .nova
-./gocdefile
-./gcodefiles
\ No newline at end of file
+gocdefile/*
+gcodefile
+gcodefiles/*
+gcodefiles
\ No newline at end of file
diff --git a/src/index.js b/src/index.js
index 56fb430..3d97439 100644
--- a/src/index.js
+++ b/src/index.js
@@ -16,6 +16,8 @@ import {
   productRoutes,
   vendorRoutes,
   materialRoutes,
+  partStockRoutes,
+  filamentStockRoutes,
 } from "./routes/index.js";
 import path from "path";
 import * as fs from "fs";
@@ -71,6 +73,8 @@ app.use("/parts", partRoutes);
 app.use("/products", productRoutes);
 app.use("/vendors", vendorRoutes);
 app.use("/materials", materialRoutes);
+app.use("/partstocks", partStockRoutes);
+app.use("/filamentstocks", filamentStockRoutes);
 
 if (process.env.SCHEDULE_HOUR) {
   cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {
diff --git a/src/routes/filaments/index.js b/src/routes/filaments/index.js
index 1898902..55742ab 100644
--- a/src/routes/filaments/index.js
+++ b/src/routes/filaments/index.js
@@ -14,7 +14,7 @@ import {
 router.get("/", isAuthenticated, (req, res) => {
   const { page, limit, property } = req.query;
 
-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ["type", "vendor.name", "diameter", "color"];
 
   const filter = {};
 
diff --git a/src/routes/filamentstocks/index.js b/src/routes/filamentstocks/index.js
new file mode 100644
index 0000000..3215243
--- /dev/null
+++ b/src/routes/filamentstocks/index.js
@@ -0,0 +1,45 @@
+import express from "express";
+import { isAuthenticated } from "../../keycloak.js";
+import { parseStringIfNumber } from "../../util/index.js";
+
+const router = express.Router();
+import {
+  listFilamentStocksRouteHandler,
+  getFilamentStockRouteHandler,
+  editFilamentStockRouteHandler,
+  newFilamentStockRouteHandler,
+} from "../../services/filamentstocks/index.js";
+
+// list of filamentStocks
+router.get("/", isAuthenticated, (req, res) => {
+  const { page, limit, property } = req.query;
+
+  const allowedFilters = ["country"];
+
+  const filter = {};
+
+  for (const [key, value] of Object.entries(req.query)) {
+    for (var i = 0; i < allowedFilters.length; i++) {
+      if (key == allowedFilters[i]) {
+        filter[key] = parseStringIfNumber(value);
+      }
+    }
+  }
+
+  listFilamentStocksRouteHandler(req, res, page, limit, property, filter);
+});
+
+router.post("/", isAuthenticated, (req, res) => {
+  newFilamentStockRouteHandler(req, res);
+});
+
+router.get("/:id", isAuthenticated, (req, res) => {
+  getFilamentStockRouteHandler(req, res);
+});
+
+// update filament stock info
+router.put("/:id", isAuthenticated, async (req, res) => {
+  editFilamentStockRouteHandler(req, res);
+});
+
+export default router;
diff --git a/src/routes/gcodefiles/index.js b/src/routes/gcodefiles/index.js
index 528d484..3677741 100644
--- a/src/routes/gcodefiles/index.js
+++ b/src/routes/gcodefiles/index.js
@@ -15,11 +15,11 @@ import {
 // list of printers
 router.get("/", isAuthenticated, (req, res) => {
-  const { page, limit, property, search } = req.query;
+  const { page, limit, property, search, sort, order } = req.query;
 
   const allowedFilters = [
     "filament.type",
-    "filament.brand",
+    "filament.vendor.name",
     "filament.diameter",
     "filament.color",
   ];
 
@@ -34,7 +34,7 @@ router.get("/", isAuthenticated, (req, res) => {
     }
   }
 
-  listGCodeFilesRouteHandler(req, res, page, limit, property, filter, search);
+  listGCodeFilesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
 });
 
 // new pritner
diff --git a/src/routes/index.js b/src/routes/index.js
index 55b2051..05cfed8 100644
--- a/src/routes/index.js
+++ b/src/routes/index.js
@@ -10,6 +10,8 @@ import partRoutes from "./parts/index.js";
 import productRoutes from "./products/index.js";
 import vendorRoutes from "./vendors/index.js";
 import materialRoutes from "./materials/index.js";
+import partStockRoutes from "./partstocks/index.js";
+import filamentStockRoutes from "./filamentstocks/index.js";
 
 export {
   userRoutes,
@@ -24,4 +26,6 @@ export {
   productRoutes,
   vendorRoutes,
   materialRoutes,
+  partStockRoutes,
+  filamentStockRoutes,
 };
diff --git a/src/routes/parts/index.js b/src/routes/parts/index.js
index b692689..0316a0c 100644
--- a/src/routes/parts/index.js
+++ b/src/routes/parts/index.js
@@ -14,21 +14,21 @@ import {
 
 // list of parts
 router.get("/", isAuthenticated, (req, res) => {
-  const { page, limit, property } = req.query;
+  const { page, limit, property, sort, order } = req.query;
 
-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ["products", "name"];
 
   const filter = {};
 
   for (const [key, value] of Object.entries(req.query)) {
     for (var i = 0; i < allowedFilters.length; i++) {
       if (key == allowedFilters[i]) {
-        filter[key] = parseStringIfNumber(value);
+        filter[key] = { $regex: parseStringIfNumber(value), $options: 'i' }
       }
     }
   }
 
-  listPartsRouteHandler(req, res, page, limit, property, filter);
+  listPartsRouteHandler(req, res, page, limit, property, filter, "", sort, order);
 });
 
 router.post("/", isAuthenticated, (req, res) => {
diff --git a/src/routes/partstocks/index.js b/src/routes/partstocks/index.js
new file mode 100644
index 0000000..c364ba6
--- /dev/null
+++ b/src/routes/partstocks/index.js
@@ -0,0 +1,45 @@
+import express from "express";
+import { isAuthenticated } from "../../keycloak.js";
+import { parseStringIfNumber } from "../../util/index.js";
+
+const router = express.Router();
+import {
+  listPartStocksRouteHandler,
+  getPartStockRouteHandler,
+  editPartStockRouteHandler,
+  newPartStockRouteHandler,
+} from "../../services/partstocks/index.js";
+
+// list of partStocks
+router.get("/", isAuthenticated, (req, res) => {
+  const { page, limit, property } = req.query;
+
+  const allowedFilters = ["country"];
+
+  const filter = {};
+
+  for (const [key, value] of Object.entries(req.query)) {
+    for (var i = 0; i < allowedFilters.length; i++) {
+      if (key == allowedFilters[i]) {
+        filter[key] = parseStringIfNumber(value);
+      }
+    }
+  }
+
+  listPartStocksRouteHandler(req, res, page, limit, property, filter);
+});
+
+router.post("/", isAuthenticated, (req, res) => {
+  newPartStockRouteHandler(req, res);
+});
+
+router.get("/:id", isAuthenticated, (req, res) => {
+  getPartStockRouteHandler(req, res);
+});
+
+// update part stock info
+router.put("/:id", isAuthenticated, async (req, res) => {
+  editPartStockRouteHandler(req, res);
+});
+
+export default router;
diff --git a/src/routes/printers/index.js b/src/routes/printers/index.js
index 43d9bca..7c3bc8b 100644
--- a/src/routes/printers/index.js
+++ b/src/routes/printers/index.js
@@ -8,6 +8,7 @@ import {
   editPrinterRouteHandler,
   getPrinterRouteHandler,
   createPrinterRouteHandler,
+  getPrinterStatsRouteHandler,
 } from "../../services/printers/index.js";
 
 // list of printers
@@ -21,6 +22,11 @@ router.post("/", isAuthenticated, (req, res) => {
   createPrinterRouteHandler(req, res);
 });
 
+// get printer stats
+router.get("/stats", isAuthenticated, (req, res) => {
+  getPrinterStatsRouteHandler(req, res);
+});
+
 router.get("/:id", isAuthenticated, (req, res) => {
   getPrinterRouteHandler(req, res);
 });
@@ -30,4 +36,5 @@ router.put("/:id", isAuthenticated, async (req, res) => {
   editPrinterRouteHandler(req, res);
 });
 
+
 export default router;
diff --git a/src/routes/printjobs/index.js b/src/routes/printjobs/index.js
index 99320c2..2515125 100644
--- a/src/routes/printjobs/index.js
+++ b/src/routes/printjobs/index.js
@@ -7,6 +7,7 @@ import {
   getPrintJobRouteHandler,
   editPrintJobRouteHandler,
   createPrintJobRouteHandler,
+  getPrintJobStatsRouteHandler
 } from "../../services/printjobs/index.js";
 
 // list of print jobs
@@ -15,6 +16,11 @@ router.get("/", isAuthenticated, (req, res) => {
   listPrintJobsRouteHandler(req, res, page, limit);
 });
 
+// get print job stats
+router.get("/stats", isAuthenticated, (req, res) => {
+  getPrintJobStatsRouteHandler(req, res);
+});
+
 // create new print job
 router.post("/", isAuthenticated, (req, res) => {
   createPrintJobRouteHandler(req, res);
diff --git a/src/routes/vendors/index.js b/src/routes/vendors/index.js
index f6ce66f..304aa4b 100644
--- a/src/routes/vendors/index.js
+++ b/src/routes/vendors/index.js
@@ -14,7 +14,7 @@ import {
 router.get("/", isAuthenticated, (req, res) => {
   const { page, limit, property } = req.query;
 
-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ["country"];
 
   const filter = {};
 
diff --git a/src/schemas/filament.schema.js b/src/schemas/filament.schema.js
index 7da3cd4..f1b9f68 100644
--- a/src/schemas/filament.schema.js
+++ b/src/schemas/filament.schema.js
@@ -1,4 +1,5 @@
 import mongoose from "mongoose";
+const { Schema } = mongoose;
 
 const filamentSchema = new mongoose.Schema({
   name: { required: true, type: String },
@@ -6,9 +7,9 @@ const filamentSchema = new mongoose.Schema({
   url: { required: false, type: String },
   image: { required: false, type: Buffer },
   color: { required: true, type: String },
-  brand: { required: true, type: String },
+  vendor: { type: Schema.Types.ObjectId, ref: "Vendor", required: true },
   type: { required: true, type: String },
-  price: { required: true, type: Number },
+  cost: { required: true, type: Number },
   diameter: { required: true, type: Number },
   density: { required: true, type: Number },
   createdAt: { required: true, type: Date },
diff --git a/src/schemas/filamentstock.schema.js b/src/schemas/filamentstock.schema.js
new file mode 100644
index 0000000..6ad2242
--- /dev/null
+++ b/src/schemas/filamentstock.schema.js
@@ -0,0 +1,39 @@
+import mongoose from "mongoose";
+const { Schema } = mongoose;
+
+// Define the main filamentStock schema
+const filamentStockSchema = new Schema(
+  {
+    state: {
+      type: { type: String, required: true },
+      percent: { type: String, required: true },
+    },
+    startingGrossWeight: { type: Number, required: true },
+    startingNetWeight: { type: Number, required: true },
+    currentGrossWeight: { type: Number, required: true },
+    currentNetWeight: { type: Number, required: true },
+    filament: { type: mongoose.Schema.Types.ObjectId, ref: "Filament" },
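+    // Each stock event records a weight change (type + value) and can reference the print job/sub-job that caused it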
+    stockEvents: [{
+      type: { type: String, required: true },
+      value: { type: Number, required: true },
+      subJob: { type: mongoose.Schema.Types.ObjectId, ref: "PrintSubJob", required: false },
+      job: { type: mongoose.Schema.Types.ObjectId, ref: "PrintJob", required: false },
+      timestamp: { type: Date, default: Date.now }
+    }]
+  },
+  { timestamps: true },
+);
+
+// Add virtual id getter
+filamentStockSchema.virtual("id").get(function () {
+  return this._id.toHexString();
+});
+
+// Configure JSON serialization to include virtuals
+filamentStockSchema.set("toJSON", { virtuals: true });
+
+// Create and export the model
+export const filamentStockModel = mongoose.model(
+  "FilamentStock",
+  filamentStockSchema,
+);
diff --git a/src/schemas/gcodefile.schema.js b/src/schemas/gcodefile.schema.js
index 974a1b6..8a96891 100644
--- a/src/schemas/gcodefile.schema.js
+++ b/src/schemas/gcodefile.schema.js
@@ -8,7 +8,7 @@ const gcodeFileSchema = new mongoose.Schema({
   size: { type: Number, required: false },
   filament: { type: Schema.Types.ObjectId, ref: "Filament", required: true },
   parts: [{ type: Schema.Types.ObjectId, ref: "Part", required: true }],
-  price: { type: Number, required: false },
+  cost: { type: Number, required: false },
   createdAt: { type: Date },
   updatedAt: { type: Date },
 });
diff --git a/src/schemas/material.schema.js b/src/schemas/material.schema.js
index 3e8b49b..ff09acc 100644
--- a/src/schemas/material.schema.js
+++ b/src/schemas/material.schema.js
@@ -4,6 +4,7 @@ const materialSchema = new mongoose.Schema({
   name: { required: true, type: String },
   url: { required: false, type: String },
   image: { required: false, type: Buffer },
+  tags: [{ type: String }],
 });
 
 materialSchema.virtual("id").get(function () {
diff --git a/src/schemas/part.schema.js b/src/schemas/part.schema.js
index 606b6be..d9cc3df 100644
--- a/src/schemas/part.schema.js
+++ b/src/schemas/part.schema.js
@@ -5,7 +5,12 @@ const { Schema } = mongoose;
 const partSchema = new Schema(
   {
     name: { type: String, required: true },
-    products: [{ type: mongoose.Schema.Types.ObjectId, ref: "Product" }],
+    fileName: { type: String, required: false },
+    product: { type: mongoose.Schema.Types.ObjectId, ref: "Product" },
+    useGlobalPricing: { type: Boolean, default: true },
+    marginOrPrice: { type: Boolean, default: false },
+    margin: { type: Number, required: false },
+    price: { type: Number, required: false },
   },
   { timestamps: true },
 );
diff --git a/src/schemas/partstock.schema.js b/src/schemas/partstock.schema.js
new file mode 100644
index 0000000..3c40b24
--- /dev/null
+++ b/src/schemas/partstock.schema.js
@@ -0,0 +1,23 @@
+import mongoose from "mongoose";
+const { Schema } = mongoose;
+
+// Define the main partStock schema
+const partStockSchema = new Schema(
+  {
+    name: { type: String, required: true },
+    fileName: { type: String, required: false },
+    product: { type: mongoose.Schema.Types.ObjectId, ref: "Product" },
+  },
+  { timestamps: true },
+);
+
+// Add virtual id getter
+partStockSchema.virtual("id").get(function () {
+  return this._id.toHexString();
+});
+
+// Configure JSON serialization to include virtuals
+partStockSchema.set("toJSON", { virtuals: true });
+
+// Create and export the model
+export const partStockModel = mongoose.model("PartStock", partStockSchema);
diff --git a/src/schemas/printer.schema.js b/src/schemas/printer.schema.js
index b321bd8..8139a28 100644
--- a/src/schemas/printer.schema.js
+++ b/src/schemas/printer.schema.js
@@ -12,10 +12,19 @@ const moonrakerSchema = new Schema(
   { _id: false },
 );
 
+// Define the alert schema
+const alertSchema = new Schema(
+  {
+    priority: { type: String, required: true }, // order to show
+    type: { type: String, required: true }, // selectFilament, error, info, message,
+  },
+  { timestamps: true, _id: false }
+);
+
 // Define the main printer schema
 const printerSchema = new Schema(
   {
-    printerName: { type: String, required: true },
+    name: { type: String, required: true },
     online: { type: Boolean, required: true, default: false },
     state: {
       type: { type: String, required: true, default: "Offline" },
@@ -32,7 +41,10 @@ const printerSchema = new Schema(
     firmware: { type: String },
     currentJob: { type: Schema.Types.ObjectId, ref: "PrintJob" },
     currentSubJob: { type: Schema.Types.ObjectId, ref: "PrintSubJob" },
+    currentFilamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock" },
     subJobs: [{ type: Schema.Types.ObjectId, ref: "PrintSubJob" }],
+    vendor: { type: Schema.Types.ObjectId, ref: "Vendor", default: null },
+    alerts: [alertSchema],
   },
   { timestamps: true },
 );
diff --git a/src/schemas/printjob.schema.js b/src/schemas/printjob.schema.js
index 378a7d8..2b94b1d 100644
--- a/src/schemas/printjob.schema.js
+++ b/src/schemas/printjob.schema.js
@@ -8,7 +8,7 @@ const printJobSchema = new mongoose.Schema({
   printers: [{ type: Schema.Types.ObjectId, ref: "Printer", required: false }],
   createdAt: { required: true, type: Date },
   updatedAt: { required: true, type: Date },
-  startedAt: { required: true, type: Date },
+  startedAt: { required: false, type: Date },
   gcodeFile: {
     type: Schema.Types.ObjectId,
     ref: "GCodeFile",
diff --git a/src/schemas/product.schema.js b/src/schemas/product.schema.js
index 3e189c9..7e1cbe6 100644
--- a/src/schemas/product.schema.js
+++ b/src/schemas/product.schema.js
@@ -7,6 +7,10 @@ const productSchema = new Schema(
     name: { type: String, required: true },
     tags: [{ type: String }],
     version: { type: String },
+    marginOrPrice: { type: Boolean, default: false },
+    margin: { type: Number, required: false },
+    price: { type: Number, required: false },
+    vendor: { type: Schema.Types.ObjectId, ref: "Vendor", required: true },
     parts: [{ type: mongoose.Schema.Types.ObjectId, ref: "Part" }],
   },
   { timestamps: true },
@@ -21,4 +25,4 @@ productSchema.virtual("id").get(function () {
 productSchema.set("toJSON", { virtuals: true });
 
 // Create and export the model
-export const productModel = mongoose.model("product", productSchema);
+export const productModel = mongoose.model("Product", productSchema);
diff --git a/src/schemas/vendor.schema.js b/src/schemas/vendor.schema.js
index 9e9d03a..315f9d1 100644
--- a/src/schemas/vendor.schema.js
+++ b/src/schemas/vendor.schema.js
@@ -4,8 +4,10 @@ const vendorSchema = new mongoose.Schema(
   {
     name: { required: true, type: String },
     website: { required: false, type: String },
+    email: { required: false, type: String },
+    phone: { required: false, type: String },
     contact: { required: false, type: String },
-    image: { required: false, type: Buffer },
+    country: { required: false, type: String },
   },
   { timestamps: true },
 );
diff --git a/src/services/filaments/index.js b/src/services/filaments/index.js
index 1346139..117a8d1 100644
--- a/src/services/filaments/index.js
+++ b/src/services/filaments/index.js
@@ -1,5 +1,5 @@
 import dotenv from "dotenv";
-import { filamentModel } from "../../schemas/filament.schema.js"
+import { filamentModel } from "../../schemas/filament.schema.js";
 import jwt from "jsonwebtoken";
 import log4js from "log4js";
 import mongoose from "mongoose";
@@ -9,34 +9,55 @@ dotenv.config();
 const logger = log4js.getLogger("Filaments");
 logger.level = process.env.LOG_LEVEL;
 
-export const listFilamentsRouteHandler = async (req, res, page = 1, limit = 25, property = "", filter = {}) => {
+export const listFilamentsRouteHandler = async (
+  req,
+  res,
+  page = 1,
+  limit = 25,
+  property = "",
+  filter = {},
+) => {
   try {
     // Calculate the skip value based on the page number and limit
     const skip = (page - 1) * limit;
-
     let filament;
     let aggregateCommand = [];
-
-    if (filter != {}) { // use filtering if present
+
+    aggregateCommand.push({
+      $lookup: {
+        from: "vendors", // The collection name (usually lowercase plural)
+        localField: "vendor", // The field in your current model
+        foreignField: "_id", // The field in the products collection
+        as: "vendor", // The output field name
+      },
+    });
+
+    aggregateCommand.push({ $unwind: "$vendor" });
+
+    if (filter != {}) {
+      // use filtering if present
       aggregateCommand.push({ $match: filter });
     }
-
+
     if (property != "") {
-      aggregateCommand.push({ $group: { _id: `$${property}` } }) // group all same properties
-      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" }}); // rename _id to the property name
+      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
+      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
     } else {
-      aggregateCommand.push({ $project: { image: 0, url: 0 }});
+      aggregateCommand.push({ $project: { image: 0, url: 0 } });
     }
-
+
     aggregateCommand.push({ $skip: skip });
     aggregateCommand.push({ $limit: Number(limit) });
-
-    console.log(aggregateCommand)
-
-    filament = await filamentModel.aggregate(aggregateCommand)
-
-    logger.trace(`List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`, filament);
+
+    console.log(aggregateCommand);
+
+    filament = await filamentModel.aggregate(aggregateCommand);
+
+    logger.trace(
+      `List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`,
+      filament,
+    );
     res.send(filament);
   } catch (error) {
     logger.error("Error listing filaments:", error);
@@ -49,10 +70,12 @@ export const getFilamentRouteHandler = async (req, res) => {
     // Get ID from params
     const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the filament with the given remote address
-    const filament = await filamentModel.findOne({
-      _id: id
-    });
-
+    const filament = await filamentModel
+      .findOne({
+        _id: id,
+      })
+      .populate("vendor");
+
     if (!filament) {
       logger.warn(`Filament not found with supplied id.`);
       return res.status(404).send({ error: "Print job not found." });
@@ -73,49 +96,77 @@ export const editFilamentRouteHandler = async (req, res) => {
     // Fetch the filament with the given remote address
     const filament = await filamentModel.findOne({ _id: id });
 
-    if (!filament) { // Error handling
+    if (!filament) {
+      // Error handling
       logger.warn(`Filament not found with supplied id.`);
       return res.status(404).send({ error: "Print job not found." });
     }
 
     logger.trace(`Filament with ID: ${id}:`, filament);
-
+
     try {
-      const { createdAt, updatedAt, started_at, status, ...updateData } = req.body;
-
+      const updateData = {
+        updatedAt: new Date(),
+        name: req.body.name,
+        barcode: req.body.barcode,
+        url: req.body.url,
+        image: req.body.image,
+        color: req.body.color,
+        vendor: req.body.vendor.id,
+        type: req.body.type,
+        price: req.body.price,
+        diameter: req.body.diameter,
+        density: req.body.density,
+        emptySpoolWeight: req.body.emptySpoolWeight,
+      };
+
       const result = await filamentModel.updateOne(
         { _id: id },
-        { $set: updateData }
+        { $set: updateData },
       );
       if (result.nModified === 0) {
         logger.error("No Filament updated.");
-        res.status(500).send({ error: "No filaments updated." });
+        return res.status(500).send({ error: "No filaments updated." });
       }
     } catch (updateError) {
       logger.error("Error updating filament:", updateError);
-      res.status(500).send({ error: updateError.message });
+      return res.status(500).send({ error: updateError.message });
     }
-    res.send("OK");
+    return res.send("OK");
   } catch (fetchError) {
     logger.error("Error fetching filament:", fetchError);
-    res.status(500).send({ error: fetchError.message });
+    return res.status(500).send({ error: fetchError.message });
   }
 };
 
 export const newFilamentRouteHandler = async (req, res) => {
+  try {
+    const newFilament = {
+      createdAt: new Date(),
+      updatedAt: new Date(),
+      name: req.body.name,
+      barcode: req.body.barcode,
+      url: req.body.url,
+      image: req.body.image,
+      color: req.body.color,
+      vendor: req.body.vendor._id,
+      type: req.body.type,
+      cost: req.body.cost,
+      diameter: req.body.diameter,
+      density: req.body.density,
+      emptySpoolWeight: req.body.emptySpoolWeight,
+    };
 
-  try {
-    let { ...newFilament } = req.body;
-    newFilament = { ...newFilament, createdAt: new Date(), updatedAt: new Date() }
-
-    const result = await filamentModel.create(newFilament);
-    if (result.nCreated === 0) {
-      logger.error("No filament created.");
-      res.status(500).send({ error: "No filament created." });
-    }
-    res.status(200).send({ status: "ok" });
-  } catch (updateError) {
-    logger.error("Error updating filament:", updateError);
-    res.status(500).send({ error: updateError.message });
+    const result = await filamentModel.create(newFilament);
+
+    if (result.nCreated === 0) {
+      logger.error("No filament created.");
+      res.status(500).send({ error: "No filament created." });
     }
-};
\ No newline at end of file
+
+    res.status(200).send({ status: "ok" });
+  } catch (updateError) {
+    logger.error("Error updating filament:", updateError);
+    res.status(500).send({ error: updateError.message });
+  }
+};
diff --git a/src/services/filamentstocks/index.js b/src/services/filamentstocks/index.js
new file mode 100644
index 0000000..4f874cf
--- /dev/null
+++ b/src/services/filamentstocks/index.js
@@ -0,0 +1,197 @@
+import dotenv from "dotenv";
+import { filamentStockModel } from "../../schemas/filamentstock.schema.js";
+import { filamentModel } from "../../schemas/filament.schema.js";
+import jwt from "jsonwebtoken";
+import log4js from "log4js";
+import mongoose from "mongoose";
+
+dotenv.config();
+
+const logger = log4js.getLogger("Filament Stocks");
+logger.level = process.env.LOG_LEVEL;
+
+export const listFilamentStocksRouteHandler = async (
+  req,
+  res,
+  page = 1,
+  limit = 25,
+  property = "",
+  filter = {},
+) => {
+  try {
+    // Calculate the skip value based on the page number and limit
+    const skip = (page - 1) * limit;
+
+    let filamentStock;
+    let aggregateCommand = [];
+
+    aggregateCommand.push({
+      $lookup: {
+        from: "filaments", // The collection name (usually lowercase plural)
+        localField: "filament", // The field in your current model
+        foreignField: "_id", // The field in the products collection
+        as: "filament", // The output field name
+      },
+    });
+
+    aggregateCommand.push({ $unwind: "$filament" });
+
+    if (filter != {}) {
+      // use filtering if present
+      aggregateCommand.push({ $match: filter });
+    }
+
+    if (property != "") {
+      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
+      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+    } else {
+      aggregateCommand.push({ $project: { image: 0, url: 0 } });
+    }
+
+    aggregateCommand.push({ $skip: skip });
+    aggregateCommand.push({ $limit: Number(limit) });
+
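+    // Log the assembled aggregation pipeline before running it (useful when debugging filters)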
+    console.log(aggregateCommand);
+
+    filamentStock = await filamentStockModel.aggregate(aggregateCommand);
+
+    logger.trace(
+      `List of filamentStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
+      filamentStock,
+    );
+    res.send(filamentStock);
+  } catch (error) {
+    logger.error("Error listing filament stocks:", error);
+    res.status(500).send({ error: error });
+  }
+};
+
+export const getFilamentStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the filamentStock with the given remote address
+    const filamentStock = await filamentStockModel
+      .findOne({
+        _id: id,
+      })
+      .populate("filament").populate({
+        path: 'stockEvents',
+        populate: [
+          {
+            path: 'subJob',
+            select: 'number'
+          },
+          {
+            path: 'job',
+            select: 'startedAt'
+          }
+        ]
+      });
+
+    if (!filamentStock) {
+      logger.warn(`Filament stock not found with supplied id.`);
+      return res.status(404).send({ error: "Filament stock not found." });
+    }
+
+    logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
+    res.send(filamentStock);
+  } catch (error) {
+    logger.error("Error fetching filament stock:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
+
+export const editFilamentStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the filamentStock with the given remote address
+    const filamentStock = await filamentStockModel.findOne({ _id: id });
+
+    if (!filamentStock) {
+      // Error handling
+      logger.warn(`Filament stock not found with supplied id.`);
+      return res.status(404).send({ error: "Filament stock not found." });
+    }
+
+    logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
+
+    try {
+      const updateData = {
+        updatedAt: new Date(),
+        contact: req.body.contact,
+        country: req.body.country,
+        name: req.body.name,
+        website: req.body.website,
+        phone: req.body.phone,
+        email: req.body.email,
+      };
+
+      const result = await filamentStockModel.updateOne(
+        { _id: id },
+        { $set: updateData },
+      );
+      if (result.nModified === 0) {
+        logger.error("No filament stock updated.");
+        res.status(500).send({ error: "No filament stocks updated." });
+      }
+    } catch (updateError) {
+      logger.error("Error updating filament stock:", updateError);
+      res.status(500).send({ error: updateError.message });
+    }
+    res.send("OK");
+  } catch (fetchError) {
+    logger.error("Error fetching filament stock:", fetchError);
+    res.status(500).send({ error: fetchError.message });
+  }
+};
+
+export const newFilamentStockRouteHandler = async (req, res) => {
+  var filament = null;
+
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.body.filament._id);
+    // Fetch the filament with the given remote address
+    filament = await filamentModel.findOne({
+      _id: id,
+    });
+
+    if (!filament) {
+      logger.warn(`Filament not found with supplied id.`);
+      return res.status(404).send({ error: "Filament not found." });
+    }
+
+    logger.trace(`Filament with ID: ${id}:`, filament);
+  } catch (error) {
+    logger.error("Error fetching filament:", error);
+    return res.status(500).send({ error: error.message });
+  }
+
+  try {
+    logger.warn(req.body);
+    const startingGrossWeight = req.body.startingGrossWeight;
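+    // Net weight = gross weight minus the filament's empty spool weight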
+    const newFilamentStock = {
+      startingGrossWeight: startingGrossWeight,
+      startingNetWeight: startingGrossWeight - filament.emptySpoolWeight,
+      currentGrossWeight: startingGrossWeight,
+      currentNetWeight: startingGrossWeight - filament.emptySpoolWeight,
+      filament: req.body.filament._id,
+      state: {
+        type: "unconsumed",
+        percent: 0,
+      },
+    };
+
+    const result = await filamentStockModel.create(newFilamentStock);
+    if (result.nCreated === 0) {
+      logger.error("No filament stock created.");
+      return res.status(500).send({ error: "No filament stock created." });
+    }
+    return res.send({ status: "ok" });
+  } catch (updateError) {
+    logger.error("Error adding filament stock:", updateError);
+    return res.status(500).send({ error: updateError.message });
+  }
+};
diff --git a/src/services/gcodefiles/index.js b/src/services/gcodefiles/index.js
index 48b22ba..c4f3b82 100644
--- a/src/services/gcodefiles/index.js
+++ b/src/services/gcodefiles/index.js
@@ -1,5 +1,6 @@
 import dotenv from "dotenv";
 import { gcodeFileModel } from "../../schemas/gcodefile.schema.js";
+import { filamentModel } from "../../schemas/filament.schema.js";
 import jwt from "jsonwebtoken";
 import log4js from "log4js";
 import multer from "multer";
@@ -62,6 +63,8 @@ export const listGCodeFilesRouteHandler = async (
   property = "",
   filter = {},
   search = "",
+  sort = "",
+  order = "ascend"
 ) => {
   try {
     // Calculate the skip value based on the page number and limit
@@ -103,6 +106,17 @@ export const listGCodeFilesRouteHandler = async (
       },
     });
 
+    aggregateCommand.push({
+      $lookup: {
+        from: "vendors", // The collection name (usually lowercase plural)
+        localField: "filament.vendor", // The field in your current model
+        foreignField: "_id", // The field in the products collection
+        as: "filament.vendor", // The output field name
+      },
+    });
+
+    aggregateCommand.push({ $unwind: "$filament.vendor" });
+
     if (filter != {}) {
       // use filtering if present
       aggregateCommand.push({ $match: filter });
@@ -123,6 +137,12 @@ export const listGCodeFilesRouteHandler = async (
       });
     }
 
+    // Add sorting if sort parameter is provided
+    if (sort) {
+      const sortOrder = order === "descend" ? -1 : 1;
+      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
+    }
+
     aggregateCommand.push({ $skip: skip });
     aggregateCommand.push({ $limit: Number(limit) });
 
@@ -131,7 +151,7 @@ export const listGCodeFilesRouteHandler = async (
     gcodeFile = await gcodeFileModel.aggregate(aggregateCommand);
 
     logger.trace(
-      `List of gcode files (Page ${page}, Limit ${limit}, Property ${property}):`,
+      `List of gcode files (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
       gcodeFile,
     );
     res.send(gcodeFile);
@@ -199,10 +219,12 @@ export const editGCodeFileRouteHandler = async (req, res) => {
     logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
 
     try {
-      const { createdAt, updatedAt, started_at, status, ...updateData } =
-        req.body;
+      const updateData = {
+        updatedAt: new Date(),
+        name: req.body.name,
+        filament: req.body?.filament?._id,
+      };
 
-      console.log("Update data", updateData);
       const result = await gcodeFileModel.updateOne(
         { _id: id },
         { $set: updateData },
@@ -223,22 +245,44 @@ export const editGCodeFileRouteHandler = async (req, res) => {
 };
 
 export const newGCodeFileRouteHandler = async (req, res) => {
+  var filament = null;
+
   try {
-    let { ...newGCodeFile } = req.body;
-    newGCodeFile = {
-      ...newGCodeFile,
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.body.filament._id);
+    // Fetch the filament with the given remote address
+    filament = await filamentModel.findOne({
+      _id: id,
+    });
+
+    if (!filament) {
+      logger.warn(`Filament not found with supplied id.`);
+      return res.status(404).send({ error: "Filament not found." });
+    }
+
+    logger.trace(`Filament with ID: ${id}:`, filament);
+  } catch (error) {
+    logger.error("Error fetching filament:", error);
+    return res.status(500).send({ error: error.message });
+  }
+
+  try {
+    const newGCodeFile = {
       createdAt: new Date(),
       updatedAt: new Date(),
+      gcodeFileInfo: req.body.gcodeFileInfo,
+      filament: req.body.filament._id,
+      name: req.body.name,
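+      // Rough cost estimate from grams used (assumes filament.cost is a per-kilogram price)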
+      cost: (filament.cost / 1000) * req.body.gcodeFileInfo.filamentUsedG,
+    };
 
     const result = await gcodeFileModel.create(newGCodeFile);
     if (result.nCreated === 0) {
       logger.error("No gcode file created.");
-      res.status(500).send({ error: "No filament created." });
+      res.status(500).send({ error: "No gcode file created." });
     }
     res.status(200).send(result);
   } catch (updateError) {
-    logger.error("Error updating filament:", updateError);
+    logger.error("Error creating gcode file:", updateError);
     res.status(500).send({ error: updateError.message });
   }
 };
diff --git a/src/services/parts/index.js b/src/services/parts/index.js
index 4e0eb64..b5dcb80 100644
--- a/src/services/parts/index.js
+++ b/src/services/parts/index.js
@@ -56,6 +56,9 @@ export const listPartsRouteHandler = async (
   limit = 25,
   property = "",
   filter = {},
+  search = "",
+  sort = "",
+  order = "ascend"
 ) => {
   try {
     // Calculate the skip value based on the page number and limit
@@ -70,10 +73,35 @@ export const listPartsRouteHandler = async (
     }
 
     if (property != "") {
+      logger.error(property);
       aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
       aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
     } else {
-      aggregateCommand.push({ $project: { image: 0, url: 0 } });
+      aggregateCommand.push({
+        $lookup: {
+          from: "products", // The collection name (usually lowercase plural)
+          localField: "product", // The field in your current model
+          foreignField: "_id", // The field in the products collection
+          as: "product", // The output field name
+        },
+      });
+      aggregateCommand.push({ $unwind: "$product" });
+      aggregateCommand.push({
+        $project: {
+          name: 1,
+          _id: 1,
+          createdAt: 1,
+          updatedAt: 1,
+          "product._id": 1,
+          "product.name": 1,
+        },
+      });
+    }
+
+    // Add sorting if sort parameter is provided
+    if (sort) {
+      const sortOrder = order === "descend" ? -1 : 1;
+      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
     }
 
     aggregateCommand.push({ $skip: skip });
@@ -84,7 +112,7 @@ export const listPartsRouteHandler = async (
     part = await partModel.aggregate(aggregateCommand);
 
     logger.trace(
-      `List of parts (Page ${page}, Limit ${limit}, Property ${property}):`,
+      `List of parts (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
       part,
     );
     res.send(part);
@@ -99,9 +127,11 @@ export const getPartRouteHandler = async (req, res) => {
     // Get ID from params
     const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the part with the given remote address
-    const part = await partModel.findOne({
-      _id: id,
-    });
+    const part = await partModel
+      .findOne({
+        _id: id,
+      })
+      .populate("product");
 
     if (!part) {
       logger.warn(`Part not found with supplied id.`);
@@ -156,18 +186,37 @@ export const editPartRouteHandler = async (req, res) => {
 
 export const newPartRouteHandler = async (req, res) => {
   try {
-    let { ...newPart } = req.body;
-    newPart = { ...newPart, createdAt: new Date(), updatedAt: new Date() };
+    if (Array.isArray(req.body)) {
+      // Handle array of parts
+      const partsToCreate = req.body.map((part) => ({
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        name: part.name,
+        products: part?.products,
+        fileName: part?.fileName,
+      }));
 
-    const result = await partModel.create(newPart);
-    if (result.nCreated === 0) {
-      logger.error("No part created.");
-      res.status(500).send({ error: "No part created." });
+      const results = await partModel.insertMany(partsToCreate);
+      if (!results.length) {
+        logger.error("No parts created.");
+        return res.status(500).send({ error: "No parts created." });
+      }
+      return res.status(200).send(results);
+    } else {
+      // Handle single part
+      const newPart = {
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        name: req.body.name,
+        products: req.body?.products,
+        fileName: req.body?.fileName,
+      };
+      const result = await partModel.create(newPart);
+      return res.status(200).send(result);
     }
-    res.status(200).send(result);
-  } catch (updateError) {
-    logger.error("Error updating part:", updateError);
-    res.status(500).send({ error: updateError.message });
+  } catch (error) {
+    logger.error("Error creating part(s):", error);
+    return res.status(500).send({ error: error.message });
   }
 };
 
diff --git a/src/services/partstocks/index.js b/src/services/partstocks/index.js
new file mode 100644
index 0000000..0a4a422
--- /dev/null
+++ b/src/services/partstocks/index.js
@@ -0,0 +1,143 @@
+import dotenv from "dotenv";
+import { partStockModel } from "../../schemas/partstock.schema.js";
+import jwt from "jsonwebtoken";
+import log4js from "log4js";
+import mongoose from "mongoose";
+
+dotenv.config();
+
+const logger = log4js.getLogger("PartStocks");
+logger.level = process.env.LOG_LEVEL;
+
+export const listPartStocksRouteHandler = async (
+  req,
+  res,
+  page = 1,
+  limit = 25,
+  property = "",
+  filter = {},
+) => {
+  try {
+    // Calculate the skip value based on the page number and limit
+    const skip = (page - 1) * limit;
+
+    let partStock;
+    let aggregateCommand = [];
+
+    if (filter != {}) {
+      // use filtering if present
+      aggregateCommand.push({ $match: filter });
+    }
+
+    if (property != "") {
+      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
+      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+    } else {
+      aggregateCommand.push({ $project: { image: 0, url: 0 } });
+    }
+
+    aggregateCommand.push({ $skip: skip });
+    aggregateCommand.push({ $limit: Number(limit) });
+
+    console.log(aggregateCommand);
+
+    partStock = await partStockModel.aggregate(aggregateCommand);
+
+    logger.trace(
+      `List of partStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
+      partStock,
+    );
+    res.send(partStock);
+  } catch (error) {
+    logger.error("Error listing partStocks:", error);
+    res.status(500).send({ error: error });
+  }
+};
+
+export const getPartStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the partStock with the given remote address
+    const partStock = await partStockModel.findOne({
+      _id: id,
+    });
+
+    if (!partStock) {
+      logger.warn(`PartStock not found with supplied id.`);
+      return res.status(404).send({ error: "Part stock not found." });
+    }
+
+    logger.trace(`PartStock with ID: ${id}:`, partStock);
+    res.send(partStock);
+  } catch (error) {
+    logger.error("Error fetching PartStock:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
+
+export const editPartStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the partStock with the given remote address
+    const partStock = await partStockModel.findOne({ _id: id });
+
+    if (!partStock) {
+      // Error handling
+      logger.warn(`PartStock not found with supplied id.`);
+      return res.status(404).send({ error: "Part stock not found." });
+    }
+
+    logger.trace(`PartStock with ID: ${id}:`, partStock);
+
+    try {
+      const updateData = {
+        updatedAt: new Date(),
+        contact: req.body.contact,
+        country: req.body.country,
+        name: req.body.name,
+        website: req.body.website,
+        phone: req.body.phone,
+        email: req.body.email,
+      };
+
+      const result = await partStockModel.updateOne(
+        { _id: id },
+        { $set: updateData },
+      );
+      if (result.nModified === 0) {
+        logger.error("No PartStock updated.");
+        res.status(500).send({ error: "No partStocks updated." });
+      }
+    } catch (updateError) {
+      logger.error("Error updating partStock:", updateError);
+      res.status(500).send({ error: updateError.message });
+    }
+    res.send("OK");
+  } catch (fetchError) {
+    logger.error("Error fetching partStock:", fetchError);
+    res.status(500).send({ error: fetchError.message });
+  }
+};
+
+export const newPartStockRouteHandler = async (req, res) => {
+  try {
+    let { ...newPartStock } = req.body;
+    newPartStock = {
+      ...newPartStock,
+      createdAt: new Date(),
+      updatedAt: new Date(),
+    };
+
+    const result = await partStockModel.create(newPartStock);
+    if (result.nCreated === 0) {
+      logger.error("No partStock created.");
+      res.status(500).send({ error: "No partStock created." });
+    }
+    res.status(200).send({ status: "ok" });
+  } catch (updateError) {
+    logger.error("Error updating partStock:", updateError);
+    res.status(500).send({ error: updateError.message });
+  }
+};
diff --git a/src/services/printers/index.js b/src/services/printers/index.js
index 5ef7825..dc4790f 100644
--- a/src/services/printers/index.js
+++ b/src/services/printers/index.js
@@ -33,22 +33,28 @@ export const getPrinterRouteHandler = async (req, res) => {
 
   try {
     // Fetch the printer with the given remote address
-    const printer = await printerModel.findOne({ _id: id })
-      .populate('subJobs')
-      .populate('currentJob')
+    const printer = await printerModel
+      .findOne({ _id: id })
+      .populate("subJobs")
+      .populate("currentJob")
       .populate({
-        path: 'currentJob',
+        path: "currentJob",
         populate: {
-          path: 'gcodeFile'
-        }
+          path: "gcodeFile",
+        },
       })
-      .populate('currentSubJob')
+      .populate("currentSubJob")
      .populate({
-        path: 'subJobs',
+        path: "subJobs",
         populate: {
-          path: 'printJob'
-        }
-      });
+          path: "printJob",
+        },
+      })
+      .populate("vendor")
+      .populate({ path: "currentFilamentStock",
+        populate: {
+          path: "filament",
+        },})
 
     if (!printer) {
       logger.warn(`Printer with id ${id} not found.`);
@@ -66,11 +72,18 @@ export const getPrinterRouteHandler = async (req, res) => {
 
 export const editPrinterRouteHandler = async (req, res) => {
   const id = req.params.id;
   try {
-    try {
+    const updateData = {
+      updatedAt: new Date(),
+      moonraker: req.body.moonraker,
+      tags: req.body.tags,
+      name: req.body.name,
+      vendor: req.body.vendor.id,
+    };
+
     const result = await printerModel.updateOne(
       { _id: id },
-      { $set: req.body },
+      { $set: updateData },
     );
     if (result.nModified === 0) {
       logger.error("No printers updated.");
@@ -89,48 +102,72 @@ export const editPrinterRouteHandler = async (req, res) => {
 
 export const createPrinterRouteHandler = async (req, res) => {
   try {
-    const {
-      printerName,
-      moonraker,
-      tags = [],
-      firmware = "n/a",
-    } = req.body;
+    const { name, moonraker, tags = [], firmware = "n/a" } = req.body;
 
     // Validate required fields
-    if (!printerName || !moonraker) {
+    if (!name || !moonraker) {
       logger.warn("Missing required fields in printer creation request");
-      return res.status(400).send({
-        error: "Missing required fields. printerName and moonraker configuration are required."
+      return res.status(400).send({
+        error:
+          "Missing required fields. name and moonraker configuration are required.",
       });
     }
 
     // Validate moonraker configuration
     if (!moonraker.host || !moonraker.port || !moonraker.protocol) {
-      logger.warn("Invalid moonraker configuration in printer creation request");
-      return res.status(400).send({
-        error: "Invalid moonraker configuration. host, port, protocol are required."
+      logger.warn(
+        "Invalid moonraker configuration in printer creation request",
+      );
+      return res.status(400).send({
+        error:
+          "Invalid moonraker configuration. host, port, protocol are required.",
      });
     }
 
     // Create new printer instance
     const newPrinter = new printerModel({
-      printerName,
+      name,
       moonraker,
       tags,
       firmware,
       online: false,
       state: {
-        type: "offline"
-      }
+        type: "offline",
+      },
     });
 
     // Save the printer
     const savedPrinter = await newPrinter.save();
-
-    logger.info(`Created new printer: ${printerName}`);
+
+    logger.info(`Created new printer: ${name}`);
     res.status(201).send(savedPrinter);
   } catch (error) {
     logger.error("Error creating printer:", error);
     res.status(500).send({ error: error.message });
   }
 };
+
+export const getPrinterStatsRouteHandler = async (req, res) => {
+  try {
+    const stats = await printerModel.aggregate([
+      {
+        $group: {
+          _id: "$state.type",
+          count: { $sum: 1 }
+        }
+      }
+    ]);
+
+    // Transform the results into a more readable format
+    const formattedStats = stats.reduce((acc, curr) => {
+      acc[curr._id] = curr.count;
+      return acc;
+    }, {});
+
+    logger.trace("Printer stats by state:", formattedStats);
+    res.send(formattedStats);
+  } catch (error) {
+    logger.error("Error fetching printer stats:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
diff --git a/src/services/printjobs/index.js b/src/services/printjobs/index.js
index 29b3171..a01f224 100644
--- a/src/services/printjobs/index.js
+++ b/src/services/printjobs/index.js
@@ -46,7 +46,7 @@ export const getPrintJobRouteHandler = async (req, res) => {
       .findOne({
         _id: id,
       })
-      .populate("printers", "printerName state")
+      .populate("printers", "name state")
       .populate("gcodeFile")
       .populate("subJobs");
 
@@ -120,7 +120,7 @@ export const createPrintJobRouteHandler = async (req, res) => {
       subJobs: [], // Initialize empty array for subjob references
       createdAt: new Date(),
       updatedAt: new Date(),
-      startedAt: new Date(),
+      startedAt: null
     });
 
     // Save the print job first to get its ID
@@ -156,3 +156,29 @@ export const createPrintJobRouteHandler = async (req, res) => {
     res.status(500).send({ error: error.message });
   }
 };
+
+export const getPrintJobStatsRouteHandler = async (req, res) => {
+  try {
+    const stats = await printJobModel.aggregate([
+      {
+        $group: {
+          _id: "$state.type",
+          count: { $sum: 1 }
+        }
+      }
+    ]);
+
+    // Transform the results into a more readable format
+    const formattedStats = stats.reduce((acc, curr) => {
+      acc[curr._id] = curr.count;
+      return acc;
+    }, {});
+
+    logger.trace("Print job stats by state:", formattedStats);
+    res.send(formattedStats);
+  } catch (error) {
+    logger.error("Error fetching print job stats:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
+
diff --git a/src/services/products/index.js b/src/services/products/index.js
index fd45cde..17d4e7e 100644
--- a/src/services/products/index.js
+++ b/src/services/products/index.js
@@ -1,5 +1,6 @@
 import dotenv from "dotenv";
 import { productModel } from "../../schemas/product.schema.js";
+import { partModel } from "../../schemas/part.schema.js";
 import log4js from "log4js";
 import mongoose from "mongoose";
@@ -29,8 +30,19 @@ export const listProductsRouteHandler = async (
     }
 
     if (property != "") {
-      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
-      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+      // Match documents where the specified property is either null, undefined, empty string, empty array or empty object
+      aggregateCommand.push({
+        $match: {
+          $or: [
+            { [property]: null },
+            { [property]: "" },
+            { [property]: [] },
+            { [property]: {} },
+            { [property]: { $exists: false } },
+          ],
+        },
+      });
+      aggregateCommand.push({ $project: { _id: 1, [property]: 1 } });
     } else {
       aggregateCommand.push({ $project: { image: 0, url: 0 } });
     }
@@ -58,9 +70,12 @@ export const getProductRouteHandler = async (req, res) => {
     // Get ID from params
     const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the product with the given remote address
-    const product = await productModel.findOne({
-      _id: id,
-    });
+    const product = await productModel
+      .findOne({
+        _id: id,
+      })
+      .populate("vendor")
+      .populate("parts");
 
     if (!product) {
       logger.warn(`Product not found with supplied id.`);
@@ -76,9 +91,10 @@ export const getProductRouteHandler = async (req, res) => {
 };
 
 export const editProductRouteHandler = async (req, res) => {
+  // Get ID from params
+  const id = new mongoose.Types.ObjectId(req.params.id);
+
   try {
-    // Get ID from params
-    const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the product with the given remote address
     const product = await productModel.findOne({ _id: id });
 
@@ -89,45 +105,93 @@ export const editProductRouteHandler = async (req, res) => {
     }
 
     logger.trace(`Product with ID: ${id}:`, product);
-
-    try {
-      const { createdAt, updatedAt, started_at, status, ...updateData } =
-        req.body;
-
-      const result = await productModel.updateOne(
-        { _id: id },
-        { $set: updateData },
-      );
-      if (result.nModified === 0) {
-        logger.error("No Product updated.");
-        res.status(500).send({ error: "No products updated." });
-      }
-    } catch (updateError) {
-      logger.error("Error updating product:", updateError);
-      res.status(500).send({ error: updateError.message });
-    }
-    res.send("OK");
   } catch (fetchError) {
     logger.error("Error fetching product:", fetchError);
     res.status(500).send({ error: fetchError.message });
   }
+
+  try {
+    const updateData = {
+      updatedAt: new Date(),
+      name: req.body?.name,
+      vendor: req.body?.vendor?.id,
+      tags: req.body?.tags,
+      version: req.body?.version,
+      parts: req.body?.parts,
+      margin: req.body.margin,
+      price: req.body.price,
+      marginOrPrice: req.body.marginOrPrice,
+    };
+
+    console.log("ID:", id);
+
+    const result = await productModel.updateOne(
+      { _id: id },
+      { $set: updateData },
+    );
+    if (result.nModified === 0) {
+      logger.error("No Product updated.");
+      res.status(500).send({ error: "No products updated." });
+    }
+  } catch (updateError) {
+    logger.error("Error updating product:", updateError);
+    res.status(500).send({ error: updateError.message });
+  }
+  res.send("OK");
 };
 
 export const newProductRouteHandler = async (req, res) => {
   try {
-    let { ...newProduct } = req.body;
-    newProduct = {
-      ...newProduct,
+    const newProduct = {
       createdAt: new Date(),
       updatedAt: new Date(),
+      name: req.body.name,
+      vendor: req.body.vendor.id,
+      parts: partIds,
+      margin: req.body.margin,
+      price: req.body.price,
+      marginOrPrice: req.body.marginOrPrice,
     };
 
-    const result = await productModel.create(newProduct);
-    if (result.nCreated === 0) {
+    const newProductResult = await productModel.create(newProduct);
+
+    if (newProductResult.nCreated === 0) {
       logger.error("No product created.");
       res.status(500).send({ error: "No product created." });
     }
-    res.status(200).send(result);
+
+    const parts = req.body.parts || [];
+    const productId = newProductResult._id;
+
+    var partIds = [];
+
+    for (const part of parts) {
+      const newPart = {
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        name: part.name,
+        product: productId,
+      };
+
+      const newPartResult = await partModel.create(newPart);
+      if (newPartResult.nCreated === 0) {
+        logger.error("No parts created.");
+        res.status(500).send({ error: "No parts created." });
+      }
+      partIds.push(newPartResult._id);
+    }
+
+    const editProductResult = await productModel.updateOne(
+      { _id: productId },
+      { $set: { parts: partIds } },
+    );
+
+    if (editProductResult.nModified === 0) {
+      logger.error("No product updated.");
+      res.status(500).send({ error: "No products updated." });
+    }
+
+    res.status(200).send({ ...newProductResult, parts: partIds });
   } catch (updateError) {
     logger.error("Error updating product:", updateError);
     res.status(500).send({ error: updateError.message });
diff --git a/src/services/spotlight/index.js b/src/services/spotlight/index.js
index 0f3e920..1af8131 100644
--- a/src/services/spotlight/index.js
+++ b/src/services/spotlight/index.js
@@ -14,7 +14,7 @@ logger.level = process.env.LOG_LEVEL;
 const formatPrintersResponse = (printers) => {
   return printers.map((printer) => ({
     id: printer.id,
-    name: printer.printerName,
+    name: printer.name,
     link: `/production/printers/info?printerId=${printer.id}`,
     printer: printer,
   }));
diff --git a/src/services/vendors/index.js b/src/services/vendors/index.js
index f9bb36a..d9ee5e6 100644
--- a/src/services/vendors/index.js
+++ b/src/services/vendors/index.js
@@ -92,8 +92,15 @@ export const editVendorRouteHandler = async (req, res) => {
     logger.trace(`Vendor with ID: ${id}:`, vendor);
 
     try {
-      const { createdAt, updatedAt, started_at, status, ...updateData } =
-        req.body;
+      const updateData = {
+        updatedAt: new Date(),
+        contact: req.body.contact,
+        country: req.body.country,
+        name: req.body.name,
+        website: req.body.website,
+        phone: req.body.phone,
+        email: req.body.email,
+      };
 
       const result = await vendorModel.updateOne(
         { _id: id },