Added filament stock support and some additional filtering.
parent 9fdb546af9 · commit 5a5701088a
.gitignore (vendored) · 6 lines changed
@@ -136,5 +136,7 @@ dist

 .nova

-./gocdefile
-./gcodefiles
+gocdefile/*
+gcodefile
+gcodefiles/*
+gcodefiles
@@ -16,6 +16,8 @@ import {
   productRoutes,
   vendorRoutes,
   materialRoutes,
+  partStockRoutes,
+  filamentStockRoutes,
 } from "./routes/index.js";
 import path from "path";
 import * as fs from "fs";
@@ -71,6 +73,8 @@ app.use("/parts", partRoutes);
 app.use("/products", productRoutes);
 app.use("/vendors", vendorRoutes);
 app.use("/materials", materialRoutes);
+app.use("/partstocks", partStockRoutes);
+app.use("/filamentstocks", filamentStockRoutes);

 if (process.env.SCHEDULE_HOUR) {
   cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {
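The two new routers are mounted next to the existing ones, so stock data becomes reachable under /partstocks and /filamentstocks. A minimal sketch of how a client might exercise the new endpoints; the base URL, token, and ids are placeholders, not part of this commit:

```js
// Hypothetical usage of the newly mounted stock routes (base URL, token and ids are assumptions).
const BASE = "http://localhost:3000";
const headers = { Authorization: "Bearer <token>", "Content-Type": "application/json" };

// List filament stocks (paged like the other list endpoints).
const stocks = await fetch(`${BASE}/filamentstocks?page=1&limit=25`, { headers }).then((r) => r.json());

// Create a filament stock from an existing filament document.
await fetch(`${BASE}/filamentstocks`, {
  method: "POST",
  headers,
  body: JSON.stringify({ filament: { _id: "66a1f0c2e7b1a2d3c4e5f601" }, startingGrossWeight: 1250 }),
});
```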
@@ -14,7 +14,7 @@ import {
 router.get("/", isAuthenticated, (req, res) => {
   const { page, limit, property } = req.query;

-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ["type", "vendor.name", "diameter", "color"];

   const filter = {};

src/routes/filamentstocks/index.js (new file) · 45 lines
@@ -0,0 +1,45 @@
+import express from "express";
+import { isAuthenticated } from "../../keycloak.js";
+import { parseStringIfNumber } from "../../util/index.js";
+
+const router = express.Router();
+import {
+  listFilamentStocksRouteHandler,
+  getFilamentStockRouteHandler,
+  editFilamentStockRouteHandler,
+  newFilamentStockRouteHandler,
+} from "../../services/filamentstocks/index.js";
+
+// list of filamentStocks
+router.get("/", isAuthenticated, (req, res) => {
+  const { page, limit, property } = req.query;
+
+  const allowedFilters = ["country"];
+
+  const filter = {};
+
+  for (const [key, value] of Object.entries(req.query)) {
+    for (var i = 0; i < allowedFilters.length; i++) {
+      if (key == allowedFilters[i]) {
+        filter[key] = parseStringIfNumber(value);
+      }
+    }
+  }
+
+  listFilamentStocksRouteHandler(req, res, page, limit, property, filter);
+});
+
+router.post("/", isAuthenticated, (req, res) => {
+  newFilamentStockRouteHandler(req, res);
+});
+
+router.get("/:id", isAuthenticated, (req, res) => {
+  getFilamentStockRouteHandler(req, res);
+});
+
+// update printer info
+router.put("/:id", isAuthenticated, async (req, res) => {
+  editFilamentStockRouteHandler(req, res);
+});
+
+export default router;
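The same allow-list loop appears in several of these route files: only query parameters named in allowedFilters are copied into the Mongo filter, and parseStringIfNumber coerces numeric strings. A standalone sketch of the idea; the helper below is a simplified stand-in, not the project's implementation:

```js
// Simplified stand-in for the project's parseStringIfNumber helper (assumption).
const parseStringIfNumber = (v) => (v !== "" && !Number.isNaN(Number(v)) ? Number(v) : v);

// Build a Mongo-style filter from a query object, keeping only allow-listed keys.
function buildFilter(query, allowedFilters) {
  const filter = {};
  for (const [key, value] of Object.entries(query)) {
    if (allowedFilters.includes(key)) {
      filter[key] = parseStringIfNumber(value);
    }
  }
  return filter;
}

// Example: GET /filaments?type=PLA&diameter=1.75&page=2
console.log(buildFilter({ type: "PLA", diameter: "1.75", page: "2" }, ["type", "vendor.name", "diameter", "color"]));
// -> { type: "PLA", diameter: 1.75 }  (page is not allow-listed and is handled separately)
```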
@@ -15,11 +15,11 @@ import {

 // list of printers
 router.get("/", isAuthenticated, (req, res) => {
-  const { page, limit, property, search } = req.query;
+  const { page, limit, property, search, sort, order } = req.query;

   const allowedFilters = [
     "filament.type",
-    "filament.brand",
+    "filament.vendor.name",
     "filament.diameter",
     "filament.color",
   ];
@@ -34,7 +34,7 @@ router.get("/", isAuthenticated, (req, res) => {
     }
   }

-  listGCodeFilesRouteHandler(req, res, page, limit, property, filter, search);
+  listGCodeFilesRouteHandler(req, res, page, limit, property, filter, search, sort, order);
 });

 // new pritner
@@ -10,6 +10,8 @@ import partRoutes from "./parts/index.js";
 import productRoutes from "./products/index.js";
 import vendorRoutes from "./vendors/index.js";
 import materialRoutes from "./materials/index.js";
+import partStockRoutes from "./partstocks/index.js";
+import filamentStockRoutes from "./filamentstocks/index.js";

 export {
   userRoutes,
@@ -24,4 +26,6 @@ export {
   productRoutes,
   vendorRoutes,
   materialRoutes,
+  partStockRoutes,
+  filamentStockRoutes,
 };
@@ -14,21 +14,21 @@ import {

 // list of parts
 router.get("/", isAuthenticated, (req, res) => {
-  const { page, limit, property } = req.query;
+  const { page, limit, property, sort, order } = req.query;

-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ["products", "name"];

   const filter = {};

   for (const [key, value] of Object.entries(req.query)) {
     for (var i = 0; i < allowedFilters.length; i++) {
       if (key == allowedFilters[i]) {
-        filter[key] = parseStringIfNumber(value);
+        filter[key] = { $regex: parseStringIfNumber(value), $options: 'i' }
       }
     }
   }

-  listPartsRouteHandler(req, res, page, limit, property, filter);
+  listPartsRouteHandler(req, res, page, limit, property, filter, "", sort, order);
 });

 router.post("/", isAuthenticated, (req, res) => {
src/routes/partstocks/index.js (new file) · 45 lines
@@ -0,0 +1,45 @@
+import express from "express";
+import { isAuthenticated } from "../../keycloak.js";
+import { parseStringIfNumber } from "../../util/index.js";
+
+const router = express.Router();
+import {
+  listPartStocksRouteHandler,
+  getPartStockRouteHandler,
+  editPartStockRouteHandler,
+  newPartStockRouteHandler,
+} from "../../services/partstocks/index.js";
+
+// list of partStocks
+router.get("/", isAuthenticated, (req, res) => {
+  const { page, limit, property } = req.query;
+
+  const allowedFilters = ["country"];
+
+  const filter = {};
+
+  for (const [key, value] of Object.entries(req.query)) {
+    for (var i = 0; i < allowedFilters.length; i++) {
+      if (key == allowedFilters[i]) {
+        filter[key] = parseStringIfNumber(value);
+      }
+    }
+  }
+
+  listPartStocksRouteHandler(req, res, page, limit, property, filter);
+});
+
+router.post("/", isAuthenticated, (req, res) => {
+  newPartStockRouteHandler(req, res);
+});
+
+router.get("/:id", isAuthenticated, (req, res) => {
+  getPartStockRouteHandler(req, res);
+});
+
+// update printer info
+router.put("/:id", isAuthenticated, async (req, res) => {
+  editPartStockRouteHandler(req, res);
+});
+
+export default router;
@@ -8,6 +8,7 @@ import {
   editPrinterRouteHandler,
   getPrinterRouteHandler,
   createPrinterRouteHandler,
+  getPrinterStatsRouteHandler,
 } from "../../services/printers/index.js";

 // list of printers
@@ -21,6 +22,11 @@ router.post("/", isAuthenticated, (req, res) => {
   createPrinterRouteHandler(req, res);
 });

+// get printer stats
+router.get("/stats", isAuthenticated, (req, res) => {
+  getPrinterStatsRouteHandler(req, res);
+});
+
 router.get("/:id", isAuthenticated, (req, res) => {
   getPrinterRouteHandler(req, res);
 });
@@ -30,4 +36,5 @@ router.put("/:id", isAuthenticated, async (req, res) => {
   editPrinterRouteHandler(req, res);
 });

+
 export default router;
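One detail worth noting about the new /stats routes (here and in the print-jobs router below): Express matches routes in registration order, so the literal /stats path has to be registered before the /:id parameter route, exactly as the hunk above does, or GET /printers/stats would be parsed as a lookup by id. A minimal sketch of that ordering rule; paths and payloads are illustrative only:

```js
import express from "express";
const router = express.Router();

// Correct: the literal path comes first, so "/stats" never falls through to ":id".
router.get("/stats", (req, res) => res.send({ idle: 2, printing: 1 }));
router.get("/:id", (req, res) => res.send({ id: req.params.id }));

// If the order were reversed, GET /stats would match "/:id" with req.params.id === "stats".
```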
@@ -7,6 +7,7 @@ import {
   getPrintJobRouteHandler,
   editPrintJobRouteHandler,
   createPrintJobRouteHandler,
+  getPrintJobStatsRouteHandler
 } from "../../services/printjobs/index.js";

 // list of print jobs
@@ -15,6 +16,11 @@ router.get("/", isAuthenticated, (req, res) => {
   listPrintJobsRouteHandler(req, res, page, limit);
 });

+// get printer stats
+router.get("/stats", isAuthenticated, (req, res) => {
+  getPrintJobStatsRouteHandler(req, res);
+});
+
 // create new print job
 router.post("/", isAuthenticated, (req, res) => {
   createPrintJobRouteHandler(req, res);
src/routes/vendors/index.js (vendored) · 2 lines changed
@@ -14,7 +14,7 @@ import {
 router.get("/", isAuthenticated, (req, res) => {
   const { page, limit, property } = req.query;

-  const allowedFilters = ["type", "brand", "diameter", "color"];
+  const allowedFilters = ["country"];

   const filter = {};

@@ -1,4 +1,5 @@
 import mongoose from "mongoose";
+const { Schema } = mongoose;

 const filamentSchema = new mongoose.Schema({
   name: { required: true, type: String },
@@ -6,9 +7,9 @@ const filamentSchema = new mongoose.Schema({
   url: { required: false, type: String },
   image: { required: false, type: Buffer },
   color: { required: true, type: String },
-  brand: { required: true, type: String },
+  vendor: { type: Schema.Types.ObjectId, ref: "Vendor", required: true },
   type: { required: true, type: String },
-  price: { required: true, type: Number },
+  cost: { required: true, type: Number },
   diameter: { required: true, type: Number },
   density: { required: true, type: Number },
   createdAt: { required: true, type: Date },
src/schemas/filamentstock.schema.js (new file) · 39 lines
@@ -0,0 +1,39 @@
+import mongoose from "mongoose";
+const { Schema } = mongoose;
+
+// Define the main filamentStock schema
+const filamentStockSchema = new Schema(
+  {
+    state: {
+      type: { type: String, required: true },
+      percent: { type: String, required: true },
+    },
+    startingGrossWeight: { type: Number, required: true },
+    startingNetWeight: { type: Number, required: true },
+    currentGrossWeight: { type: Number, required: true },
+    currentNetWeight: { type: Number, required: true },
+    filament: { type: mongoose.Schema.Types.ObjectId, ref: "Filament" },
+    stockEvents: [{
+      type: { type: String, required: true },
+      value: { type: Number, required: true },
+      subJob: { type: mongoose.Schema.Types.ObjectId, ref: "PrintSubJob", required: false },
+      job: { type: mongoose.Schema.Types.ObjectId, ref: "PrintJob", required: false },
+      timestamp: { type: Date, default: Date.now }
+    }]
+  },
+  { timestamps: true },
+);
+
+// Add virtual id getter
+filamentStockSchema.virtual("id").get(function () {
+  return this._id.toHexString();
+});
+
+// Configure JSON serialization to include virtuals
+filamentStockSchema.set("toJSON", { virtuals: true });
+
+// Create and export the model
+export const filamentStockModel = mongoose.model(
+  "FilamentStock",
+  filamentStockSchema,
+);
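For orientation, this is roughly what a stored FilamentStock document looks like under the schema above; the concrete values are invented. Gross weight includes the spool, net weight is filament only (the service shown later derives it from the filament's emptySpoolWeight), and each consumption or correction is appended to stockEvents.

```js
// Illustrative FilamentStock document (values are made up, not from the commit).
const exampleFilamentStock = {
  state: { type: "unconsumed", percent: "0" },
  startingGrossWeight: 1250, // grams, spool included
  startingNetWeight: 1000,   // grams of filament (gross minus empty spool weight)
  currentGrossWeight: 1130,
  currentNetWeight: 880,
  filament: "66a1f0c2e7b1a2d3c4e5f601", // ObjectId of the referenced Filament
  stockEvents: [
    { type: "consumption", value: 120, subJob: "66a1f0c2e7b1a2d3c4e5f7aa", timestamp: new Date() },
  ],
  createdAt: new Date(), // added by { timestamps: true }
  updatedAt: new Date(),
};
```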
@@ -8,7 +8,7 @@ const gcodeFileSchema = new mongoose.Schema({
   size: { type: Number, required: false },
   filament: { type: Schema.Types.ObjectId, ref: "Filament", required: true },
   parts: [{ type: Schema.Types.ObjectId, ref: "Part", required: true }],
-  price: { type: Number, required: false },
+  cost: { type: Number, required: false },
   createdAt: { type: Date },
   updatedAt: { type: Date },
 });
@@ -4,6 +4,7 @@ const materialSchema = new mongoose.Schema({
   name: { required: true, type: String },
   url: { required: false, type: String },
   image: { required: false, type: Buffer },
+  tags: [{ type: String }],
 });

 materialSchema.virtual("id").get(function () {
@@ -5,7 +5,12 @@ const { Schema } = mongoose;
 const partSchema = new Schema(
   {
     name: { type: String, required: true },
-    products: [{ type: mongoose.Schema.Types.ObjectId, ref: "Product" }],
+    fileName: { type: String, required: false },
+    product: { type: mongoose.Schema.Types.ObjectId, ref: "Product" },
+    useGlobalPricing: { type: Boolean, default: true },
+    marginOrPrice: { type: Boolean, default: false },
+    margin: { type: Number, required: false },
+    price: { type: Number, required: false },
   },
   { timestamps: true },
 );
src/schemas/partstock.schema.js (new file) · 23 lines
@@ -0,0 +1,23 @@
+import mongoose from "mongoose";
+const { Schema } = mongoose;
+
+// Define the main partStock schema
+const partStockSchema = new Schema(
+  {
+    name: { type: String, required: true },
+    fileName: { type: String, required: false },
+    product: { type: mongoose.Schema.Types.ObjectId, ref: "Product" },
+  },
+  { timestamps: true },
+);
+
+// Add virtual id getter
+partStockSchema.virtual("id").get(function () {
+  return this._id.toHexString();
+});
+
+// Configure JSON serialization to include virtuals
+partStockSchema.set("toJSON", { virtuals: true });
+
+// Create and export the model
+export const partStockModel = mongoose.model("PartStock", partStockSchema);
@@ -12,10 +12,19 @@ const moonrakerSchema = new Schema(
   { _id: false },
 );

+// Define the alert schema
+const alertSchema = new Schema(
+  {
+    priority: { type: String, required: true }, // order to show
+    type: { type: String, required: true }, // selectFilament, error, info, message,
+  },
+  { timestamps: true, _id: false }
+);
+
 // Define the main printer schema
 const printerSchema = new Schema(
   {
-    printerName: { type: String, required: true },
+    name: { type: String, required: true },
     online: { type: Boolean, required: true, default: false },
     state: {
       type: { type: String, required: true, default: "Offline" },
@@ -32,7 +41,10 @@ const printerSchema = new Schema(
     firmware: { type: String },
     currentJob: { type: Schema.Types.ObjectId, ref: "PrintJob" },
     currentSubJob: { type: Schema.Types.ObjectId, ref: "PrintSubJob" },
+    currentFilamentStock: { type: Schema.Types.ObjectId, ref: "FilamentStock" },
     subJobs: [{ type: Schema.Types.ObjectId, ref: "PrintSubJob" }],
+    vendor: { type: Schema.Types.ObjectId, ref: "Vendor", default: null },
+    alerts: [alertSchema],
   },
   { timestamps: true },
 );
@@ -8,7 +8,7 @@ const printJobSchema = new mongoose.Schema({
   printers: [{ type: Schema.Types.ObjectId, ref: "Printer", required: false }],
   createdAt: { required: true, type: Date },
   updatedAt: { required: true, type: Date },
-  startedAt: { required: true, type: Date },
+  startedAt: { required: false, type: Date },
   gcodeFile: {
     type: Schema.Types.ObjectId,
     ref: "GCodeFile",
@@ -7,6 +7,10 @@ const productSchema = new Schema(
     name: { type: String, required: true },
     tags: [{ type: String }],
     version: { type: String },
+    marginOrPrice: { type: Boolean, default: false },
+    margin: { type: Number, required: false },
+    price: { type: Number, required: false },
+    vendor: { type: Schema.Types.ObjectId, ref: "Vendor", required: true },
     parts: [{ type: mongoose.Schema.Types.ObjectId, ref: "Part" }],
   },
   { timestamps: true },
@@ -21,4 +25,4 @@ productSchema.virtual("id").get(function () {
 productSchema.set("toJSON", { virtuals: true });

 // Create and export the model
-export const productModel = mongoose.model("product", productSchema);
+export const productModel = mongoose.model("Product", productSchema);
@@ -4,8 +4,10 @@ const vendorSchema = new mongoose.Schema(
   {
     name: { required: true, type: String },
     website: { required: false, type: String },
+    email: { required: false, type: String },
+    phone: { required: false, type: String },
     contact: { required: false, type: String },
-    image: { required: false, type: Buffer },
+    country: { required: false, type: String },
   },
   { timestamps: true },
 );
@@ -1,5 +1,5 @@
 import dotenv from "dotenv";
-import { filamentModel } from "../../schemas/filament.schema.js"
+import { filamentModel } from "../../schemas/filament.schema.js";
 import jwt from "jsonwebtoken";
 import log4js from "log4js";
 import mongoose from "mongoose";
@@ -9,21 +9,39 @@ dotenv.config();
 const logger = log4js.getLogger("Filaments");
 logger.level = process.env.LOG_LEVEL;

-export const listFilamentsRouteHandler = async (req, res, page = 1, limit = 25, property = "", filter = {}) => {
+export const listFilamentsRouteHandler = async (
+  req,
+  res,
+  page = 1,
+  limit = 25,
+  property = "",
+  filter = {},
+) => {
   try {
     // Calculate the skip value based on the page number and limit
     const skip = (page - 1) * limit;


     let filament;
     let aggregateCommand = [];

-    if (filter != {}) { // use filtering if present
+    aggregateCommand.push({
+      $lookup: {
+        from: "vendors", // The collection name (usually lowercase plural)
+        localField: "vendor", // The field in your current model
+        foreignField: "_id", // The field in the products collection
+        as: "vendor", // The output field name
+      },
+    });
+
+    aggregateCommand.push({ $unwind: "$vendor" });
+
+    if (filter != {}) {
+      // use filtering if present
       aggregateCommand.push({ $match: filter });
     }

     if (property != "") {
-      aggregateCommand.push({ $group: { _id: `$${property}` } }) // group all same properties
+      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
       aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
     } else {
       aggregateCommand.push({ $project: { image: 0, url: 0 } });
@@ -32,11 +50,14 @@ export const listFilamentsRouteHandler = async (req, res, page = 1, limit = 25,
     aggregateCommand.push({ $skip: skip });
     aggregateCommand.push({ $limit: Number(limit) });

-    console.log(aggregateCommand)
+    console.log(aggregateCommand);

-    filament = await filamentModel.aggregate(aggregateCommand)
+    filament = await filamentModel.aggregate(aggregateCommand);

-    logger.trace(`List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`, filament);
+    logger.trace(
+      `List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`,
+      filament,
+    );
     res.send(filament);
   } catch (error) {
     logger.error("Error listing filaments:", error);
@@ -49,9 +70,11 @@ export const getFilamentRouteHandler = async (req, res) => {
     // Get ID from params
     const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the filament with the given remote address
-    const filament = await filamentModel.findOne({
-      _id: id
-    });
+    const filament = await filamentModel
+      .findOne({
+        _id: id,
+      })
+      .populate("vendor");

     if (!filament) {
       logger.warn(`Filament not found with supplied id.`);
@@ -73,7 +96,8 @@ export const editFilamentRouteHandler = async (req, res) => {
     // Fetch the filament with the given remote address
     const filament = await filamentModel.findOne({ _id: id });

-    if (!filament) { // Error handling
+    if (!filament) {
+      // Error handling
       logger.warn(`Filament not found with supplied id.`);
       return res.status(404).send({ error: "Print job not found." });
     }
@@ -81,38 +105,65 @@ export const editFilamentRouteHandler = async (req, res) => {
     logger.trace(`Filament with ID: ${id}:`, filament);

     try {
-      const { createdAt, updatedAt, started_at, status, ...updateData } = req.body;
+      const updateData = {
+        updatedAt: new Date(),
+        name: req.body.name,
+        barcode: req.body.barcode,
+        url: req.body.url,
+        image: req.body.image,
+        color: req.body.color,
+        vendor: req.body.vendor.id,
+        type: req.body.type,
+        price: req.body.price,
+        diameter: req.body.diameter,
+        density: req.body.density,
+        emptySpoolWeight: req.body.emptySpoolWeight,
+      };

       const result = await filamentModel.updateOne(
         { _id: id },
-        { $set: updateData }
+        { $set: updateData },
       );
       if (result.nModified === 0) {
         logger.error("No Filament updated.");
-        res.status(500).send({ error: "No filaments updated." });
+        return res.status(500).send({ error: "No filaments updated." });
       }
     } catch (updateError) {
       logger.error("Error updating filament:", updateError);
-      res.status(500).send({ error: updateError.message });
+      return res.status(500).send({ error: updateError.message });
     }
-    res.send("OK");
+    return res.send("OK");
   } catch (fetchError) {
     logger.error("Error fetching filament:", fetchError);
-    res.status(500).send({ error: fetchError.message });
+    return res.status(500).send({ error: fetchError.message });
   }
 };

 export const newFilamentRouteHandler = async (req, res) => {

   try {
-    let { ...newFilament } = req.body;
-    newFilament = { ...newFilament, createdAt: new Date(), updatedAt: new Date() }
+    const newFilament = {
+      createdAt: new Date(),
+      updatedAt: new Date(),
+      name: req.body.name,
+      barcode: req.body.barcode,
+      url: req.body.url,
+      image: req.body.image,
+      color: req.body.color,
+      vendor: req.body.vendor._id,
+      type: req.body.type,
+      cost: req.body.cost,
+      diameter: req.body.diameter,
+      density: req.body.density,
+      emptySpoolWeight: req.body.emptySpoolWeight,
+    };

     const result = await filamentModel.create(newFilament);

     if (result.nCreated === 0) {
       logger.error("No filament created.");
       res.status(500).send({ error: "No filament created." });
     }

     res.status(200).send({ status: "ok" });
   } catch (updateError) {
     logger.error("Error updating filament:", updateError);
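The listing handler now always joins the vendor document before matching, which is what makes allow-listed filters such as vendor.name usable. A condensed sketch of the pipeline it ends up running for a plain paged request with a vendor-name filter; stage shapes follow the hunk above, the concrete filter value is invented:

```js
// Roughly the pipeline built by listFilamentsRouteHandler for
// GET /filaments?vendor.name=Prusament&page=1&limit=25 (filter value is an assumption).
const pipeline = [
  { $lookup: { from: "vendors", localField: "vendor", foreignField: "_id", as: "vendor" } },
  { $unwind: "$vendor" },                    // vendor becomes an embedded document, so "vendor.name" is matchable
  { $match: { "vendor.name": "Prusament" } },
  { $project: { image: 0, url: 0 } },        // heavy fields dropped when no `property` grouping is requested
  { $skip: 0 },                              // (page - 1) * limit
  { $limit: 25 },
];
// const filaments = await filamentModel.aggregate(pipeline);
```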
src/services/filamentstocks/index.js (new file) · 197 lines
@@ -0,0 +1,197 @@
+import dotenv from "dotenv";
+import { filamentStockModel } from "../../schemas/filamentstock.schema.js";
+import { filamentModel } from "../../schemas/filament.schema.js";
+import jwt from "jsonwebtoken";
+import log4js from "log4js";
+import mongoose from "mongoose";
+
+dotenv.config();
+
+const logger = log4js.getLogger("Filament Stocks");
+logger.level = process.env.LOG_LEVEL;
+
+export const listFilamentStocksRouteHandler = async (
+  req,
+  res,
+  page = 1,
+  limit = 25,
+  property = "",
+  filter = {},
+) => {
+  try {
+    // Calculate the skip value based on the page number and limit
+    const skip = (page - 1) * limit;
+
+    let filamentStock;
+    let aggregateCommand = [];
+
+    aggregateCommand.push({
+      $lookup: {
+        from: "filaments", // The collection name (usually lowercase plural)
+        localField: "filament", // The field in your current model
+        foreignField: "_id", // The field in the products collection
+        as: "filament", // The output field name
+      },
+    });
+
+    aggregateCommand.push({ $unwind: "$filament" });
+
+    if (filter != {}) {
+      // use filtering if present
+      aggregateCommand.push({ $match: filter });
+    }
+
+    if (property != "") {
+      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
+      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+    } else {
+      aggregateCommand.push({ $project: { image: 0, url: 0 } });
+    }
+
+    aggregateCommand.push({ $skip: skip });
+    aggregateCommand.push({ $limit: Number(limit) });
+
+    console.log(aggregateCommand);
+
+    filamentStock = await filamentStockModel.aggregate(aggregateCommand);
+
+    logger.trace(
+      `List of filamentStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
+      filamentStock,
+    );
+    res.send(filamentStock);
+  } catch (error) {
+    logger.error("Error listing filament stocks:", error);
+    res.status(500).send({ error: error });
+  }
+};
+
+export const getFilamentStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the filamentStock with the given remote address
+    const filamentStock = await filamentStockModel
+      .findOne({
+        _id: id,
+      })
+      .populate("filament").populate({
+        path: 'stockEvents',
+        populate: [
+          {
+            path: 'subJob',
+            select: 'number'
+          },
+          {
+            path: 'job',
+            select: 'startedAt'
+          }
+        ]
+      });
+
+    if (!filamentStock) {
+      logger.warn(`Filament stock not found with supplied id.`);
+      return res.status(404).send({ error: "Print job not found." });
+    }
+
+    logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
+    res.send(filamentStock);
+  } catch (error) {
+    logger.error("Error fetching filament stock:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
+
+export const editFilamentStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the filamentStock with the given remote address
+    const filamentStock = await filamentStockModel.findOne({ _id: id });
+
+    if (!filamentStock) {
+      // Error handling
+      logger.warn(`Filament stock not found with supplied id.`);
+      return res.status(404).send({ error: "Filament stock not found." });
+    }
+
+    logger.trace(`Filament stock with ID: ${id}:`, filamentStock);
+
+    try {
+      const updateData = {
+        updatedAt: new Date(),
+        contact: req.body.contact,
+        country: req.body.country,
+        name: req.body.name,
+        website: req.body.website,
+        phone: req.body.phone,
+        email: req.body.email,
+      };
+
+      const result = await filamentStockModel.updateOne(
+        { _id: id },
+        { $set: updateData },
+      );
+      if (result.nModified === 0) {
+        logger.error("No filament stock updated.");
+        res.status(500).send({ error: "No filament stocks updated." });
+      }
+    } catch (updateError) {
+      logger.error("Error updating filament stock:", updateError);
+      res.status(500).send({ error: updateError.message });
+    }
+    res.send("OK");
+  } catch (fetchError) {
+    logger.error("Error fetching filament stock:", fetchError);
+    res.status(500).send({ error: fetchError.message });
+  }
+};
+
+export const newFilamentStockRouteHandler = async (req, res) => {
+  var filament = null;
+
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.body.filament._id);
+    // Fetch the filament with the given remote address
+    filament = await filamentModel.findOne({
+      _id: id,
+    });
+
+    if (!filament) {
+      logger.warn(`Filament not found with supplied id.`);
+      return res.status(404).send({ error: "Filament not found." });
+    }
+
+    logger.trace(`Filament with ID: ${id}:`, filament);
+  } catch (error) {
+    logger.error("Error fetching filament:", error);
+    return res.status(500).send({ error: error.message });
+  }
+
+  try {
+    logger.warn(req.body);
+    const startingGrossWeight = req.body.startingGrossWeight;
+    const newFilamentStock = {
+      startingGrossWeight: startingGrossWeight,
+      startingNetWeight: startingGrossWeight - filament.emptySpoolWeight,
+      currentGrossWeight: startingGrossWeight,
+      currentNetWeight: startingGrossWeight - filament.emptySpoolWeight,
+      filament: req.body.filament._id,
+      state: {
+        type: "unconsumed",
+        percent: 0,
+      },
+    };
+
+    const result = await filamentStockModel.create(newFilamentStock);
+    if (result.nCreated === 0) {
+      logger.error("No filament stock created.");
+      return res.status(500).send({ error: "No filament stock created." });
+    }
+    return res.send({ status: "ok" });
+  } catch (updateError) {
+    logger.error("Error adding filament stock:", updateError);
+    return res.status(500).send({ error: updateError.message });
+  }
+};
@@ -1,5 +1,6 @@
 import dotenv from "dotenv";
 import { gcodeFileModel } from "../../schemas/gcodefile.schema.js";
+import { filamentModel } from "../../schemas/filament.schema.js";
 import jwt from "jsonwebtoken";
 import log4js from "log4js";
 import multer from "multer";
@@ -62,6 +63,8 @@ export const listGCodeFilesRouteHandler = async (
   property = "",
   filter = {},
   search = "",
+  sort = "",
+  order = "ascend"
 ) => {
   try {
     // Calculate the skip value based on the page number and limit
@@ -103,6 +106,17 @@ export const listGCodeFilesRouteHandler = async (
       },
     });

+    aggregateCommand.push({
+      $lookup: {
+        from: "vendors", // The collection name (usually lowercase plural)
+        localField: "filament.vendor", // The field in your current model
+        foreignField: "_id", // The field in the products collection
+        as: "filament.vendor", // The output field name
+      },
+    });
+
+    aggregateCommand.push({ $unwind: "$filament.vendor" });
+
     if (filter != {}) {
       // use filtering if present
       aggregateCommand.push({ $match: filter });
@@ -123,6 +137,12 @@ export const listGCodeFilesRouteHandler = async (
       });
     }

+    // Add sorting if sort parameter is provided
+    if (sort) {
+      const sortOrder = order === "descend" ? -1 : 1;
+      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
+    }
+
     aggregateCommand.push({ $skip: skip });
     aggregateCommand.push({ $limit: Number(limit) });

@@ -131,7 +151,7 @@ export const listGCodeFilesRouteHandler = async (
     gcodeFile = await gcodeFileModel.aggregate(aggregateCommand);

     logger.trace(
-      `List of gcode files (Page ${page}, Limit ${limit}, Property ${property}):`,
+      `List of gcode files (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
       gcodeFile,
     );
     res.send(gcodeFile);
@@ -199,10 +219,12 @@ export const editGCodeFileRouteHandler = async (req, res) => {
     logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);

     try {
-      const { createdAt, updatedAt, started_at, status, ...updateData } =
-        req.body;
+      const updateData = {
+        updatedAt: new Date(),
+        name: req.body.name,
+        filament: req.body?.filament?._id,
+      };

-      console.log("Update data", updateData);
       const result = await gcodeFileModel.updateOne(
         { _id: id },
         { $set: updateData },
@@ -223,22 +245,44 @@ export const editGCodeFileRouteHandler = async (req, res) => {
 };

 export const newGCodeFileRouteHandler = async (req, res) => {
+  var filament = null;
+
   try {
-    let { ...newGCodeFile } = req.body;
-    newGCodeFile = {
-      ...newGCodeFile,
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.body.filament._id);
+    // Fetch the filament with the given remote address
+    filament = await filamentModel.findOne({
+      _id: id,
+    });
+
+    if (!filament) {
+      logger.warn(`Filament not found with supplied id.`);
+      return res.status(404).send({ error: "Filament not found." });
+    }
+    logger.trace(`Filament with ID: ${id}:`, filament);
+  } catch (error) {
+    logger.error("Error fetching filament:", error);
+    return res.status(500).send({ error: error.message });
+  }
+
+  try {
+    const newGCodeFile = {
       createdAt: new Date(),
       updatedAt: new Date(),
+      gcodeFileInfo: req.body.gcodeFileInfo,
+      filament: req.body.filament._id,
+      name: req.body.name,
+      cost: (filament.cost / 1000) * req.body.gcodeFileInfo.filamentUsedG,
     };

     const result = await gcodeFileModel.create(newGCodeFile);
     if (result.nCreated === 0) {
       logger.error("No gcode file created.");
-      res.status(500).send({ error: "No filament created." });
+      res.status(500).send({ error: "No gcode file created." });
     }
     res.status(200).send(result);
   } catch (updateError) {
-    logger.error("Error updating filament:", updateError);
+    logger.error("Error creating gcode file:", updateError);
     res.status(500).send({ error: updateError.message });
   }
 };
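The new cost field is derived from the referenced filament: filament.cost appears to be treated as the price of a 1000 g spool, so dividing by 1000 gives a per-gram cost that is multiplied by the grams of filament the slicer reports for the file. A small worked example with invented numbers:

```js
// Worked example of the gcode-file cost formula (values are invented).
const filament = { cost: 25 };               // 25 currency units per 1000 g of filament
const gcodeFileInfo = { filamentUsedG: 84 }; // grams used, as reported for the sliced file

const cost = (filament.cost / 1000) * gcodeFileInfo.filamentUsedG;
console.log(cost); // 0.025 per gram * 84 g = 2.1
```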
@@ -56,6 +56,9 @@ export const listPartsRouteHandler = async (
   limit = 25,
   property = "",
   filter = {},
+  search = "",
+  sort = "",
+  order = "ascend"
 ) => {
   try {
     // Calculate the skip value based on the page number and limit
@@ -70,10 +73,35 @@ export const listPartsRouteHandler = async (
     }

     if (property != "") {
+      logger.error(property);
       aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
       aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
     } else {
-      aggregateCommand.push({ $project: { image: 0, url: 0 } });
+      aggregateCommand.push({
+        $lookup: {
+          from: "products", // The collection name (usually lowercase plural)
+          localField: "product", // The field in your current model
+          foreignField: "_id", // The field in the products collection
+          as: "product", // The output field name
+        },
+      });
+      aggregateCommand.push({ $unwind: "$product" });
+      aggregateCommand.push({
+        $project: {
+          name: 1,
+          _id: 1,
+          createdAt: 1,
+          updatedAt: 1,
+          "product._id": 1,
+          "product.name": 1,
+        },
+      });
+    }
+
+    // Add sorting if sort parameter is provided
+    if (sort) {
+      const sortOrder = order === "descend" ? -1 : 1;
+      aggregateCommand.push({ $sort: { [sort]: sortOrder } });
     }

     aggregateCommand.push({ $skip: skip });
@@ -84,7 +112,7 @@ export const listPartsRouteHandler = async (
     part = await partModel.aggregate(aggregateCommand);

     logger.trace(
-      `List of parts (Page ${page}, Limit ${limit}, Property ${property}):`,
+      `List of parts (Page ${page}, Limit ${limit}, Property ${property}, Sort ${sort}, Order ${order}):`,
       part,
     );
     res.send(part);
@@ -99,9 +127,11 @@ export const getPartRouteHandler = async (req, res) => {
     // Get ID from params
     const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the part with the given remote address
-    const part = await partModel.findOne({
-      _id: id,
-    });
+    const part = await partModel
+      .findOne({
+        _id: id,
+      })
+      .populate("product");

     if (!part) {
       logger.warn(`Part not found with supplied id.`);
@@ -156,18 +186,37 @@ export const editPartRouteHandler = async (req, res) => {

 export const newPartRouteHandler = async (req, res) => {
   try {
-    let { ...newPart } = req.body;
-    newPart = { ...newPart, createdAt: new Date(), updatedAt: new Date() };
+    if (Array.isArray(req.body)) {
+      // Handle array of parts
+      const partsToCreate = req.body.map((part) => ({
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        name: part.name,
+        products: part?.products,
+        fileName: part?.fileName,
+      }));

-    const result = await partModel.create(newPart);
-    if (result.nCreated === 0) {
-      logger.error("No part created.");
-      res.status(500).send({ error: "No part created." });
+      const results = await partModel.insertMany(partsToCreate);
+      if (!results.length) {
+        logger.error("No parts created.");
+        return res.status(500).send({ error: "No parts created." });
       }
-    res.status(200).send(result);
-  } catch (updateError) {
-    logger.error("Error updating part:", updateError);
-    res.status(500).send({ error: updateError.message });
+      return res.status(200).send(results);
+    } else {
+      // Handle single part
+      const newPart = {
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        name: req.body.name,
+        products: req.body?.products,
+        fileName: req.body?.fileName,
+      };
+      const result = await partModel.create(newPart);
+      return res.status(200).send(result);
+    }
+  } catch (error) {
+    logger.error("Error creating part(s):", error);
+    return res.status(500).send({ error: error.message });
   }
 };

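newPartRouteHandler now branches on the shape of the request body: an array is bulk-inserted with insertMany, anything else is treated as a single part. A hedged usage sketch; the URL, token, and field values are illustrative, not from this commit:

```js
// Bulk create: an array body goes through insertMany (values are illustrative).
await fetch("http://localhost:3000/parts", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: "Bearer <token>" },
  body: JSON.stringify([
    { name: "bracket-left", fileName: "bracket-left.stl" },
    { name: "bracket-right", fileName: "bracket-right.stl" },
  ]),
});

// Single create: a plain object body goes through partModel.create.
await fetch("http://localhost:3000/parts", {
  method: "POST",
  headers: { "Content-Type": "application/json", Authorization: "Bearer <token>" },
  body: JSON.stringify({ name: "lid", products: ["66a1f0c2e7b1a2d3c4e5f7bb"] }),
});
```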
src/services/partstocks/index.js (new file) · 143 lines
@@ -0,0 +1,143 @@
+import dotenv from "dotenv";
+import { partStockModel } from "../../schemas/partstock.schema.js";
+import jwt from "jsonwebtoken";
+import log4js from "log4js";
+import mongoose from "mongoose";
+
+dotenv.config();
+
+const logger = log4js.getLogger("PartStocks");
+logger.level = process.env.LOG_LEVEL;
+
+export const listPartStocksRouteHandler = async (
+  req,
+  res,
+  page = 1,
+  limit = 25,
+  property = "",
+  filter = {},
+) => {
+  try {
+    // Calculate the skip value based on the page number and limit
+    const skip = (page - 1) * limit;
+
+    let partStock;
+    let aggregateCommand = [];
+
+    if (filter != {}) {
+      // use filtering if present
+      aggregateCommand.push({ $match: filter });
+    }
+
+    if (property != "") {
+      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
+      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+    } else {
+      aggregateCommand.push({ $project: { image: 0, url: 0 } });
+    }
+
+    aggregateCommand.push({ $skip: skip });
+    aggregateCommand.push({ $limit: Number(limit) });
+
+    console.log(aggregateCommand);
+
+    partStock = await partStockModel.aggregate(aggregateCommand);
+
+    logger.trace(
+      `List of partStocks (Page ${page}, Limit ${limit}, Property ${property}):`,
+      partStock,
+    );
+    res.send(partStock);
+  } catch (error) {
+    logger.error("Error listing partStocks:", error);
+    res.status(500).send({ error: error });
+  }
+};
+
+export const getPartStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the partStock with the given remote address
+    const partStock = await partStockModel.findOne({
+      _id: id,
+    });
+
+    if (!partStock) {
+      logger.warn(`PartStock not found with supplied id.`);
+      return res.status(404).send({ error: "Print job not found." });
+    }
+
+    logger.trace(`PartStock with ID: ${id}:`, partStock);
+    res.send(partStock);
+  } catch (error) {
+    logger.error("Error fetching PartStock:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
+
+export const editPartStockRouteHandler = async (req, res) => {
+  try {
+    // Get ID from params
+    const id = new mongoose.Types.ObjectId(req.params.id);
+    // Fetch the partStock with the given remote address
+    const partStock = await partStockModel.findOne({ _id: id });
+
+    if (!partStock) {
+      // Error handling
+      logger.warn(`PartStock not found with supplied id.`);
+      return res.status(404).send({ error: "Print job not found." });
+    }
+
+    logger.trace(`PartStock with ID: ${id}:`, partStock);
+
+    try {
+      const updateData = {
+        updatedAt: new Date(),
+        contact: req.body.contact,
+        country: req.body.country,
+        name: req.body.name,
+        website: req.body.website,
+        phone: req.body.phone,
+        email: req.body.email,
+      };
+
+      const result = await partStockModel.updateOne(
+        { _id: id },
+        { $set: updateData },
+      );
+      if (result.nModified === 0) {
+        logger.error("No PartStock updated.");
+        res.status(500).send({ error: "No partStocks updated." });
+      }
+    } catch (updateError) {
+      logger.error("Error updating partStock:", updateError);
+      res.status(500).send({ error: updateError.message });
+    }
+    res.send("OK");
+  } catch (fetchError) {
+    logger.error("Error fetching partStock:", fetchError);
+    res.status(500).send({ error: fetchError.message });
+  }
+};
+
+export const newPartStockRouteHandler = async (req, res) => {
+  try {
+    let { ...newPartStock } = req.body;
+    newPartStock = {
+      ...newPartStock,
+      createdAt: new Date(),
+      updatedAt: new Date(),
+    };
+
+    const result = await partStockModel.create(newPartStock);
+    if (result.nCreated === 0) {
+      logger.error("No partStock created.");
+      res.status(500).send({ error: "No partStock created." });
+    }
+    res.status(200).send({ status: "ok" });
+  } catch (updateError) {
+    logger.error("Error updating partStock:", updateError);
+    res.status(500).send({ error: updateError.message });
+  }
+};
@ -33,22 +33,28 @@ export const getPrinterRouteHandler = async (req, res) => {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
// Fetch the printer with the given remote address
|
// Fetch the printer with the given remote address
|
||||||
const printer = await printerModel.findOne({ _id: id })
|
const printer = await printerModel
|
||||||
.populate('subJobs')
|
.findOne({ _id: id })
|
||||||
.populate('currentJob')
|
.populate("subJobs")
|
||||||
|
.populate("currentJob")
|
||||||
.populate({
|
.populate({
|
||||||
path: 'currentJob',
|
path: "currentJob",
|
||||||
populate: {
|
populate: {
|
||||||
path: 'gcodeFile'
|
path: "gcodeFile",
|
||||||
}
|
},
|
||||||
})
|
})
|
||||||
.populate('currentSubJob')
|
.populate("currentSubJob")
|
||||||
.populate({
|
.populate({
|
||||||
path: 'subJobs',
|
path: "subJobs",
|
||||||
populate: {
|
populate: {
|
||||||
path: 'printJob'
|
path: "printJob",
|
||||||
}
|
},
|
||||||
});
|
})
|
||||||
|
.populate("vendor")
|
||||||
|
.populate({ path: "currentFilamentStock",
|
||||||
|
populate: {
|
||||||
|
path: "filament",
|
||||||
|
},})
|
||||||
|
|
||||||
if (!printer) {
|
if (!printer) {
|
||||||
logger.warn(`Printer with id ${id} not found.`);
|
logger.warn(`Printer with id ${id} not found.`);
|
||||||
@@ -66,11 +72,18 @@ export const getPrinterRouteHandler = async (req, res) => {
 export const editPrinterRouteHandler = async (req, res) => {
   const id = req.params.id;
   try {

     try {
+      const updateData = {
+        updatedAt: new Date(),
+        moonraker: req.body.moonraker,
+        tags: req.body.tags,
+        name: req.body.name,
+        vendor: req.body.vendor.id,
+      };
+
       const result = await printerModel.updateOne(
         { _id: id },
-        { $set: req.body },
+        { $set: updateData },
       );
       if (result.nModified === 0) {
         logger.error("No printers updated.");
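A hedged hardening sketch, not part of the commit: req.body.vendor.id throws if the payload carries no vendor object, so the optional chaining already used in the product handler further down could be applied here as well.

const updateData = {
  updatedAt: new Date(),
  moonraker: req.body?.moonraker,
  tags: req.body?.tags,
  name: req.body?.name,
  vendor: req.body?.vendor?.id,
};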
@@ -89,48 +102,72 @@ export const editPrinterRouteHandler = async (req, res) => {

 export const createPrinterRouteHandler = async (req, res) => {
   try {
-    const {
-      printerName,
-      moonraker,
-      tags = [],
-      firmware = "n/a",
-    } = req.body;
+    const { name, moonraker, tags = [], firmware = "n/a" } = req.body;

     // Validate required fields
-    if (!printerName || !moonraker) {
+    if (!name || !moonraker) {
       logger.warn("Missing required fields in printer creation request");
       return res.status(400).send({
-        error: "Missing required fields. printerName and moonraker configuration are required."
+        error:
+          "Missing required fields. name and moonraker configuration are required.",
       });
     }

     // Validate moonraker configuration
     if (!moonraker.host || !moonraker.port || !moonraker.protocol) {
-      logger.warn("Invalid moonraker configuration in printer creation request");
+      logger.warn(
+        "Invalid moonraker configuration in printer creation request",
+      );
       return res.status(400).send({
-        error: "Invalid moonraker configuration. host, port, protocol are required."
+        error:
+          "Invalid moonraker configuration. host, port, protocol are required.",
       });
     }

     // Create new printer instance
     const newPrinter = new printerModel({
-      printerName,
+      name,
       moonraker,
       tags,
       firmware,
       online: false,
       state: {
-        type: "offline"
-      }
+        type: "offline",
+      },
     });

     // Save the printer
     const savedPrinter = await newPrinter.save();

-    logger.info(`Created new printer: ${printerName}`);
+    logger.info(`Created new printer: ${name}`);
     res.status(201).send(savedPrinter);
   } catch (error) {
     logger.error("Error creating printer:", error);
     res.status(500).send({ error: error.message });
   }
 };

+export const getPrinterStatsRouteHandler = async (req, res) => {
+  try {
+    const stats = await printerModel.aggregate([
+      {
+        $group: {
+          _id: "$state.type",
+          count: { $sum: 1 }
+        }
+      }
+    ]);
+
+    // Transform the results into a more readable format
+    const formattedStats = stats.reduce((acc, curr) => {
+      acc[curr._id] = curr.count;
+      return acc;
+    }, {});
+
+    logger.trace("Printer stats by state:", formattedStats);
+    res.send(formattedStats);
+  } catch (error) {
+    logger.error("Error fetching printer stats:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
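A worked example of the new stats handler, with invented counts:

// Illustrative only — the counts are made up.
// printerModel.aggregate([...]) yields documents such as
//   [{ _id: "idle", count: 3 }, { _id: "printing", count: 1 }]
// and the reduce() flattens them into the object the handler sends:
//   { idle: 3, printing: 1 }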
@@ -46,7 +46,7 @@ export const getPrintJobRouteHandler = async (req, res) => {
       .findOne({
         _id: id,
       })
-      .populate("printers", "printerName state")
+      .populate("printers", "name state")
       .populate("gcodeFile")
       .populate("subJobs");

@@ -120,7 +120,7 @@ export const createPrintJobRouteHandler = async (req, res) => {
       subJobs: [], // Initialize empty array for subjob references
       createdAt: new Date(),
       updatedAt: new Date(),
-      startedAt: new Date(),
+      startedAt: null
     });

     // Save the print job first to get its ID
@@ -156,3 +156,29 @@ export const createPrintJobRouteHandler = async (req, res) => {
     res.status(500).send({ error: error.message });
   }
 };
+
+export const getPrintJobStatsRouteHandler = async (req, res) => {
+  try {
+    const stats = await printJobModel.aggregate([
+      {
+        $group: {
+          _id: "$state.type",
+          count: { $sum: 1 }
+        }
+      }
+    ]);
+
+    // Transform the results into a more readable format
+    const formattedStats = stats.reduce((acc, curr) => {
+      acc[curr._id] = curr.count;
+      return acc;
+    }, {});
+
+    logger.trace("Print job stats by state:", formattedStats);
+    res.send(formattedStats);
+  } catch (error) {
+    logger.error("Error fetching print job stats:", error);
+    res.status(500).send({ error: error.message });
+  }
+};
+
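Not something the commit does, but since the printer and print-job stats handlers differ only in the model they aggregate over, a shared helper is one possible follow-up (a sketch under that assumption):

// Hypothetical helper — not in the repo.
const countByState = async (model) => {
  const stats = await model.aggregate([
    { $group: { _id: "$state.type", count: { $sum: 1 } } },
  ]);
  return stats.reduce((acc, curr) => {
    acc[curr._id] = curr.count;
    return acc;
  }, {});
};
// usage: res.send(await countByState(printJobModel));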
@@ -1,5 +1,6 @@
 import dotenv from "dotenv";
 import { productModel } from "../../schemas/product.schema.js";
+import { partModel } from "../../schemas/part.schema.js";
 import log4js from "log4js";
 import mongoose from "mongoose";

@@ -29,8 +30,19 @@ export const listProductsRouteHandler = async (
   }

   if (property != "") {
-    aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
-    aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
+    // Match documents where the specified property is either null, undefined, empty string, empty array or empty object
+    aggregateCommand.push({
+      $match: {
+        $or: [
+          { [property]: null },
+          { [property]: "" },
+          { [property]: [] },
+          { [property]: {} },
+          { [property]: { $exists: false } },
+        ],
+      },
+    });
+    aggregateCommand.push({ $project: { _id: 1, [property]: 1 } });
   } else {
     aggregateCommand.push({ $project: { image: 0, url: 0 } });
   }
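A worked example of the new branch, assuming this router is mounted at /products and the client asks about the vendor field: the handler now returns products whose chosen property is missing or empty, rather than a list of that property's distinct values.

// Assumed request — the path and field name are examples, not taken from the diff:
//   GET /products?property=vendor
// pushes these stages onto the pipeline:
//   { $match: { $or: [{ vendor: null }, { vendor: "" }, { vendor: [] },
//                     { vendor: {} }, { vendor: { $exists: false } }] } }
//   { $project: { _id: 1, vendor: 1 } }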
@@ -58,9 +70,12 @@ export const getProductRouteHandler = async (req, res) => {
     // Get ID from params
     const id = new mongoose.Types.ObjectId(req.params.id);
     // Fetch the product with the given remote address
-    const product = await productModel.findOne({
-      _id: id,
-    });
+    const product = await productModel
+      .findOne({
+        _id: id,
+      })
+      .populate("vendor")
+      .populate("parts");

     if (!product) {
       logger.warn(`Product not found with supplied id.`);
@@ -76,9 +91,10 @@ export const getProductRouteHandler = async (req, res) => {
 };

 export const editProductRouteHandler = async (req, res) => {
-  try {
   // Get ID from params
   const id = new mongoose.Types.ObjectId(req.params.id);

+  try {
     // Fetch the product with the given remote address
     const product = await productModel.findOne({ _id: id });

@@ -89,10 +105,25 @@ export const editProductRouteHandler = async (req, res) => {
     }

     logger.trace(`Product with ID: ${id}:`, product);
+  } catch (fetchError) {
+    logger.error("Error fetching product:", fetchError);
+    res.status(500).send({ error: fetchError.message });
+  }

   try {
-    const { createdAt, updatedAt, started_at, status, ...updateData } =
-      req.body;
+    const updateData = {
+      updatedAt: new Date(),
+      name: req.body?.name,
+      vendor: req.body?.vendor?.id,
+      tags: req.body?.tags,
+      version: req.body?.version,
+      parts: req.body?.parts,
+      margin: req.body.margin,
+      price: req.body.price,
+      marginOrPrice: req.body.marginOrPrice,
+    };
+
+    console.log("ID:", id);
+
     const result = await productModel.updateOne(
       { _id: id },
@@ -107,27 +138,60 @@ export const editProductRouteHandler = async (req, res) => {
       res.status(500).send({ error: updateError.message });
     }
     res.send("OK");
-  } catch (fetchError) {
-    logger.error("Error fetching product:", fetchError);
-    res.status(500).send({ error: fetchError.message });
-  }
 };

 export const newProductRouteHandler = async (req, res) => {
   try {
-    let { ...newProduct } = req.body;
-    newProduct = {
-      ...newProduct,
+    const newProduct = {
       createdAt: new Date(),
       updatedAt: new Date(),
+      name: req.body.name,
+      vendor: req.body.vendor.id,
+      parts: partIds,
+      margin: req.body.margin,
+      price: req.body.price,
+      marginOrPrice: req.body.marginOrPrice,
     };

-    const result = await productModel.create(newProduct);
-    if (result.nCreated === 0) {
+    const newProductResult = await productModel.create(newProduct);
+
+    if (newProductResult.nCreated === 0) {
       logger.error("No product created.");
       res.status(500).send({ error: "No product created." });
     }
-    res.status(200).send(result);
+
+    const parts = req.body.parts || [];
+    const productId = newProductResult._id;
+
+    var partIds = [];
+
+    for (const part of parts) {
+      const newPart = {
+        createdAt: new Date(),
+        updatedAt: new Date(),
+        name: part.name,
+        product: productId,
+      };
+
+      const newPartResult = await partModel.create(newPart);
+      if (newPartResult.nCreated === 0) {
+        logger.error("No parts created.");
+        res.status(500).send({ error: "No parts created." });
+      }
+      partIds.push(newPartResult._id);
+    }
+
+    const editProductResult = await productModel.updateOne(
+      { _id: productId },
+      { $set: { parts: partIds } },
+    );
+
+    if (editProductResult.nModified === 0) {
+      logger.error("No product updated.");
+      res.status(500).send({ error: "No products updated." });
+    }
+
+    res.status(200).send({ ...newProductResult, parts: partIds });
   } catch (updateError) {
     logger.error("Error updating product:", updateError);
     res.status(500).send({ error: updateError.message });
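One reading note on the new product flow: `var partIds` is hoisted but still unassigned when `newProduct` is built, so `parts: partIds` evaluates to undefined on the initial create; it is the updateOne at the end that actually stores the part ids once the loop has created them. Under that reading, the field could simply be left out of the first insert — a sketch, mirroring the field list above:

// Sketch only — same fields as the diff, with the premature parts entry dropped.
const newProduct = {
  createdAt: new Date(),
  updatedAt: new Date(),
  name: req.body.name,
  vendor: req.body.vendor.id,
  margin: req.body.margin,
  price: req.body.price,
  marginOrPrice: req.body.marginOrPrice,
};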
@@ -14,7 +14,7 @@ logger.level = process.env.LOG_LEVEL;
 const formatPrintersResponse = (printers) => {
   return printers.map((printer) => ({
     id: printer.id,
-    name: printer.printerName,
+    name: printer.name,
     link: `/production/printers/info?printerId=${printer.id}`,
     printer: printer,
   }));
11 src/services/vendors/index.js vendored
@@ -92,8 +92,15 @@ export const editVendorRouteHandler = async (req, res) => {
     logger.trace(`Vendor with ID: ${id}:`, vendor);

   try {
-    const { createdAt, updatedAt, started_at, status, ...updateData } =
-      req.body;
+    const updateData = {
+      updatedAt: new Date(),
+      contact: req.body.contact,
+      country: req.body.country,
+      name: req.body.name,
+      website: req.body.website,
+      phone: req.body.phone,
+      email: req.body.email,
+    };
+
     const result = await vendorModel.updateOne(
       { _id: id },