Added missing files
This commit is contained in:
parent
d3f91d302a
commit
29d17bb0f9
127
src/keycloak.js
Normal file
@@ -0,0 +1,127 @@
import Keycloak from "keycloak-connect";
import session from "express-session";
import dotenv from "dotenv";
import axios from "axios";
import jwt from "jsonwebtoken";
import log4js from "log4js";

dotenv.config();

const logger = log4js.getLogger("Keycloak");
logger.level = process.env.LOG_LEVEL || "info";

// Initialize Keycloak
const keycloakConfig = {
  realm: process.env.KEYCLOAK_REALM || "farm-control",
  "auth-server-url": process.env.KEYCLOAK_URL || "http://localhost:8080/auth",
  "ssl-required": process.env.NODE_ENV === "production" ? "external" : "none",
  resource: process.env.KEYCLOAK_CLIENT_ID || "farmcontrol-client",
  "confidential-port": 0,
  "bearer-only": true,
  "public-client": false,
  "use-resource-role-mappings": true,
  "verify-token-audience": true,
  credentials: {
    secret: process.env.KEYCLOAK_CLIENT_SECRET,
  },
};

const memoryStore = new session.MemoryStore();

const expressSession = session({
  secret: process.env.SESSION_SECRET || "n00Dl3s23!",
  resave: false,
  saveUninitialized: true, // ensure the session is initialized on first request
  store: memoryStore,
  cookie: {
    maxAge: 1800000, // 30 minutes
  },
});

const keycloak = new Keycloak({ store: memoryStore }, keycloakConfig);

// Custom middleware to check if the user is authenticated
const isAuthenticated = async (req, res, next) => {
  let token = null;

  // Try to get token from Authorization header
  const authHeader = req.headers.authorization;
  if (authHeader && authHeader.startsWith("Bearer ")) {
    token = authHeader.substring(7);

    try {
      // Verify token with Keycloak introspection endpoint
      const response = await axios.post(
        `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token/introspect`,
        new URLSearchParams({
          token: token,
          client_id: process.env.KEYCLOAK_CLIENT_ID,
          client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
        }),
        {
          headers: {
            "Content-Type": "application/x-www-form-urlencoded",
          },
        }
      );

      const introspection = response.data;
      if (!introspection.active) {
        logger.info("Token is not active");
        return res.status(401).json({ error: "Not authenticated" });
      }

      // Parse token to extract user info
      const decodedToken = jwt.decode(token);
      req.user = {
        id: decodedToken.sub,
        username: decodedToken.preferred_username,
        email: decodedToken.email,
        name: decodedToken.name,
        roles: extractRoles(decodedToken),
      };

      return next();
    } catch (error) {
      logger.error("Token verification error:", error.message);
      return res.status(401).json({ error: "Not authenticated" });
    }
  }

  // Fall back to session-based authentication
  if (req.session && req.session["keycloak-token"]) {
    const sessionToken = req.session["keycloak-token"];
    if (sessionToken.expires_at > new Date().getTime()) {
      return next();
    }
  }

  return res.status(401).json({ error: "Not authenticated" });
};

// Helper function to extract roles from token
function extractRoles(token) {
  const roles = [];

  // Extract realm roles
  if (token.realm_access && token.realm_access.roles) {
    roles.push(...token.realm_access.roles);
  }

  // Extract client roles, namespaced as "<client>:<role>"
  if (token.resource_access) {
    for (const client in token.resource_access) {
      if (token.resource_access[client].roles) {
        roles.push(
          ...token.resource_access[client].roles.map(
            (role) => `${client}:${role}`
          )
        );
      }
    }
  }

  return roles;
}

export { keycloak, expressSession, isAuthenticated };
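For orientation, a minimal sketch of how these exports might be wired into the Express app. The file name app.js and the mount path are assumptions for illustration, not part of this commit:

// app.js — hypothetical wiring of the modules added in this commit
import express from "express";
import { keycloak, expressSession } from "./src/keycloak.js";
import filamentsRouter from "./src/routes/filaments/index.js";

const app = express();
app.use(express.json());
app.use(expressSession); // session store shared with the Keycloak adapter
app.use(keycloak.middleware()); // standard keycloak-connect request handling
app.use("/filaments", filamentsRouter); // each router guards its routes with isAuthenticated

app.listen(3000);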
45
src/routes/filaments/index.js
Normal file
@@ -0,0 +1,45 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import {
  listFilamentsRouteHandler,
  getFilamentRouteHandler,
  editFilamentRouteHandler,
  newFilamentRouteHandler,
} from "../../services/filaments/index.js";

const router = express.Router();

// list of filaments
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

  const allowedFilters = ["type", "brand", "diameter", "color"];

  // keep only whitelisted query parameters as filters
  const filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter[key] = parseStringIfNumber(value);
    }
  }

  listFilamentsRouteHandler(req, res, page, limit, property, filter);
});

router.post("/", isAuthenticated, (req, res) => {
  newFilamentRouteHandler(req, res);
});

router.get("/:id", isAuthenticated, (req, res) => {
  getFilamentRouteHandler(req, res);
});

// update filament info
router.put("/:id", isAuthenticated, async (req, res) => {
  editFilamentRouteHandler(req, res);
});

export default router;
45
src/routes/materials/index.js
Normal file
@@ -0,0 +1,45 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import {
  listMaterialsRouteHandler,
  getMaterialRouteHandler,
  editMaterialRouteHandler,
  newMaterialRouteHandler,
} from "../../services/materials/index.js";

const router = express.Router();

// list of materials
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

  const allowedFilters = ["type", "brand", "diameter", "color"];

  // keep only whitelisted query parameters as filters
  const filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter[key] = parseStringIfNumber(value);
    }
  }

  listMaterialsRouteHandler(req, res, page, limit, property, filter);
});

router.post("/", isAuthenticated, (req, res) => {
  newMaterialRouteHandler(req, res);
});

router.get("/:id", isAuthenticated, (req, res) => {
  getMaterialRouteHandler(req, res);
});

// update material info
router.put("/:id", isAuthenticated, async (req, res) => {
  editMaterialRouteHandler(req, res);
});

export default router;
55
src/routes/parts/index.js
Normal file
@@ -0,0 +1,55 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import {
  listPartsRouteHandler,
  getPartRouteHandler,
  editPartRouteHandler,
  newPartRouteHandler,
  uploadPartFileContentRouteHandler,
  getPartFileContentRouteHandler,
} from "../../services/parts/index.js";

const router = express.Router();

// list of parts
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

  const allowedFilters = ["type", "brand", "diameter", "color"];

  // keep only whitelisted query parameters as filters
  const filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter[key] = parseStringIfNumber(value);
    }
  }

  listPartsRouteHandler(req, res, page, limit, property, filter);
});

router.post("/", isAuthenticated, (req, res) => {
  newPartRouteHandler(req, res);
});

router.post("/:id/content", isAuthenticated, (req, res) => {
  uploadPartFileContentRouteHandler(req, res);
});

router.get("/:id/content", isAuthenticated, (req, res) => {
  getPartFileContentRouteHandler(req, res);
});

router.get("/:id", isAuthenticated, (req, res) => {
  getPartRouteHandler(req, res);
});

// update part info
router.put("/:id", isAuthenticated, async (req, res) => {
  editPartRouteHandler(req, res);
});

export default router;
45
src/routes/products/index.js
Normal file
@@ -0,0 +1,45 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import {
  listProductsRouteHandler,
  getProductRouteHandler,
  editProductRouteHandler,
  newProductRouteHandler,
} from "../../services/products/index.js";

const router = express.Router();

// list of products
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

  const allowedFilters = ["type", "brand", "diameter", "color"];

  // keep only whitelisted query parameters as filters
  const filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter[key] = parseStringIfNumber(value);
    }
  }

  listProductsRouteHandler(req, res, page, limit, property, filter);
});

router.post("/", isAuthenticated, (req, res) => {
  newProductRouteHandler(req, res);
});

router.get("/:id", isAuthenticated, (req, res) => {
  getProductRouteHandler(req, res);
});

// update product info
router.put("/:id", isAuthenticated, async (req, res) => {
  editProductRouteHandler(req, res);
});

export default router;
11
src/routes/spotlight/index.js
Normal file
@@ -0,0 +1,11 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { getSpotlightRouteHandler } from "../../services/spotlight/index.js";

const router = express.Router();

router.get("/:query", isAuthenticated, (req, res) => {
  getSpotlightRouteHandler(req, res);
});

export default router;
45
src/routes/vendors/index.js
vendored
Normal file
@@ -0,0 +1,45 @@
import express from "express";
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
import {
  listVendorsRouteHandler,
  getVendorRouteHandler,
  editVendorRouteHandler,
  newVendorRouteHandler,
} from "../../services/vendors/index.js";

const router = express.Router();

// list of vendors
router.get("/", isAuthenticated, (req, res) => {
  const { page, limit, property } = req.query;

  const allowedFilters = ["type", "brand", "diameter", "color"];

  // keep only whitelisted query parameters as filters
  const filter = {};
  for (const [key, value] of Object.entries(req.query)) {
    if (allowedFilters.includes(key)) {
      filter[key] = parseStringIfNumber(value);
    }
  }

  listVendorsRouteHandler(req, res, page, limit, property, filter);
});

router.post("/", isAuthenticated, (req, res) => {
  newVendorRouteHandler(req, res);
});

router.get("/:id", isAuthenticated, (req, res) => {
  getVendorRouteHandler(req, res);
});

// update vendor info
router.put("/:id", isAuthenticated, async (req, res) => {
  editVendorRouteHandler(req, res);
});

export default router;
25
src/schemas/filament.schema.js
Normal file
@@ -0,0 +1,25 @@
import mongoose from "mongoose";

const filamentSchema = new mongoose.Schema({
  name: { required: true, type: String },
  barcode: { required: false, type: String },
  url: { required: false, type: String },
  image: { required: false, type: Buffer },
  color: { required: true, type: String },
  brand: { required: true, type: String },
  type: { required: true, type: String },
  price: { required: true, type: Number },
  diameter: { required: true, type: Number },
  density: { required: true, type: Number },
  createdAt: { required: true, type: Date },
  updatedAt: { required: true, type: Date },
  emptySpoolWeight: { required: true, type: Number },
});

filamentSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

filamentSchema.set("toJSON", { virtuals: true });

export const filamentModel = mongoose.model("Filament", filamentSchema);
17
src/schemas/host.schema.js
Normal file
@@ -0,0 +1,17 @@
import mongoose from "mongoose";

const hostSchema = new mongoose.Schema({
  online: { required: true, type: Boolean },
  hostId: { required: true, type: String },
  connectedAt: { required: true, type: Date },
  // "status" is a sub-document whose field is literally named "type",
  // hence the nested type descriptor
  status: { type: { required: true, type: String } },
});

hostSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

hostSchema.set("toJSON", { virtuals: true });

export const hostModel = mongoose.model("Host", hostSchema);
15
src/schemas/material.schema.js
Normal file
@@ -0,0 +1,15 @@
import mongoose from "mongoose";

const materialSchema = new mongoose.Schema({
  name: { required: true, type: String },
  url: { required: false, type: String },
  image: { required: false, type: Buffer },
});

materialSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

materialSchema.set("toJSON", { virtuals: true });

export const materialModel = mongoose.model("Material", materialSchema);
22
src/schemas/part.schema.js
Normal file
@@ -0,0 +1,22 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

// Define the main part schema
const partSchema = new Schema(
  {
    name: { type: String, required: true },
    products: [{ type: Schema.Types.ObjectId, ref: "Product" }],
  },
  { timestamps: true },
);

// Add virtual id getter
partSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Configure JSON serialization to include virtuals
partSchema.set("toJSON", { virtuals: true });

// Create and export the model
export const partModel = mongoose.model("Part", partSchema);
51
src/schemas/printsubjob.schema.js
Normal file
@@ -0,0 +1,51 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

const printSubJobSchema = new mongoose.Schema({
  printer: {
    type: Schema.Types.ObjectId,
    ref: "Printer",
    required: true,
  },
  printJob: {
    type: Schema.Types.ObjectId,
    ref: "PrintJob",
    required: true,
  },
  subJobId: {
    type: String,
    required: true,
  },
  gcodeFile: {
    type: Schema.Types.ObjectId,
    ref: "GCodeFile",
    required: true,
  },
  // "state" is a sub-document with a field literally named "type",
  // hence the nested type descriptor
  state: {
    type: { required: true, type: String },
    percent: { required: false, type: Number },
  },
  number: {
    type: Number,
    required: true,
  },
  createdAt: {
    type: Date,
    default: Date.now,
  },
  updatedAt: {
    type: Date,
    default: Date.now,
  },
});

printSubJobSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

printSubJobSchema.set("toJSON", { virtuals: true });

export const printSubJobModel = mongoose.model(
  "PrintSubJob",
  printSubJobSchema,
);
24
src/schemas/product.schema.js
Normal file
@@ -0,0 +1,24 @@
import mongoose from "mongoose";
const { Schema } = mongoose;

// Define the main product schema
const productSchema = new Schema(
  {
    name: { type: String, required: true },
    tags: [{ type: String }],
    version: { type: String },
    parts: [{ type: Schema.Types.ObjectId, ref: "Part" }],
  },
  { timestamps: true },
);

// Add virtual id getter
productSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

// Configure JSON serialization to include virtuals
productSchema.set("toJSON", { virtuals: true });

// Create and export the model.
// Registered as "Product" so that ref: "Product" in part.schema.js resolves;
// the lowercase "product" would not match that ref.
export const productModel = mongoose.model("Product", productSchema);
19
src/schemas/vendor.schema.js
Normal file
@@ -0,0 +1,19 @@
import mongoose from "mongoose";

const vendorSchema = new mongoose.Schema(
  {
    name: { required: true, type: String },
    website: { required: false, type: String },
    contact: { required: false, type: String },
    image: { required: false, type: Buffer },
  },
  { timestamps: true },
);

vendorSchema.virtual("id").get(function () {
  return this._id.toHexString();
});

vendorSchema.set("toJSON", { virtuals: true });

export const vendorModel = mongoose.model("Vendor", vendorSchema);
121
src/services/filaments/index.js
Normal file
@@ -0,0 +1,121 @@
import dotenv from "dotenv";
import { filamentModel } from "../../schemas/filament.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";

dotenv.config();

const logger = log4js.getLogger("Filaments");
logger.level = process.env.LOG_LEVEL || "info";

export const listFilamentsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    const aggregateCommand = [];

    // use filtering if present (`filter != {}` compares object identity and
    // is always true, so check for keys instead)
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }

    if (property != "") {
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
    } else {
      aggregateCommand.push({ $project: { image: 0, url: 0 } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    logger.debug(aggregateCommand);

    const filament = await filamentModel.aggregate(aggregateCommand);

    logger.trace(
      `List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`,
      filament,
    );
    res.send(filament);
  } catch (error) {
    logger.error("Error listing filaments:", error);
    res.status(500).send({ error: error.message });
  }
};

export const getFilamentRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the filament with the given id
    const filament = await filamentModel.findOne({ _id: id });

    if (!filament) {
      logger.warn(`Filament not found with supplied id.`);
      return res.status(404).send({ error: "Filament not found." });
    }

    logger.trace(`Filament with ID: ${id}:`, filament);
    res.send(filament);
  } catch (error) {
    logger.error("Error fetching Filament:", error);
    res.status(500).send({ error: error.message });
  }
};

export const editFilamentRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the filament with the given id
    const filament = await filamentModel.findOne({ _id: id });

    if (!filament) {
      logger.warn(`Filament not found with supplied id.`);
      return res.status(404).send({ error: "Filament not found." });
    }

    logger.trace(`Filament with ID: ${id}:`, filament);

    try {
      // never allow timestamps or status to be overwritten from the request body
      const { createdAt, updatedAt, started_at, status, ...updateData } =
        req.body;

      const result = await filamentModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // modifiedCount in Mongoose 6+; older releases called this nModified
      if (result.modifiedCount === 0) {
        logger.error("No Filament updated.");
        return res.status(500).send({ error: "No filaments updated." });
      }
    } catch (updateError) {
      logger.error("Error updating filament:", updateError);
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching filament:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const newFilamentRouteHandler = async (req, res) => {
  try {
    const newFilament = {
      ...req.body,
      createdAt: new Date(),
      updatedAt: new Date(),
    };

    // create() throws on failure, so the catch below handles errors;
    // the former nCreated check was dead code (create returns a document)
    await filamentModel.create(newFilament);
    res.status(200).send({ status: "ok" });
  } catch (createError) {
    logger.error("Error creating filament:", createError);
    res.status(500).send({ error: createError.message });
  }
};
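To make the aggregation logic concrete, this is roughly the pipeline listFilamentsRouteHandler builds for a request such as GET /filaments?brand=Prusament&property=color&page=2&limit=10 (the query values are invented for illustration):

// filter = { brand: "Prusament" }, property = "color", skip = (2 - 1) * 10 = 10
[
  { $match: { brand: "Prusament" } }, // whitelisted filter from the route
  { $group: { _id: "$color" } }, // one row per distinct color
  { $project: { _id: 0, color: "$_id" } }, // rename _id back to "color"
  { $skip: 10 },
  { $limit: 10 },
]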
136
src/services/materials/index.js
Normal file
@@ -0,0 +1,136 @@
import dotenv from "dotenv";
import { materialModel } from "../../schemas/material.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";

dotenv.config();

const logger = log4js.getLogger("Materials");
logger.level = process.env.LOG_LEVEL || "info";

export const listMaterialsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    const aggregateCommand = [];

    // use filtering if present (`filter != {}` compares object identity and
    // is always true, so check for keys instead)
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }

    if (property != "") {
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
    } else {
      aggregateCommand.push({ $project: { image: 0, url: 0 } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    logger.debug(aggregateCommand);

    const material = await materialModel.aggregate(aggregateCommand);

    logger.trace(
      `List of materials (Page ${page}, Limit ${limit}, Property ${property}):`,
      material,
    );
    res.send(material);
  } catch (error) {
    logger.error("Error listing materials:", error);
    res.status(500).send({ error: error.message });
  }
};

export const getMaterialRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the material with the given id
    const material = await materialModel.findOne({ _id: id });

    if (!material) {
      logger.warn(`Material not found with supplied id.`);
      return res.status(404).send({ error: "Material not found." });
    }

    logger.trace(`Material with ID: ${id}:`, material);
    res.send(material);
  } catch (error) {
    logger.error("Error fetching Material:", error);
    res.status(500).send({ error: error.message });
  }
};

export const editMaterialRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the material with the given id
    const material = await materialModel.findOne({ _id: id });

    if (!material) {
      logger.warn(`Material not found with supplied id.`);
      return res.status(404).send({ error: "Material not found." });
    }

    logger.trace(`Material with ID: ${id}:`, material);

    try {
      // never allow timestamps or status to be overwritten from the request body
      const { createdAt, updatedAt, started_at, status, ...updateData } =
        req.body;

      const result = await materialModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // modifiedCount in Mongoose 6+; older releases called this nModified
      if (result.modifiedCount === 0) {
        logger.error("No Material updated.");
        return res.status(500).send({ error: "No materials updated." });
      }
    } catch (updateError) {
      logger.error("Error updating material:", updateError);
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching material:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const newMaterialRouteHandler = async (req, res) => {
  try {
    const newMaterial = {
      ...req.body,
      createdAt: new Date(),
      updatedAt: new Date(),
    };

    // create() throws on failure; the former nCreated check was dead code
    await materialModel.create(newMaterial);
    res.status(200).send({ status: "ok" });
  } catch (createError) {
    logger.error("Error creating material:", createError);
    res.status(500).send({ error: createError.message });
  }
};
252
src/services/parts/index.js
Normal file
@@ -0,0 +1,252 @@
import dotenv from "dotenv";
import { partModel } from "../../schemas/part.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";
import multer from "multer";
import fs from "fs";
import path from "path";

dotenv.config();

const logger = log4js.getLogger("Parts");
logger.level = process.env.LOG_LEVEL || "info";

// Set storage engine
const partsStorage = multer.diskStorage({
  destination: process.env.PART_STORAGE,
  filename: async function (req, file, cb) {
    // Retrieve custom file name from the request params
    const customFileName = req.params.id || "default"; // Default to 'default' if not provided
    // Create the final filename, ensuring it ends with .stl
    const finalFilename = `${customFileName}.stl`;

    // Call callback with the final filename
    cb(null, finalFilename);
  },
});

// Initialise upload
const partUpload = multer({
  storage: partsStorage,
  limits: { fileSize: 500000000 }, // 500 MB limit
  fileFilter: function (req, file, cb) {
    checkFileType(file, cb);
  },
}).single("partFile"); // The name attribute of the file input in the HTML form

// Check file type: only .stl extensions are allowed
function checkFileType(file, cb) {
  const filetypes = /stl/;
  const extname = filetypes.test(path.extname(file.originalname).toLowerCase());

  if (extname) {
    logger.debug(file);
    return cb(null, true);
  } else {
    cb("Error: .stl files only!");
  }
}

export const listPartsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    const aggregateCommand = [];

    // use filtering if present (`filter != {}` compares object identity and
    // is always true, so check for keys instead)
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }

    if (property != "") {
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
    } else {
      aggregateCommand.push({ $project: { image: 0, url: 0 } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    logger.debug(aggregateCommand);

    const part = await partModel.aggregate(aggregateCommand);

    logger.trace(
      `List of parts (Page ${page}, Limit ${limit}, Property ${property}):`,
      part,
    );
    res.send(part);
  } catch (error) {
    logger.error("Error listing parts:", error);
    res.status(500).send({ error: error.message });
  }
};

export const getPartRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the part with the given id
    const part = await partModel.findOne({ _id: id });

    if (!part) {
      logger.warn(`Part not found with supplied id.`);
      return res.status(404).send({ error: "Part not found." });
    }

    logger.trace(`Part with ID: ${id}:`, part);
    res.send(part);
  } catch (error) {
    logger.error("Error fetching Part:", error);
    res.status(500).send({ error: error.message });
  }
};

export const editPartRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the part with the given id
    const part = await partModel.findOne({ _id: id });

    if (!part) {
      logger.warn(`Part not found with supplied id.`);
      return res.status(404).send({ error: "Part not found." });
    }

    logger.trace(`Part with ID: ${id}:`, part);

    try {
      // never allow timestamps or status to be overwritten from the request body
      const { createdAt, updatedAt, started_at, status, ...updateData } =
        req.body;

      const result = await partModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // modifiedCount in Mongoose 6+; older releases called this nModified
      if (result.modifiedCount === 0) {
        logger.error("No Part updated.");
        return res.status(500).send({ error: "No parts updated." });
      }
    } catch (updateError) {
      logger.error("Error updating part:", updateError);
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching part:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const newPartRouteHandler = async (req, res) => {
  try {
    const newPart = { ...req.body, createdAt: new Date(), updatedAt: new Date() };

    // create() throws on failure; the former nCreated check was dead code
    const result = await partModel.create(newPart);
    res.status(200).send(result);
  } catch (createError) {
    logger.error("Error creating part:", createError);
    res.status(500).send({ error: createError.message });
  }
};

export const uploadPartFileContentRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the part with the given id
    const part = await partModel.findOne({ _id: id });
    if (!part) {
      logger.warn(`Part not found with supplied id.`);
      return res.status(404).send({ error: "Part not found." });
    }
    logger.trace(`Part with ID: ${id}`);
    try {
      partUpload(req, res, async (err) => {
        if (err) {
          res.status(500).send({ error: err });
        } else if (req.file == undefined) {
          res.send({ message: "No file selected!" });
        } else {
          res.send({ status: "OK", file: `${req.file.filename}` });
        }
      });
    } catch (uploadError) {
      logger.error("Error uploading part file:", uploadError);
      res.status(500).send({ error: uploadError.message });
    }
  } catch (fetchError) {
    logger.error("Error fetching part:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const getPartFileContentRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the part with the given id
    const part = await partModel.findOne({ _id: id });

    if (!part) {
      logger.warn(`Part not found with supplied id.`);
      return res.status(404).send({ error: "Part not found." });
    }

    logger.trace(`Returning part file contents with ID: ${id}:`);

    const filePath = path.join(process.env.PART_STORAGE, id + ".stl");

    // Read the file
    fs.readFile(filePath, "utf8", (err, data) => {
      if (err) {
        if (err.code === "ENOENT") {
          // File not found
          return res.status(404).send({ error: "File not found!" });
        }
        // Other errors
        return res.status(500).send({ error: "Error reading file." });
      }

      // Send the file contents in the response
      res.send(data);
    });
  } catch (error) {
    logger.error("Error fetching Part:", error);
    res.status(500).send({ error: error.message });
  }
};
135
src/services/products/index.js
Normal file
@@ -0,0 +1,135 @@
import dotenv from "dotenv";
import { productModel } from "../../schemas/product.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";

dotenv.config();

const logger = log4js.getLogger("Products");
logger.level = process.env.LOG_LEVEL || "info";

export const listProductsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    const aggregateCommand = [];

    // use filtering if present (`filter != {}` compares object identity and
    // is always true, so check for keys instead)
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }

    if (property != "") {
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
    } else {
      aggregateCommand.push({ $project: { image: 0, url: 0 } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    logger.debug(aggregateCommand);

    const product = await productModel.aggregate(aggregateCommand);

    logger.trace(
      `List of products (Page ${page}, Limit ${limit}, Property ${property}):`,
      product,
    );
    res.send(product);
  } catch (error) {
    logger.error("Error listing products:", error);
    res.status(500).send({ error: error.message });
  }
};

export const getProductRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the product with the given id
    const product = await productModel.findOne({ _id: id });

    if (!product) {
      logger.warn(`Product not found with supplied id.`);
      return res.status(404).send({ error: "Product not found." });
    }

    logger.trace(`Product with ID: ${id}:`, product);
    res.send(product);
  } catch (error) {
    logger.error("Error fetching Product:", error);
    res.status(500).send({ error: error.message });
  }
};

export const editProductRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the product with the given id
    const product = await productModel.findOne({ _id: id });

    if (!product) {
      logger.warn(`Product not found with supplied id.`);
      return res.status(404).send({ error: "Product not found." });
    }

    logger.trace(`Product with ID: ${id}:`, product);

    try {
      // never allow timestamps or status to be overwritten from the request body
      const { createdAt, updatedAt, started_at, status, ...updateData } =
        req.body;

      const result = await productModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // modifiedCount in Mongoose 6+; older releases called this nModified
      if (result.modifiedCount === 0) {
        logger.error("No Product updated.");
        return res.status(500).send({ error: "No products updated." });
      }
    } catch (updateError) {
      logger.error("Error updating product:", updateError);
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching product:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const newProductRouteHandler = async (req, res) => {
  try {
    const newProduct = {
      ...req.body,
      createdAt: new Date(),
      updatedAt: new Date(),
    };

    // create() throws on failure; the former nCreated check was dead code
    const result = await productModel.create(newProduct);
    res.status(200).send(result);
  } catch (createError) {
    logger.error("Error creating product:", createError);
    res.status(500).send({ error: createError.message });
  }
};
113
src/services/spotlight/index.js
Normal file
@@ -0,0 +1,113 @@
import dotenv from "dotenv";
import { printJobModel } from "../../schemas/printjob.schema.js";
import { printSubJobModel } from "../../schemas/printsubjob.schema.js";
import log4js from "log4js";
import { printerModel } from "../../schemas/printer.schema.js";
import { filamentModel } from "../../schemas/filament.schema.js";
import { gcodeFileModel } from "../../schemas/gcodefile.schema.js";

dotenv.config();

const logger = log4js.getLogger("Spotlight");
logger.level = process.env.LOG_LEVEL || "info";

const formatPrintersResponse = (printers) => {
  return printers.map((printer) => ({
    id: printer.id,
    name: printer.printerName,
    link: `/production/printers/info?printerId=${printer.id}`,
    printer: printer,
  }));
};

const formatJobsResponse = (jobs) => {
  return jobs.map((job) => ({
    id: job.id,
    name: job.gcodeFile.name,
    link: `/production/printjobs/info?printJobId=${job.id}`,
    job: job,
  }));
};

const formatFilamentsResponse = (filaments) => {
  return filaments.map((filament) => ({
    id: filament.id,
    name: filament.name,
    link: `/management/filaments/info?filamentId=${filament.id}`,
    filament: filament,
  }));
};

const formatGCodeFilesResponse = (gcodeFiles) => {
  return gcodeFiles.map((gcodeFile) => ({
    id: gcodeFile.id,
    name: gcodeFile.name,
    link: `/management/gcodefiles/info?gcodeFileId=${gcodeFile.id}`,
    gcodeFile: gcodeFile,
  }));
};

export const getSpotlightRouteHandler = async (req, res) => {
  try {
    // Queries look like "PRN:<id>": a three-letter prefix, a ":" delimiter,
    // and an id suffix. Four characters or fewer cannot contain a suffix,
    // so return an empty result set.
    const query = req.params.query;
    if (query.length <= 4) {
      res.status(200).send([]);
      return;
    }
    const prefix = query.substring(0, 3);
    const delimiter = query.substring(3, 4);
    const suffix = query.substring(4);

    if (delimiter == ":") {
      switch (prefix) {
        case "PRN": {
          const printer = await printerModel.findOne({ id: suffix });
          if (!printer) {
            res.status(404).send({ error: "Printer not found" });
          } else {
            res.status(200).send(formatPrintersResponse([printer]));
          }
          break;
        }
        case "JOB": {
          const job = await printJobModel
            .findOne({ _id: suffix })
            .populate("gcodeFile", "name");
          if (!job) {
            res.status(404).send({ error: "Job not found" });
          } else {
            res.status(200).send(formatJobsResponse([job]));
          }
          break;
        }
        case "FIL": {
          const filament = await filamentModel.findOne({ _id: suffix });
          if (!filament) {
            res.status(404).send({ error: "Filament not found" });
          } else {
            res.status(200).send(formatFilamentsResponse([filament]));
          }
          break;
        }
        case "GCF": {
          const gcodeFile = await gcodeFileModel.findOne({ _id: suffix });
          if (!gcodeFile) {
            res.status(404).send({ error: "GCode file not found" });
          } else {
            res.status(200).send(formatGCodeFilesResponse([gcodeFile]));
          }
          break;
        }
        case "SBJ": {
          const subJob = await printSubJobModel.findOne({ id: suffix });
          if (!subJob) {
            res.status(404).send({ error: "SubJob not found" });
          } else {
            res.status(200).send([subJob]);
          }
          break;
        }
        default:
          res.status(400).send({ error: "Invalid prefix" });
      }
    } else {
      // without this branch the request would hang with no response
      res.status(400).send({ error: "Invalid query format" });
    }
  } catch (error) {
    logger.error("Error running spotlight search:", error);
    res.status(500).send({ error: error.message });
  }
};
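To make the query grammar concrete, a few worked examples of inputs to getSpotlightRouteHandler (the ids are invented for illustration):

// "FIL:662f0c1e8b3a5d2f4c9e7a10" → prefix "FIL", delimiter ":", suffix is the id
//   → filamentModel.findOne({ _id: "662f0c1e8b3a5d2f4c9e7a10" })
// "PRN:printer-01" → printerModel.findOne({ id: "printer-01" })
// "FIL:" → length 4, caught by the length <= 4 guard, returns []
// "FILx12345" → delimiter is not ":", answered with 400 Invalid query format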
132
src/services/vendors/index.js
vendored
Normal file
@@ -0,0 +1,132 @@
import dotenv from "dotenv";
import { vendorModel } from "../../schemas/vendor.schema.js";
import log4js from "log4js";
import mongoose from "mongoose";

dotenv.config();

const logger = log4js.getLogger("Vendors");
logger.level = process.env.LOG_LEVEL || "info";

export const listVendorsRouteHandler = async (
  req,
  res,
  page = 1,
  limit = 25,
  property = "",
  filter = {},
) => {
  try {
    // Calculate the skip value based on the page number and limit
    const skip = (page - 1) * limit;

    const aggregateCommand = [];

    // use filtering if present (`filter != {}` compares object identity and
    // is always true, so check for keys instead)
    if (Object.keys(filter).length > 0) {
      aggregateCommand.push({ $match: filter });
    }

    if (property != "") {
      aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
      aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
    } else {
      aggregateCommand.push({ $project: { image: 0, url: 0 } });
    }

    aggregateCommand.push({ $skip: skip });
    aggregateCommand.push({ $limit: Number(limit) });

    logger.debug(aggregateCommand);

    const vendor = await vendorModel.aggregate(aggregateCommand);

    logger.trace(
      `List of vendors (Page ${page}, Limit ${limit}, Property ${property}):`,
      vendor,
    );
    res.send(vendor);
  } catch (error) {
    logger.error("Error listing vendors:", error);
    res.status(500).send({ error: error.message });
  }
};

export const getVendorRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the vendor with the given id
    const vendor = await vendorModel.findOne({ _id: id });

    if (!vendor) {
      logger.warn(`Vendor not found with supplied id.`);
      return res.status(404).send({ error: "Vendor not found." });
    }

    logger.trace(`Vendor with ID: ${id}:`, vendor);
    res.send(vendor);
  } catch (error) {
    logger.error("Error fetching Vendor:", error);
    res.status(500).send({ error: error.message });
  }
};

export const editVendorRouteHandler = async (req, res) => {
  try {
    // Get ID from params
    const id = new mongoose.Types.ObjectId(req.params.id);
    // Fetch the vendor with the given id
    const vendor = await vendorModel.findOne({ _id: id });

    if (!vendor) {
      logger.warn(`Vendor not found with supplied id.`);
      return res.status(404).send({ error: "Vendor not found." });
    }

    logger.trace(`Vendor with ID: ${id}:`, vendor);

    try {
      // never allow timestamps or status to be overwritten from the request body
      const { createdAt, updatedAt, started_at, status, ...updateData } =
        req.body;

      const result = await vendorModel.updateOne(
        { _id: id },
        { $set: updateData },
      );
      // modifiedCount in Mongoose 6+; older releases called this nModified
      if (result.modifiedCount === 0) {
        logger.error("No Vendor updated.");
        return res.status(500).send({ error: "No vendors updated." });
      }
    } catch (updateError) {
      logger.error("Error updating vendor:", updateError);
      return res.status(500).send({ error: updateError.message });
    }
    res.send("OK");
  } catch (fetchError) {
    logger.error("Error fetching vendor:", fetchError);
    res.status(500).send({ error: fetchError.message });
  }
};

export const newVendorRouteHandler = async (req, res) => {
  try {
    const newVendor = { ...req.body, createdAt: new Date(), updatedAt: new Date() };

    // create() throws on failure; the former nCreated check was dead code
    await vendorModel.create(newVendor);
    res.status(200).send({ status: "ok" });
  } catch (createError) {
    logger.error("Error creating vendor:", createError);
    res.status(500).send({ error: createError.message });
  }
};