Added more functionality

This commit is contained in:
Tom Butcher 2025-05-09 22:18:00 +01:00
parent 204964a44c
commit af15fc0dbe
25 changed files with 3749 additions and 908 deletions

package-lock.json (generated): 2516 changes

File diff suppressed because it is too large

View File

@ -7,17 +7,22 @@
"@simplewebauthn/server": "^10.0.0",
"@tremor/react": "^3.17.2",
"antd": "*",
"axios": "^1.8.4",
"bcrypt": "*",
"body-parser": "*",
"cors": "^2.8.5",
"dotenv": "*",
"express": "*",
"express": "^4.19.2",
"express-session": "^1.18.0",
"i": "^0.3.7",
"jsonwebtoken": "*",
"keycloak-connect": "^26.1.1",
"log4js": "^6.9.1",
"mongodb": "*",
"mongoose": "*",
"mongoose-sequence": "^6.0.1",
"mongoose-unique-array": "^0.4.2",
"multer": "^1.4.5-lts.1",
"mysql": "^2.18.1",
"mysql2": "^2.3.3",
"node-cron": "^3.0.2",
@ -43,7 +48,7 @@
"standard": "^17.1.0"
},
"scripts": {
"start:dev": "nodemon --exec babel-node --experimental-specifier-resolution=node src/index.js",
"dev": "nodemon --exec babel-node --experimental-specifier-resolution=node src/index.js",
"test": "echo \"Error: no test specified\" && exit 1",
"seed": "node src/mongo/seedData.js",
"clear": "node src/mongo/clearDbs.js"

View File

@ -1,60 +1,81 @@
import express from "express";
import bodyParser from "body-parser";
import cors from "cors";
import dotenv from "dotenv";
import "./passport.js";
import { dbConnect } from "./mongo/index.js";
import { apiRoutes, authRoutes, printerRoutes, printJobRoutes, gcodeFileRoutes, fillamentRoutes } from "./routes/index.js";
import path from "path";
import * as fs from "fs";
import cron from "node-cron";
import ReseedAction from "./mongo/ReseedAction.js";
import log4js from "log4js";
dotenv.config();
const PORT = process.env.PORT || 8080;
const app = express();
const logger = log4js.getLogger("App");
logger.level = process.env.LOG_LEVEL;
app.use(log4js.connectLogger(logger, { level: "trace" }));
const whitelist = [process.env.APP_URL_CLIENT];
const corsOptions = {
origin: function (origin, callback) {
if (!origin || whitelist.indexOf(origin) !== -1) {
callback(null, true);
} else {
callback(new Error("Not allowed by CORS"));
}
},
credentials: true,
};
dbConnect();
app.use(cors(corsOptions));
app.use(bodyParser.json({ type: "application/json", strict: false, limit: '50mb' }));
app.use(express.json());
app.get("/", function (req, res) {
const __dirname = fs.realpathSync(".");
res.sendFile(path.join(__dirname, "/src/landing/index.html"));
});
app.use("/auth", authRoutes);
app.use("/overview", apiRoutes);
app.use("/printers", printerRoutes);
app.use("/printjobs", printJobRoutes);
app.use("/gcodefiles", gcodeFileRoutes);
app.use("/fillaments", fillamentRoutes);
if (process.env.SCHEDULE_HOUR) {
cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *'`, () => {
ReseedAction();
});
}
app.listen(PORT, () => logger.info(`Server listening to port ${PORT}`));
import express from "express";
import bodyParser from "body-parser";
import cors from "cors";
import dotenv from "dotenv";
import { expressSession, keycloak } from "./keycloak.js";
import { dbConnect } from "./mongo/index.js";
import {
apiRoutes,
authRoutes,
printerRoutes,
printJobRoutes,
gcodeFileRoutes,
filamentRoutes,
spotlightRoutes,
partRoutes,
productRoutes,
vendorRoutes,
materialRoutes,
} from "./routes/index.js";
import path from "path";
import * as fs from "fs";
import cron from "node-cron";
import ReseedAction from "./mongo/ReseedAction.js";
import log4js from "log4js";
dotenv.config();
const PORT = process.env.PORT || 8080;
const app = express();
const logger = log4js.getLogger("App");
logger.level = process.env.LOG_LEVEL;
app.use(log4js.connectLogger(logger, { level: "trace" }));
const whitelist = [process.env.APP_URL_CLIENT];
const corsOptions = {
origin: function (origin, callback) {
if (!origin || whitelist.indexOf(origin) !== -1) {
callback(null, true);
} else {
callback(new Error("Not allowed by CORS"));
}
},
credentials: true,
};
dbConnect();
app.use(cors(corsOptions));
app.use(
bodyParser.json({ type: "application/json", strict: false, limit: "50mb" }),
);
app.use(express.json());
app.use(expressSession);
app.use(keycloak.middleware());
app.get("/", function (req, res) {
const __dirname = fs.realpathSync(".");
res.sendFile(path.join(__dirname, "/src/landing/index.html"));
});
app.use("/auth", authRoutes);
app.use("/overview", apiRoutes);
app.use("/spotlight", spotlightRoutes);
app.use("/printers", printerRoutes);
app.use("/printjobs", printJobRoutes);
app.use("/gcodefiles", gcodeFileRoutes);
app.use("/filaments", filamentRoutes);
app.use("/parts", partRoutes);
app.use("/products", productRoutes);
app.use("/vendors", vendorRoutes);
app.use("/materials", materialRoutes);
if (process.env.SCHEDULE_HOUR) {
cron.schedule(`0 */${process.env.SCHEDULE_HOUR} * * *`, () => {
ReseedAction();
});
}
app.listen(PORT, () => logger.info(`Server listening to port ${PORT}`));
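The new entry point and the route modules import expressSession, keycloak, and isAuthenticated from ./keycloak.js, which is not shown in this diff. A minimal sketch of what that module plausibly exports, assuming the keycloak-connect and express-session packages already listed in package.json (option values and the SESSION_SECRET variable are illustrative; the KEYCLOAK_* variables are the ones used elsewhere in this commit):
import session from "express-session";
import Keycloak from "keycloak-connect";
import dotenv from "dotenv";
dotenv.config();
// Shared store so keycloak-connect and express-session see the same sessions.
const memoryStore = new session.MemoryStore();
export const expressSession = session({
  secret: process.env.SESSION_SECRET,
  resave: false,
  saveUninitialized: true,
  store: memoryStore,
});
export const keycloak = new Keycloak(
  { store: memoryStore },
  {
    realm: process.env.KEYCLOAK_REALM,
    "auth-server-url": process.env.KEYCLOAK_URL,
    resource: process.env.KEYCLOAK_CLIENT_ID,
    credentials: { secret: process.env.KEYCLOAK_CLIENT_SECRET },
    "confidential-port": 0,
    "ssl-required": "external",
  },
);
// Lightweight guard used by the route modules: lets a request through once
// /auth/callback has stored a token in the server-side session.
export const isAuthenticated = (req, res, next) => {
  if (req.session && req.session["keycloak-token"]) {
    return next();
  }
  return res.status(401).json({ error: "Not authenticated" });
};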

View File

@ -20,7 +20,7 @@ const ReseedAction = () => {
name: "Admin",
email: "admin@jsonapi.com",
password: hashPassword,
created_at: new Date(),
createdAt: new Date(),
profile_image: "../../images/admin.jpg",
};

View File

@ -14,7 +14,7 @@ async function seedDB() {
name: "Admin",
email: "admin@jsonapi.com",
password: hashPassword,
created_at: new Date(),
createdAt: new Date(),
profile_image: "../../images/admin.jpg",
};
@ -26,8 +26,8 @@ async function seedDB() {
status : {
type: "Queued"
},
created_at: new Date(),
updated_at: new Date(),
createdAt: new Date(),
updatedAt: new Date(),
started_at: new Date(),
};

View File

@ -4,6 +4,8 @@ import dotenv from "dotenv";
import passport from "passport";
import { userModel } from "./schemas/user.schema.js";
import { hostModel } from "./schemas/host.schema.js";
const JWTStrategy = passportJWT.Strategy;
dotenv.config();
@ -14,14 +16,25 @@ passport.use(
secretOrKey: process.env.JWT_SECRET,
},
function (jwtPayload, done) {
return userModel
.findOne({ _id: jwtPayload.id })
.then((user) => {
return done(null, user);
})
.catch((err) => {
return done(err);
});
if (jwtPayload.hostId) {
return hostModel
.findOne({ hostId: jwtPayload.hostId })
.then((host) => {
return done(null, host);
})
.catch((err) => {
return done(err);
});
} else {
return userModel
.findOne({ _id: jwtPayload.id })
.then((user) => {
return done(null, user);
})
.catch((err) => {
return done(err);
});
}
}
)
);

View File

@ -1,22 +1,26 @@
import express from "express";
import passport from "passport";
import jwt from 'jsonwebtoken';
import { keycloak, isAuthenticated } from "../../keycloak.js";
const router = express.Router();
import { getProfileRouteHandler, patchProfileRouteHandler, getDashboardRouteHandler } from "../../services/api/index.js";
import {
getProfileRouteHandler,
patchProfileRouteHandler,
getDashboardRouteHandler,
} from "../../services/api/index.js";
// get main dashboard info profile
router.get("/", passport.authenticate('jwt',{session: false}), (req, res) => {
router.get("/", keycloak.protect(), (req, res) => {
getDashboardRouteHandler(req, res);
});
// get user's profile
router.get("/user", passport.authenticate('jwt',{session: false}), (req, res) => {
router.get("/user", isAuthenticated, (req, res) => {
getProfileRouteHandler(req, res);
});
// update user's profile
router.patch("/", passport.authenticate('jwt',{session: false}), async (req, res) => {
router.patch("/", isAuthenticated, async (req, res) => {
patchProfileRouteHandler(req, res);
});

View File

@ -1,50 +1,34 @@
import express from "express";
import passport from "passport";
import { isAuthenticated, keycloak } from "../../keycloak.js";
import {
getAuthModesHandler,
forgotPasswordRouteHandler,
loginRouteHandler,
registerPasskeyRouteHandler,
loginPasskeyRouteHandler,
registerRouteHandler,
resetPasswordRouteHandler,
validateTokenRouteHandler,
loginCallbackRouteHandler,
userRouteHandler,
logoutRouteHandler,
refreshTokenRouteHandler,
} from "../../services/auth/index.js";
const router = express.Router();
router.post("/modes", async (req, res, next) => {
const { email } = req.body;
await getAuthModesHandler(req, res, email);
router.get("/login", async (req, res) => {
loginRouteHandler(req, res);
});
router.post("/login", async (req, res, next) => {
const { email, password } = req.body;
await loginRouteHandler(req, res, email, password);
router.get("/callback", async (req, res) => {
loginCallbackRouteHandler(req, res);
});
router.post("/validate-token", async (req, res, next) => {
const { token } = req.body;
await validateTokenRouteHandler(req, res, token);
router.get("/refresh", async (req, res) => {
refreshTokenRouteHandler(req, res);
});
router.post("/logout", (req, res) => {
return res.sendStatus(204);
router.get("/user", isAuthenticated, async (req, res) => {
userRouteHandler(req, res);
});
router.post("/register", async (req, res) => {
const { name, email, password } = req.body;
await registerRouteHandler(req, res, name, email, password);
});
router.post("/passkey/register", passport.authenticate('jwt',{session: false}), async (req, res) => {
await registerPasskeyRouteHandler(req, res);
});
router.post("/passkey/login", async (req, res) => {
const { email, attestationResponse } = req.body;
await loginPasskeyRouteHandler(req, res, email, attestationResponse);
router.get("/logout", (req, res) => {
logoutRouteHandler(req, res);
});
router.post("/password-forgot", async (req, res) => {
@ -52,8 +36,4 @@ router.post("/password-forgot", async (req, res) => {
await forgotPasswordRouteHandler(req, res, email);
});
router.post("/password-reset", async (req, res) => {
await resetPasswordRouteHandler(req, res);
});
export default router;
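With these routes the flow is redirect-based: the client sends the browser to /auth/login, Keycloak handles the credentials, and /auth/callback stores the tokens in the server-side session before redirecting back to the client app. A rough browser-side sketch; API_URL is a placeholder for wherever this server is reachable:
// Kick off login; redirect_uri is read by loginRouteHandler and echoed back via state.
window.location.href = `${API_URL}/auth/login?redirect_uri=/production/overview`;
// Once the callback has populated the session, the profile can be fetched with the cookie.
const user = await fetch(`${API_URL}/auth/user`, { credentials: "include" }).then(
  (res) => res.json(),
);
console.log(user.username, user.roles);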

View File

@ -1,47 +0,0 @@
import express from "express";
import passport from "passport";
import jwt from 'jsonwebtoken';
import { parseStringIfNumber } from '../../util/index.js'
const router = express.Router();
import { listFillamentsRouteHandler, getFillamentRouteHandler, editFillamentRouteHandler, newFillamentRouteHandler } from "../../services/fillaments/index.js";
// list of fillaments
router.get("/", passport.authenticate('jwt',{session: false}), (req, res) => {
const { page, limit, property } = req.query;
const allowedFilters = [
'type',
'brand',
'diameter',
'color'
]
const filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value);
}
}
}
listFillamentsRouteHandler(req, res, page, limit, property, filter);
});
router.post("/", passport.authenticate('jwt',{session: false}), (req, res) => {
newFillamentRouteHandler(req, res);
});
router.get("/:id", passport.authenticate('jwt',{session: false}), (req, res) => {
getFillamentRouteHandler(req, res);
});
// update printer info
router.put("/:id", passport.authenticate('jwt',{session: false}), async (req, res) => {
editFillamentRouteHandler(req, res);
});
export default router;

View File

@ -1,23 +1,66 @@
import express from "express";
import passport from "passport";
import jwt from 'jsonwebtoken';
import { isAuthenticated } from "../../keycloak.js";
import { parseStringIfNumber } from "../../util/index.js";
const router = express.Router();
import { listGCodeFilesRouteHandler, getGCodeFileRouteHandler, editGCodeFileRouteHandler } from "../../services/gcodefiles/index.js";
import {
listGCodeFilesRouteHandler,
getGCodeFileRouteHandler,
editGCodeFileRouteHandler,
newGCodeFileRouteHandler,
parseGCodeFileHandler,
uploadGCodeFileContentRouteHandler,
getGCodeFileContentRouteHandler,
} from "../../services/gcodefiles/index.js";
// list of gcode files
router.get("/", passport.authenticate('jwt',{session: false}), (req, res) => {
const { page, limit } = req.body;
listGCodeFilesRouteHandler(req, res, page, limit);
router.get("/", isAuthenticated, (req, res) => {
const { page, limit, property, search } = req.query;
const allowedFilters = [
"filament.type",
"filament.brand",
"filament.diameter",
"filament.color",
];
const filter = {};
for (const [key, value] of Object.entries(req.query)) {
for (var i = 0; i < allowedFilters.length; i++) {
if (key == allowedFilters[i]) {
filter[key] = parseStringIfNumber(value);
}
}
}
listGCodeFilesRouteHandler(req, res, page, limit, property, filter, search);
});
router.get("/:id", passport.authenticate('jwt',{session: false}), (req, res) => {
// new gcode file
router.post("/", isAuthenticated, (req, res) => {
newGCodeFileRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
getGCodeFileRouteHandler(req, res);
});
// update gcode file info
router.put("/:id", passport.authenticate('jwt',{session: false}), async (req, res) => {
router.put("/:id", isAuthenticated, async (req, res) => {
editGCodeFileRouteHandler(req, res);
});
router.post("/:id/content", isAuthenticated, (req, res) => {
uploadGCodeFileContentRouteHandler(req, res);
});
router.post("/content", isAuthenticated, (req, res) => {
parseGCodeFileHandler(req, res);
});
router.get("/:id/content", isAuthenticated, (req, res) => {
getGCodeFileContentRouteHandler(req, res);
});
export default router;
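The new content endpoints take multipart uploads under the field name gcodeFile, matching the multer setup in the gcodefiles service further down. A rough Node 18+ client sketch; API_URL, fileId, and sessionCookie are placeholders:
import fs from "node:fs/promises";
// POST /gcodefiles/:id/content stores the raw file and records its generated name on
// the GCodeFile document; POST /gcodefiles/content only parses and returns the embedded
// slicer config without keeping the file.
const buffer = await fs.readFile("./part.gcode");
const form = new FormData();
form.append("gcodeFile", new Blob([buffer]), "part.gcode");
const res = await fetch(`${API_URL}/gcodefiles/${fileId}/content`, {
  method: "POST",
  body: form,
  headers: { cookie: sessionCookie }, // session established via /auth/login
});
console.log(await res.json()); // e.g. { status: "OK", file: "<id>.gcode" }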

View File

@ -1,9 +1,27 @@
import userRoutes from './users/index.js';
import apiRoutes from './api/index.js';
import authRoutes from './auth/index.js';
import printerRoutes from './printers/index.js';
import printJobRoutes from './printjobs/index.js';
import gcodeFileRoutes from './gcodefiles/index.js'
import fillamentRoutes from './fillaments/index.js'
export { userRoutes, apiRoutes, authRoutes, printerRoutes, printJobRoutes, gcodeFileRoutes, fillamentRoutes };
import userRoutes from "./users/index.js";
import apiRoutes from "./api/index.js";
import authRoutes from "./auth/index.js";
import printerRoutes from "./printers/index.js";
import printJobRoutes from "./printjobs/index.js";
import gcodeFileRoutes from "./gcodefiles/index.js";
import filamentRoutes from "./filaments/index.js";
import spotlightRoutes from "./spotlight/index.js";
import partRoutes from "./parts/index.js";
import productRoutes from "./products/index.js";
import vendorRoutes from "./vendors/index.js";
import materialRoutes from "./materials/index.js";
export {
userRoutes,
apiRoutes,
authRoutes,
printerRoutes,
printJobRoutes,
gcodeFileRoutes,
filamentRoutes,
spotlightRoutes,
partRoutes,
productRoutes,
vendorRoutes,
materialRoutes,
};

View File

@ -1,25 +1,33 @@
import express from "express";
import passport from "passport";
import jwt from 'jsonwebtoken';
import { keycloak, isAuthenticated } from "../../keycloak.js";
const router = express.Router();
import { listPrintersRouteHandler, editPrinterRouteHandler, getPrinterRouteHandler } from "../../services/printers/index.js";
import {
listPrintersRouteHandler,
editPrinterRouteHandler,
getPrinterRouteHandler,
createPrinterRouteHandler,
} from "../../services/printers/index.js";
// list of printers
router.get("/", passport.authenticate('jwt',{session: false}), (req, res) => {
router.get("/", isAuthenticated, (req, res) => {
const { page, limit } = req.body;
listPrintersRouteHandler(req, res, page, limit);
});
router.get("/:remoteAddress", passport.authenticate('jwt',{session: false}), (req, res) => {
// create new printer
router.post("/", isAuthenticated, (req, res) => {
createPrinterRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
getPrinterRouteHandler(req, res);
});
// update printer info
router.put("/:remoteAddress", passport.authenticate('jwt',{session: false}), async (req, res) => {
router.put("/:id", isAuthenticated, async (req, res) => {
editPrinterRouteHandler(req, res);
});
export default router;

View File

@ -1,22 +1,31 @@
import express from "express";
import passport from "passport";
import jwt from 'jsonwebtoken';
import { isAuthenticated } from "../../keycloak.js";
const router = express.Router();
import { listPrintJobsRouteHandler, getPrintJobRouteHandler, editPrintJobRouteHandler } from "../../services/printjobs/index.js";
import {
listPrintJobsRouteHandler,
getPrintJobRouteHandler,
editPrintJobRouteHandler,
createPrintJobRouteHandler,
} from "../../services/printjobs/index.js";
// list of printers
router.get("/", passport.authenticate('jwt',{session: false}), (req, res) => {
// list of print jobs
router.get("/", isAuthenticated, (req, res) => {
const { page, limit } = req.body;
listPrintJobsRouteHandler(req, res, page, limit);
});
router.get("/:jobNumber", passport.authenticate('jwt',{session: false}), (req, res) => {
// create new print job
router.post("/", isAuthenticated, (req, res) => {
createPrintJobRouteHandler(req, res);
});
router.get("/:id", isAuthenticated, (req, res) => {
getPrintJobRouteHandler(req, res);
});
// update printer info
router.put("/:jobNumber", passport.authenticate('jwt',{session: false}), async (req, res) => {
// update job info
router.put("/:id", isAuthenticated, async (req, res) => {
editPrintJobRouteHandler(req, res);
});
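The create handler (createPrintJobRouteHandler, later in this commit) requires at least one printer and defaults quantity to 1. An illustrative request; the ObjectId strings are made up and API_URL/sessionCookie are placeholders as in the earlier sketches:
// Queue a draft print job for one gcode file across two printers.
await fetch(`${API_URL}/printjobs`, {
  method: "POST",
  headers: { "Content-Type": "application/json", cookie: sessionCookie },
  body: JSON.stringify({
    gcodeFile: "6640f0c1a2b3c4d5e6f70001",
    printers: ["6640f0c1a2b3c4d5e6f70002", "6640f0c1a2b3c4d5e6f70003"],
    quantity: 4,
  }),
});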

View File

@ -1,23 +0,0 @@
import mongoose from "mongoose";
const fillamentSchema = new mongoose.Schema({
name: { required: true, type: String },
barcode: { required: false, type: String },
url: { required: false, type: String },
image: { required: false, type: Buffer },
color: { required: true, type: String },
brand: { required: true, type: String },
type: { required: true, type: String },
price: { required: true, type: Number },
diameter: { required: true, type: Number },
created_at: { required: true, type: Date },
updated_at: { required: true, type: Date },
});
fillamentSchema.virtual("id").get(function () {
return this._id.toHexString();
});
fillamentSchema.set("toJSON", { virtuals: true });
export const fillamentModel = mongoose.model("Fillament", fillamentSchema);

View File

@ -3,16 +3,18 @@ const { Schema } = mongoose;
const gcodeFileSchema = new mongoose.Schema({
name: { required: true, type: String },
gcodeFileName: { required: true, type: String },
gcodeFileName: { required: false, type: String },
gcodeFileInfo: { required: true, type: Object },
size: { type: Number, required: false },
lines: { type: Number, required: false },
fillament: { type: Schema.Types.ObjectId, ref: 'Fillament', required: true },
image: { type: Buffer, required: false },
printTimeMins: { type: Number, required: false },
created_at: { type: Date },
updated_at: { type: Date },
filament: { type: Schema.Types.ObjectId, ref: "Filament", required: true },
parts: [{ type: Schema.Types.ObjectId, ref: "Part", required: true }],
price: { type: Number, required: false },
createdAt: { type: Date },
updatedAt: { type: Date },
});
gcodeFileSchema.index({ name: "text", brand: "text" });
gcodeFileSchema.virtual("id").get(function () {
return this._id.toHexString();
});

View File

@ -3,7 +3,7 @@ import mongoose from "mongoose";
const passwordResetSchema = new mongoose.Schema({
email: { required: true, type: String },
token: { required: true, type: String },
created_at: { type: Date },
createdAt: { type: Date },
});
passwordResetSchema.virtual("id").get(function () {

View File

@ -1,22 +1,49 @@
import mongoose from "mongoose";
const { Schema } = mongoose;
const printerSchema = new mongoose.Schema({
friendlyName: { required: true, type: String },
online: { required: true, type: Boolean },
status: {
type: { required: true, type: String },
percent: { required: false, type: Number },
},
remoteAddress: { required: true, type: String },
hostId: { required: true, type: String },
connectedAt: { required: true, type: Date },
loadedFillament: { required: true, type: Object }
});
// Define the moonraker connection schema
const moonrakerSchema = new Schema(
{
host: { type: String, required: true },
port: { type: Number, required: true },
protocol: { type: String, required: true },
apiKey: { type: String, default: null, required: false },
},
{ _id: false },
);
// Define the main printer schema
const printerSchema = new Schema(
{
printerName: { type: String, required: true },
online: { type: Boolean, required: true, default: false },
state: {
type: { type: String, required: true, default: "Offline" },
percent: { type: Number, required: false },
},
connectedAt: { type: Date, default: null },
loadedFilament: {
type: Schema.Types.ObjectId,
ref: "Filament",
default: null,
},
moonraker: { type: moonrakerSchema, required: true },
tags: [{ type: String }],
firmware: { type: String },
currentJob: { type: Schema.Types.ObjectId, ref: "PrintJob" },
currentSubJob: { type: Schema.Types.ObjectId, ref: "PrintSubJob" },
subJobs: [{ type: Schema.Types.ObjectId, ref: "PrintSubJob" }],
},
{ timestamps: true },
);
// Add virtual id getter
printerSchema.virtual("id").get(function () {
return this._id.toHexString();
});
// Configure JSON serialization to include virtuals
printerSchema.set("toJSON", { virtuals: true });
// Create and export the model
export const printerModel = mongoose.model("Printer", printerSchema);
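Printers matching this schema are created through the new POST /printers route; only printerName and the moonraker block (host, port, protocol) are required by the create handler. An illustrative payload with made-up values (API_URL and sessionCookie are placeholders):
const body = {
  printerName: "Voron 2.4",
  moonraker: { host: "192.168.1.50", port: 7125, protocol: "http" },
  tags: ["voron", "abs"],
  firmware: "Klipper",
};
await fetch(`${API_URL}/printers`, {
  method: "POST",
  headers: { "Content-Type": "application/json", cookie: sessionCookie },
  body: JSON.stringify(body),
});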

View File

@ -2,14 +2,27 @@ import mongoose from "mongoose";
const { Schema } = mongoose;
const printJobSchema = new mongoose.Schema({
status: {
state: {
type: { required: true, type: String },
printer: { type: Schema.Types.ObjectId, ref: 'Printer', required: false },
},
created_at: { required: true, type: Date },
updated_at: { required: true, type: Date },
started_at: { required: true, type: Date },
gcode_file: { type: Schema.Types.ObjectId, ref: 'GCodeFile', required: false }
},
printers: [{ type: Schema.Types.ObjectId, ref: "Printer", required: false }],
createdAt: { required: true, type: Date },
updatedAt: { required: true, type: Date },
startedAt: { required: true, type: Date },
gcodeFile: {
type: Schema.Types.ObjectId,
ref: "GCodeFile",
required: false,
},
quantity: {
type: Number,
required: true,
default: 1,
min: 1,
},
subJobs: [
{ type: Schema.Types.ObjectId, ref: "PrintSubJob", required: false },
],
});
printJobSchema.virtual("id").get(function () {

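The subJobs and currentSubJob references here and in the printer schema point at a PrintSubJob model whose schema file is not part of this diff. A speculative sketch, keeping only the fields this commit actually populates (state, printJob) plus an assumed printer back-reference:
import mongoose from "mongoose";
const { Schema } = mongoose;
// Hypothetical printsubjob.schema.js; field names beyond state and printJob are guesses.
const printSubJobSchema = new Schema(
  {
    state: {
      type: { type: String, required: true, default: "Queued" },
      percent: { type: Number },
    },
    printJob: { type: Schema.Types.ObjectId, ref: "PrintJob", required: true },
    printer: { type: Schema.Types.ObjectId, ref: "Printer" },
  },
  { timestamps: true },
);
printSubJobSchema.virtual("id").get(function () {
  return this._id.toHexString();
});
printSubJobSchema.set("toJSON", { virtuals: true });
export const printSubJobModel = mongoose.model("PrintSubJob", printSubJobSchema);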
View File

@ -4,19 +4,21 @@ import mongoose from "mongoose";
const userSchema = new mongoose.Schema({
name: { required: true, type: String },
email: { required: true, type: String },
email_verified_at: { type: Date },
emailVerifiedAt: { type: Date },
password: { required: true, type: String },
webAuthnCredentials: [{
id: String,
publicKey: Buffer,
counter: Number,
deviceType: String,
backedUp: Boolean,
transports: [String]
}],
profile_image: { type: String },
created_at: { type: Date },
updated_at: { type: Date },
webAuthnCredentials: [
{
id: String,
publicKey: Buffer,
counter: Number,
deviceType: String,
backedUp: Boolean,
transports: [String],
},
],
profileImage: { type: String },
createdAt: { type: Date },
updatedAt: { type: Date },
});
userSchema.virtual("id").get(function () {

View File

@ -1,376 +1,297 @@
import dotenv from "dotenv";
import nodemailer from "nodemailer";
import randomToken from "random-token";
import bcrypt from "bcrypt";
import url from "url";
import { userModel } from "../../schemas/user.schema.js";
import { passwordResetModel } from "../../schemas/passwordResets.schema.js";
import {
generateRegistrationOptions,
verifyRegistrationResponse,
generateAuthenticationOptions,
verifyAuthenticationResponse,
} from "@simplewebauthn/server";
import { isoUint8Array } from "@simplewebauthn/server/helpers";
import jwt from "jsonwebtoken";
import { keycloak } from "../../keycloak.js";
import log4js from "log4js";
import axios from "axios";
dotenv.config();
const logger = log4js.getLogger("Auth");
logger.level = process.env.LOG_LEVEL;
dotenv.config();
// Login handler
export const loginRouteHandler = (req, res) => {
// Get the redirect URL from form data or default to production overview
const redirectUrl = req.query.redirect_uri || "/production/overview";
let challenges = {};
// Store the original URL to redirect after login
const authUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/auth`;
const callbackUrl = encodeURIComponent(
`${process.env.APP_URL_API}/auth/callback`,
);
const state = encodeURIComponent(redirectUrl);
const rpName = "Farm Control";
const rpID = url.parse(process.env.APP_URL_CLIENT).host;
const origin = `https://${rpID}`;
logger.warn(req.query.redirect_uri);
const transporter = nodemailer.createTransport({
host: "smtp.mailtrap.io",
port: 2525,
auth: {
user: process.env.MAILTRAP_USER,
pass: process.env.MAILTRAP_PASSWORD,
},
});
function generateToken() {
}
export const getAuthModesHandler = async (req, res, email) => {
let foundUser = await userModel.findOne({ email: email });
if (foundUser == null) {
return res.status(400).json({
error: "Invalid email address.",
});
}
if (foundUser.webAuthnCredentials.length > 0) {
return res.status(200).json({
authModes: ["password", "passkey"],
});
} else {
return res.status(200).json({
authModes: ["password"],
});
}
res.redirect(
`${authUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${callbackUrl}&response_type=code&scope=openid&state=${state}`,
);
};
export const loginRouteHandler = async (req, res, email, password) => {
//Check If User Exists
let foundUser = await userModel.findOne({ email: email });
if (foundUser == null) {
return res.status(400).json({
error: "Invalid credentials.",
// Login callback handler
export const loginCallbackRouteHandler = (req, res) => {
// Don't use keycloak.protect() here as it expects an already authenticated session
// Extract the code and state from the query parameters
const code = req.query.code;
const state = req.query.state || "/production/overview";
if (!code) {
return res.status(400).send("Authorization code missing");
}
// Exchange the code for tokens manually
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
const redirectUri = `${process.env.APP_URL_API || "http://localhost:8080"}/auth/callback`;
// Make a POST request to exchange the code for tokens
axios
.post(
tokenUrl,
new URLSearchParams({
grant_type: "authorization_code",
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
code: code,
redirect_uri: redirectUri,
}).toString(),
{
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
},
)
.then((response) => {
// Store tokens in session
req.session["keycloak-token"] = {
access_token: response.data.access_token,
refresh_token: response.data.refresh_token,
id_token: response.data.id_token,
expires_at: new Date().getTime() + response.data.expires_in * 1000,
};
// Save session and redirect to the original URL
req.session.save(() => {
res.redirect(
(process.env.APP_URL_CLIENT || "http://localhost:3000") + state,
);
});
})
.catch((error) => {
console.error(
"Token exchange error:",
error.response?.data || error.message,
);
res.status(500).send("Authentication failed");
});
} else {
const validPassword = await bcrypt.compare(password, foundUser.password);
if (validPassword) {
// Generate JWT token
const token = jwt.sign(
{ id: foundUser.id, email: foundUser.email },
process.env.JWT_SECRET,
};
export const userRouteHandler = (req, res) => {
if (req.session && req.session["keycloak-token"]) {
const token = req.session["keycloak-token"];
const userInfoUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/userinfo`;
// User is authenticated
// Extract user info from the token
//
logger.info("Fetching user from keycloak...");
axios
.post(
userInfoUrl,
new URLSearchParams({
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
}),
{
expiresIn: "24h",
}
);
return res.json({
user: {
id: foundUser.id,
name: foundUser.name,
email: foundUser.email,
headers: {
Authorization: `Bearer ${token.access_token}`,
},
},
access_token: token,
)
.then((response) => {
const userInfo = {
// Extract user details from token
// This depends on your token structure
access_token: token.access_token,
expires_at: token.expires_at,
roles: token.realm_access?.roles || [],
username: response.data.preferred_username,
email: response.data.email,
name: response.data.name,
firstName: response.data.given_name,
lastName: response.data.family_name,
};
res.json(userInfo);
})
.catch((error) => {
logger.error(
"Token exchange error:",
error.response?.data || error.message,
);
res.status(500).send("Authentication failed");
});
} else {
return res.status(400).json({
error: "Invalid credentials.",
});
}
}
};
export const validateTokenRouteHandler = async (req, res, token) => {
try {
jwt.verify(token, process.env.JWT_SECRET);
res.status(200).send({
status: "OK",
});
} catch (err) {
console.error("Token verification error:", err);
res.status(401).send("Invalid token");
}
};
export const registerPasskeyRouteHandler = async (req, res) => {
// check to see if the request has provided a user
const user = req.user;
if (!user) {
// if no user exists
return res.status(400).json({ error: "User not specified." });
}
if (req.body.token) {
const options = await generateRegistrationOptions({
rpName: rpName,
rpID: rpID,
userName: user.email,
userDisplayName: user.name,
excludeCredentials: user.webAuthnCredentials.map(
(webAuthnCredential) => ({
id: webAuthnCredential.id,
transports: webAuthnCredential.transports,
})
),
attestationType: "none",
authenticatorSelection: {
residentKey: "preferred",
userVerification: "preferred",
authenticatorAttachment: "platform",
},
});
challenges[user.id] = options.challenge;
return res.status(200).send(options);
}
const expectedChallenge = challenges[user.id];
const attestationResponse = req.body;
let verification;
try {
verification = await verifyRegistrationResponse({
response: attestationResponse,
expectedChallenge,
expectedOrigin: process.env.APP_URL_CLIENT,
expectedRPID: url.parse(process.env.APP_URL_CLIENT).host,
});
const { registrationInfo } = verification;
const {
credentialID,
credentialPublicKey,
counter,
credentialDeviceType,
credentialBackedUp,
} = registrationInfo;
const webAuthnCredential = {
id: credentialID,
publicKey: Buffer.from(new Uint8Array(credentialPublicKey)),
counter,
deviceType: credentialDeviceType,
backedUp: credentialBackedUp,
transports: attestationResponse.response.transports,
};
console.log(webAuthnCredential);
user.webAuthnCredentials.push(webAuthnCredential);
await user.save();
res.status(200).send({ status: "OK" });
} catch (error) {
console.log(error);
return res.status(400).json({ error: error.message });
}
if (verification.verified) {
} else {
res.status(400).send({ error: "Not verified." });
// User is not authenticated
res.status(401).json({ error: "Not authenticated" });
}
};
export const loginPasskeyRouteHandler = async (
req,
res,
email,
attestationResponse
) => {
if (!email) {
return;
}
let user = await userModel.findOne({ email: email });
if (user == null) {
return res.status(400).json({
error: "Invalid email address.",
});
}
if (attestationResponse) {
logger.info("Verfifying challenge...");
const expectedChallenge = challenges[user.id];
let verification;
try {
const webAuthnCredentialIndex = user.webAuthnCredentials.findIndex(
(cred) => cred.id === attestationResponse.id
);
const webAuthnCredential = user.webAuthnCredentials[webAuthnCredentialIndex];
verification = await verifyAuthenticationResponse({
response: attestationResponse,
expectedChallenge,
expectedOrigin: process.env.APP_URL_CLIENT,
expectedRPID: url.parse(process.env.APP_URL_CLIENT).host,
authenticator: {
credentialID: webAuthnCredential.id,
credentialPublicKey: new Uint8Array(webAuthnCredential.publicKey),
counter: webAuthnCredential.counter,
transports: webAuthnCredential.transports,
},
});
user.webAuthnCredentials[webAuthnCredentialIndex].counter = verification.authenticationInfo.newCounter; // Update connection counter
await user.save();
// Logout handler
export const logoutRouteHandler = (req, res) => {
// Get the redirect URL from query or default to login page
const redirectUrl = req.query.redirect_uri || "/login";
// Generate JWT token
const token = jwt.sign(
{ id: user.id, email: user.email },
process.env.JWT_SECRET,
{
expiresIn: "24h",
}
);
return res.json({
user: {
id: user.id,
name: user.name,
email: user.email,
},
access_token: token,
});
} catch (error) {
console.log(error);
res.status(400).send({ error });
}
} else {
// Get options
logger.info("Sending authentication options...");
const options = await generateAuthenticationOptions({
rpID: url.parse(process.env.APP_URL_CLIENT).host,
allowCredentials: user.webAuthnCredentials.map((cred) => ({
id: cred.id,
type: "public-key",
transports: cred.transports,
})),
});
challenges[user.id] = options.challenge;
res.status(200).send(options);
}
};
export const registerRouteHandler = async (req, res, name, email, password) => {
// check if user already exists
let foundUser = await userModel.findOne({ email: email });
if (foundUser) {
// does not get the error
return res.status(400).json({ message: "Email is already in use" });
}
// check password to exist and be at least 8 characters long
if (!password || password.length < 8) {
return res
.status(400)
.json({ message: "Password must be at least 8 characters long." });
}
// hash password to save in db
const salt = await bcrypt.genSalt(10);
const hashPassword = await bcrypt.hash(password, salt);
const newUser = new userModel({
name: name,
email: email,
password: hashPassword,
});
await newUser.save();
// Generate JWT token
const token = jwt.sign({ id: newUser.id, email: newUser.email }, "token", {
expiresIn: "24h",
});
return res.status(200).json({
token_type: "Bearer",
expires_in: "24h",
access_token: token,
refresh_token: token,
});
};
export const forgotPasswordRouteHandler = async (req, res, email) => {
let foundUser = await userModel.findOne({ email: email });
if (!foundUser) {
return res.status(400).json({
errors: { email: ["The email does not match any existing user."] },
});
} else {
let token = randomToken(20);
// send mail with defined transport object
let info = await transporter.sendMail({
from: "admin@jsonapi.com", // sender address
to: email, // list of receivers
subject: "Reset Password", // Subject line
html: `<p>You requested to change your password. If this request was not made by you, please contact us. Access <a href='${process.env.APP_URL_CLIENT}/auth/reset-password?token=${token}&email=${email}'>this link</a> to reset your password.</p>`, // html body
});
const dataSent = {
data: "password-forgot",
attributes: {
redirect_url: `${process.env.APP_URL_API}/password-reset`,
email: email,
},
};
// save token in db
await passwordResetModel.create({
email: foundUser.email,
token: token,
created_at: new Date(),
});
return res.status(204).json(dataSent);
}
};
export const resetPasswordRouteHandler = async (req, res) => {
const foundUser = await userModel.findOne({
email: req.body.data.attributes.email,
});
if (!foundUser || !foundToken) {
return res.status(400).json({
errors: {
email: ["The email or token does not match any existing user."],
},
});
} else {
const { password, password_confirmation } = req.body.data.attributes;
// validate password
if (password.length < 8) {
return res.status(400).json({
errors: {
password: ["The password should have at lest 8 characters."],
},
});
// Destroy the session
req.session.destroy((err) => {
if (err) {
logger.error("Error destroying session:", err);
return res.status(500).json({ error: "Failed to logout" });
}
if (password != password_confirmation) {
return res.status(400).json({
errors: {
password: ["The password and password confirmation must match."],
},
});
}
const salt = await bcrypt.genSalt(10);
const hashPassword = await bcrypt.hash(password, salt);
await passwordResetModel.deleteOne({ email: foundUser.email });
await userModel.updateOne(
{ email: foundUser.email },
{ $set: { password: hashPassword } }
// Construct the Keycloak logout URL with the redirect URI
const logoutUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/logout`;
const encodedRedirectUri = encodeURIComponent(
`${process.env.APP_URL_CLIENT}${redirectUrl}`,
);
return res.sendStatus(204);
}
// Redirect to Keycloak logout with the redirect URI
res.redirect(
`${logoutUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&post_logout_redirect_uri=${encodedRedirectUri}`,
);
});
};
// Token validation - protected route middleware
export const validateTokenMiddleware = keycloak.protect();
// Check if user has a specific role
export const hasRole = (role) => {
return keycloak.protect((token) => {
return token && token.hasRole(role);
});
};
// Get user info from the token
export const getUserInfoHandler = (req, res) => {
if (req.kauth && req.kauth.grant) {
const token = req.kauth.grant.access_token;
const userInfo = {
id: token.content.sub,
email: token.content.email,
name:
token.content.name ||
`${token.content.given_name || ""} ${token.content.family_name || ""}`.trim(),
roles: token.content.realm_access?.roles || [],
};
return res.json(userInfo);
}
return res.status(401).json({ error: "Not authenticated" });
};
// Register route - Since we're using Keycloak, registration should be handled there
// This endpoint will redirect to Keycloak's registration page
export const registerRouteHandler = (req, res) => {
const registrationUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/registrations`;
const redirectUri = encodeURIComponent(
process.env.APP_URL_CLIENT + "/auth/login",
);
res.redirect(
`${registrationUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
);
};
// Forgot password handler - redirect to Keycloak's reset password page
export const forgotPasswordRouteHandler = (req, res) => {
const resetUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/login-actions/reset-credentials`;
const redirectUri = encodeURIComponent(
process.env.APP_URL_CLIENT + "/auth/login",
);
res.redirect(
`${resetUrl}?client_id=${process.env.KEYCLOAK_CLIENT_ID}&redirect_uri=${redirectUri}`,
);
};
// Refresh token handler
export const refreshTokenRouteHandler = (req, res) => {
if (
!req.session ||
!req.session["keycloak-token"] ||
!req.session["keycloak-token"].refresh_token
) {
return res.status(401).json({ error: "No refresh token available" });
}
const refreshToken = req.session["keycloak-token"].refresh_token;
const tokenUrl = `${process.env.KEYCLOAK_URL}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`;
axios
.post(
tokenUrl,
new URLSearchParams({
grant_type: "refresh_token",
client_id: process.env.KEYCLOAK_CLIENT_ID,
client_secret: process.env.KEYCLOAK_CLIENT_SECRET,
refresh_token: refreshToken,
}).toString(),
{
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
},
)
.then((response) => {
// Update session with new tokens
req.session["keycloak-token"] = {
...req.session["keycloak-token"],
access_token: response.data.access_token,
refresh_token: response.data.refresh_token,
expires_at: new Date().getTime() + response.data.expires_in * 1000,
};
// Save session and return new token info
req.session.save(() => {
res.json({
access_token: response.data.access_token,
expires_at: req.session["keycloak-token"].expires_at,
});
});
})
.catch((error) => {
logger.error(
"Token refresh error:",
error.response?.data || error.message,
);
// If refresh token is invalid, clear the session
if (error.response?.status === 400) {
req.session.destroy();
}
res.status(500).json({ error: "Failed to refresh token" });
});
};
// Example of how to set up your routes in Express
/*
import express from "express";
const app = express();
// Apply session middleware
app.use(sessionMiddleware);
// Initialize Keycloak middleware
app.use(keycloak.middleware());
// Set up routes
app.get('/auth/login', loginRouteHandler);
app.get('/auth/logout', logoutRouteHandler);
app.get('/auth/register', registerRouteHandler);
app.get('/auth/forgot-password', forgotPasswordRouteHandler);
// Protected route example
app.get('/api/profile', validateTokenMiddleware, getUserInfoHandler);
// Admin-only route example
app.get('/api/admin', hasRole('admin'), (req, res) => {
res.json({ message: 'Admin access granted' });
});
*/

View File

@ -1,121 +0,0 @@
import dotenv from "dotenv";
import { fillamentModel } from "../../schemas/fillament.schema.js"
import jwt from "jsonwebtoken";
import log4js from "log4js";
import mongoose from "mongoose";
dotenv.config();
const logger = log4js.getLogger("Fillaments");
logger.level = process.env.LOG_LEVEL;
export const listFillamentsRouteHandler = async (req, res, page = 1, limit = 25, property = "", filter = {}) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
let fillament;
let aggregateCommand = [];
if (filter != {}) { // use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != "") {
aggregateCommand.push({ $group: { _id: `$${property}` } }) // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" }}); // rename _id to the property name
} else {
aggregateCommand.push({ $project: { image: 0, url: 0 }});
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
console.log(aggregateCommand)
fillament = await fillamentModel.aggregate(aggregateCommand)
logger.trace(`List of filaments (Page ${page}, Limit ${limit}, Property ${property}):`, fillament);
res.send(fillament);
} catch (error) {
logger.error("Error listing filaments:", error);
res.status(500).send({ error: error });
}
};
export const getFillamentRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the fillament with the given remote address
const fillament = await fillamentModel.findOne({
_id: id
});
if (!fillament) {
logger.warn(`Fillament not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
}
logger.trace(`Fillament with ID: ${id}:`, fillament);
res.send(fillament);
} catch (error) {
logger.error("Error fetching Fillament:", error);
res.status(500).send({ error: error.message });
}
};
export const editFillamentRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the fillament with the given remote address
const fillament = await fillamentModel.findOne({ _id: id });
if (!fillament) { // Error handling
logger.warn(`Fillament not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
}
logger.trace(`Fillament with ID: ${id}:`, fillament);
try {
const { created_at, updated_at, started_at, status, ...updateData } = req.body;
const result = await fillamentModel.updateOne(
{ _id: id },
{ $set: updateData }
);
if (result.nModified === 0) {
logger.error("No Fillament updated.");
res.status(500).send({ error: "No fillaments updated." });
}
} catch (updateError) {
logger.error("Error updating fillament:", updateError);
res.status(500).send({ error: updateError.message });
}
res.send("OK");
} catch (fetchError) {
logger.error("Error fetching fillament:", fetchError);
res.status(500).send({ error: fetchError.message });
}
};
export const newFillamentRouteHandler = async (req, res) => {
try {
let { ...newFillament } = req.body;
newFillament = { ...newFillament, created_at: new Date(), updated_at: new Date() }
const result = await fillamentModel.create(newFillament);
if (result.nCreated === 0) {
logger.error("No fillament created.");
res.status(500).send({ error: "No fillament created." });
}
res.status(200).send({ status: "ok" });
} catch (updateError) {
logger.error("Error updating fillament:", updateError);
res.status(500).send({ error: updateError.message });
}
};

View File

@ -1,55 +1,182 @@
import dotenv from "dotenv";
import { gcodeFileModel } from "../../schemas/gcodefile.schema.js"
import { gcodeFileModel } from "../../schemas/gcodefile.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
import multer from "multer";
import crypto from "crypto";
import path from "path";
import fs from "fs";
import mongoose from "mongoose";
import { extractConfigBlock } from "../../util/index.js";
dotenv.config();
const logger = log4js.getLogger("GCodeFiles");
logger.level = process.env.LOG_LEVEL;
// Set storage engine
const gcodeStorage = multer.diskStorage({
destination: process.env.GCODE_STORAGE,
filename: async function (req, file, cb) {
// Retrieve custom file name from request body
const customFileName = req.params.id || "default"; // Default to 'default' if not provided
// Create the final filename ensuring it ends with .gcode
const finalFilename = `${customFileName}.gcode`;
// Call callback with the final filename
cb(null, finalFilename);
},
});
// Initialise upload
const gcodeUpload = multer({
storage: gcodeStorage,
limits: { fileSize: 500000000 }, // 500MB limit
fileFilter: function (req, file, cb) {
checkFileType(file, cb);
},
}).single("gcodeFile"); // The name attribute of the file input in the HTML form
// Check file type
function checkFileType(file, cb) {
// Allowed ext
const filetypes = /\.(g|gco|gcode)$/;
// Check ext
const extname = filetypes.test(path.extname(file.originalname).toLowerCase());
if (extname) {
console.log(file);
return cb(null, true);
} else {
cb("Error: .g, .gco, and .gcode files only!");
}
}
export const listGCodeFilesRouteHandler = async (
req,
res,) => {
res,
page = 1,
limit = 25,
property = "",
filter = {},
search = "",
) => {
try {
// Fetch gcode files and group
const gcodeFiles = await gcodeFileModel.aggregate([
{
$group: {
_id: "$status",
totalQuantity: { $sum: "$quantity" },
totalPrice: { $sum: "$price" },
orders: { $push: "$$ROOT" }
}
}
]);
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
let gcodeFile;
let aggregateCommand = [];
if (search) {
// Add a text search match stage for name and brand fields
aggregateCommand.push({
$match: {
$text: {
$search: search,
},
},
});
}
aggregateCommand.push({
$lookup: {
from: "filaments", // The name of the Filament collection
localField: "filament",
foreignField: "_id",
as: "filament",
},
});
aggregateCommand.push({
$unwind: {
path: "$filament",
preserveNullAndEmptyArrays: true, // Keep documents without a matching filament
},
});
aggregateCommand.push({
$addFields: {
filament: "$filament",
},
});
if (Object.keys(filter).length > 0) {
// use filtering if present
aggregateCommand.push({ $match: filter });
}
if (property != "") {
aggregateCommand.push({ $group: { _id: `$${property}` } }); // group all same properties
aggregateCommand.push({ $project: { _id: 0, [property]: "$_id" } }); // rename _id to the property name
} else {
aggregateCommand.push({
$project: {
"filament.gcodeFileInfo.estimatedPrintingTimeNormalMode": 0,
url: 0,
"filament.image": 0,
"filament.createdAt": 0,
"filament.updatedAt": 0,
},
});
}
aggregateCommand.push({ $skip: skip });
aggregateCommand.push({ $limit: Number(limit) });
console.log(aggregateCommand);
gcodeFile = await gcodeFileModel.aggregate(aggregateCommand);
logger.trace(
`List of gcode files (Page ${page}, Limit ${limit}, Property ${property}):`,
gcodeFile,
);
res.send(gcodeFile);
} catch (error) {
logger.error("Error listing print jobs:", error);
logger.error("Error listing gcode files:", error);
res.status(500).send({ error: error });
}
};
export const getGCodeFileRouteHandler = async (req, res) => {
export const getGCodeFileContentRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the gcodeFile with the given id
const gcodeFile = await gcodeFileModel.findOne({
_id: id
const gcodeFile = await gcodeFileModel.findOne({
_id: id,
});
if (!gcodeFile) {
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "GCode file not found." });
}
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
res.send(gcodeFile);
logger.trace(`Returning GCode File contents with ID: ${id}:`);
const filePath = path.join(
process.env.GCODE_STORAGE,
gcodeFile.gcodeFileName,
);
// Read the file
fs.readFile(filePath, "utf8", (err, data) => {
if (err) {
if (err.code === "ENOENT") {
// File not found
return res.status(404).send({ error: "File not found!" });
} else {
// Other errors
return res.status(500).send({ error: "Error reading file." });
}
}
// Send the file contents in the response
res.send(data);
});
} catch (error) {
logger.error("Error fetching GCodeFile:", error);
res.status(500).send({ error: error.message });
@ -63,19 +190,22 @@ export const editGCodeFileRouteHandler = async (req, res) => {
// Fetch the gcodeFile with the given id
const gcodeFile = await gcodeFileModel.findOne({ _id: id });
if (!gcodeFile) { // Error handling
if (!gcodeFile) {
// Error handling
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "GCode file not found." });
}
logger.trace(`GCodeFile with ID: ${id}:`, gcodeFile);
try {
const { created_at, updated_at, started_at, status, ...updateData } = req.body;
const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body;
console.log("Update data", updateData);
const result = await gcodeFileModel.updateOne(
{ _id: id },
{ $set: updateData }
{ $set: updateData },
);
if (result.nModified === 0) {
logger.error("No gcodeFile updated.");
@ -86,8 +216,157 @@ export const editGCodeFileRouteHandler = async (req, res) => {
res.status(500).send({ error: updateError.message });
}
res.send("OK");
} catch (fetchError) {
logger.error("Error fetching gcodeFile:", fetchError);
//res.status(500).send({ error: fetchError.message });
}
};
export const newGCodeFileRouteHandler = async (req, res) => {
try {
let { ...newGCodeFile } = req.body;
newGCodeFile = {
...newGCodeFile,
createdAt: new Date(),
updatedAt: new Date(),
};
const result = await gcodeFileModel.create(newGCodeFile);
if (result.nCreated === 0) {
logger.error("No gcode file created.");
res.status(500).send({ error: "No filament created." });
}
res.status(200).send(result);
} catch (updateError) {
logger.error("Error updating filament:", updateError);
res.status(500).send({ error: updateError.message });
}
};
export const parseGCodeFileHandler = async (req, res) => {
try {
// Use the same upload middleware as the uploadGCodeFileContentRouteHandler
gcodeUpload(req, res, async (err) => {
if (err) {
return res.status(500).send({
error: err,
});
}
if (req.file == undefined) {
return res.send({
message: "No file selected!",
});
}
try {
// Get the path to the uploaded file
const filePath = path.join(req.file.destination, req.file.filename);
// Read the file content
const fileContent = fs.readFileSync(filePath, "utf8");
// Extract the config block
const configInfo = extractConfigBlock(fileContent);
// Return the config as JSON
res.json(configInfo);
// Optionally clean up the file after processing if it's not needed
fs.unlinkSync(filePath);
} catch (parseError) {
logger.error("Error parsing GCode file:", parseError);
res.status(500).send({ error: parseError.message });
}
});
} catch (error) {
logger.error("Error in parseGCodeFileHandler:", error);
res.status(500).send({ error: error.message });
}
};
export const uploadGCodeFileContentRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the gcodeFile with the given id
const gcodeFile = await gcodeFileModel.findOne({ _id: id });
if (!gcodeFile) {
// Error handling
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "GCode file not found." });
}
logger.trace(`GCodeFile with ID: ${id}`);
try {
gcodeUpload(req, res, async (err) => {
if (err) {
res.status(500).send({
error: err,
});
} else {
if (req.file == undefined) {
res.send({
message: "No file selected!",
});
} else {
// Get the path to the uploaded file
const filePath = path.join(req.file.destination, req.file.filename);
// Read the file content
const fileContent = fs.readFileSync(filePath, "utf8");
// Update the gcodeFile document with the filename and the extracted config
const result = await gcodeFileModel.updateOne(
{ _id: id },
{
$set: {
gcodeFileName: req.file.filename,
},
},
);
if (result.nModified === 0) {
logger.error("No gcodeFile updated.");
res.status(500).send({ error: "No gcodeFiles updated." });
}
res.send({
status: "OK",
file: `${req.file.filename}`,
});
}
}
});
} catch (updateError) {
logger.error("Error updating gcodeFile:", updateError);
res.status(500).send({ error: updateError.message });
}
} catch (fetchError) {
logger.error("Error fetching gcodeFile:", fetchError);
res.status(500).send({ error: fetchError.message });
}
};
};
export const getGCodeFileRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the gcodeFile with the given id
const gcodeFile = await gcodeFileModel
.findOne({
_id: id,
})
.populate("filament");
if (!gcodeFile) {
logger.warn(`GCodeFile not found with supplied id.`);
return res.status(404).send({ error: "GCode file not found." });
}
logger.trace(`GCodeFile with ID: ${id}:`);
res.send(gcodeFile);
} catch (error) {
logger.error("Error fetching GCodeFile:", error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,8 +1,5 @@
import bcrypt from "bcrypt";
import dotenv from "dotenv";
import { userModel } from "../../schemas/user.schema.js";
import { printerModel } from "../../schemas/printer.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
dotenv.config();
@ -14,7 +11,7 @@ export const listPrintersRouteHandler = async (
req,
res,
page = 1,
limit = 25
limit = 25,
) => {
try {
// Calculate the skip value based on the page number and limit
@ -32,18 +29,33 @@ export const listPrintersRouteHandler = async (
};
export const getPrinterRouteHandler = async (req, res) => {
const remoteAddress = req.params.remoteAddress;
const id = req.params.id;
try {
// Fetch the printer with the given id
const printer = await printerModel.findOne({ remoteAddress });
const printer = await printerModel.findOne({ _id: id })
.populate('subJobs')
.populate('currentJob')
.populate({
path: 'currentJob',
populate: {
path: 'gcodeFile'
}
})
.populate('currentSubJob')
.populate({
path: 'subJobs',
populate: {
path: 'printJob'
}
});
if (!printer) {
logger.warn(`Printer with remote address ${remoteAddress} not found.`);
logger.warn(`Printer with id ${id} not found.`);
return res.status(404).send({ error: "Printer not found" });
}
logger.trace(`Printer with remote address ${remoteAddress}:`, printer);
logger.trace(`Printer with id ${id}:`, printer);
res.send(printer);
} catch (error) {
logger.error("Error fetching printer:", error);
@ -52,23 +64,13 @@ export const getPrinterRouteHandler = async (req, res) => {
};
export const editPrinterRouteHandler = async (req, res) => {
const remoteAddress = req.params.remoteAddress;
const { friendlyName } = req.body;
const id = req.params.id;
try {
// Fetch the printer with the given remote address
const printer = await printerModel.findOne({ remoteAddress });
if (!printer) {
logger.warn(`Printer with remote address ${remoteAddress} not found.`);
return res.status(404).send({ error: "Printer not found" });
}
logger.trace(`Editing printer with remote address ${remoteAddress}:`, printer);
try {
const result = await printerModel.updateOne(
{ remoteAddress: remoteAddress },
{ $set: req.body }
{ _id: id },
{ $set: req.body },
);
if (result.nModified === 0) {
logger.error("No printers updated.");
@ -83,4 +85,52 @@ export const editPrinterRouteHandler = async (req, res) => {
logger.error("Error fetching printer:", fetchError);
res.status(500).send({ error: fetchError.message });
}
};
};
export const createPrinterRouteHandler = async (req, res) => {
try {
const {
printerName,
moonraker,
tags = [],
firmware = "n/a",
} = req.body;
// Validate required fields
if (!printerName || !moonraker) {
logger.warn("Missing required fields in printer creation request");
return res.status(400).send({
error: "Missing required fields. printerName and moonraker configuration are required."
});
}
// Validate moonraker configuration
if (!moonraker.host || !moonraker.port || !moonraker.protocol) {
logger.warn("Invalid moonraker configuration in printer creation request");
return res.status(400).send({
error: "Invalid moonraker configuration. host, port, protocol are required."
});
}
// Create new printer instance
const newPrinter = new printerModel({
printerName,
moonraker,
tags,
firmware,
online: false,
state: {
type: "offline"
}
});
// Save the printer
const savedPrinter = await newPrinter.save();
logger.info(`Created new printer: ${printerName}`);
res.status(201).send(savedPrinter);
} catch (error) {
logger.error("Error creating printer:", error);
res.status(500).send({ error: error.message });
}
};

View File

@ -1,5 +1,7 @@
import dotenv from "dotenv";
import { printJobModel } from "../../schemas/printjob.schema.js"
import mongoose from "mongoose";
import { printJobModel } from "../../schemas/printjob.schema.js";
import { printSubJobModel } from "../../schemas/printsubjob.schema.js";
import jwt from "jsonwebtoken";
import log4js from "log4js";
@ -12,14 +14,20 @@ export const listPrintJobsRouteHandler = async (
req,
res,
page = 1,
limit = 25
limit = 25,
) => {
try {
// Calculate the skip value based on the page number and limit
const skip = (page - 1) * limit;
// Fetch print jobs with pagination
const printJobs = await printJobModel.find().skip(skip).limit(limit);
const printJobs = await printJobModel
.find()
.sort({ createdAt: -1 })
.skip(skip)
.limit(limit)
.populate("subJobs", "state")
.populate("gcodeFile", "name");
logger.trace(`List of print jobs (Page ${page}, Limit ${limit}):`);
res.send(printJobs);
@ -34,10 +42,14 @@ export const getPrintJobRouteHandler = async (req, res) => {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the printJob with the given id
const printJob = await printJobModel.findOne({
_id: id
});
const printJob = await printJobModel
.findOne({
_id: id,
})
.populate("printers", "printerName state")
.populate("gcodeFile")
.populate("subJobs");
if (!printJob) {
logger.warn(`PrintJob not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
@ -55,34 +67,92 @@ export const editPrintJobRouteHandler = async (req, res) => {
try {
// Get ID from params
const id = new mongoose.Types.ObjectId(req.params.id);
// Fetch the printJob with the given id
const printJob = await printJobModel.findOne({ _id: id });
if (!printJob) { // Error handling
if (!printJob) {
logger.warn(`PrintJob not found with supplied id.`);
return res.status(404).send({ error: "Print job not found." });
}
logger.trace(`PrintJob with ID: ${id}:`, printJob);
try {
const { created_at, updated_at, started_at, status, ...updateData } = req.body;
const result = await printJobModel.updateOne(
{ _id: id },
{ $set: updateData }
);
if (result.nModified === 0) {
logger.error("No printJobs updated.");
res.status(500).send({ error: "No printJobs updated." });
}
} catch (updateError) {
logger.error("Error updating printJob:", updateError);
res.status(500).send({ error: updateError.message });
const { createdAt, updatedAt, started_at, status, ...updateData } =
req.body;
const result = await printJobModel.updateOne(
{ _id: id },
{ $set: updateData },
);
if (result.nModified === 0) {
logger.warn("No printJobs updated.");
return res.status(400).send({ error: "No printJobs updated." });
}
res.send("OK");
} catch (fetchError) {
logger.error("Error fetching printJob:", fetchError);
res.status(500).send({ error: fetchError.message });
res.send({ message: "Print job updated successfully" });
} catch (error) {
logger.error("Error updating printJob:", error);
res.status(500).send({ error: error.message });
}
};
};
export const createPrintJobRouteHandler = async (req, res) => {
try {
const { gcodeFile, printers, quantity = 1 } = req.body;
if (!printers || printers.length === 0) {
return res
.status(400)
.send({ error: "At least one printer must be specified" });
}
// Convert printer IDs to ObjectIds
const printerIds = printers.map((id) => new mongoose.Types.ObjectId(id));
// Create new print job
const newPrintJob = new printJobModel({
state: { type: "draft" },
printers: printerIds,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
quantity,
subJobs: [], // Initialize empty array for subjob references
createdAt: new Date(),
updatedAt: new Date(),
startedAt: new Date(),
});
// Save the print job first to get its ID
const savedPrintJob = await newPrintJob.save();
// Create subjobs array with sequential numbers based on quantity
const subJobs = await Promise.all(
Array.from({ length: quantity }, (_, index) => {
const subJob = new printSubJobModel({
printer: printerIds[index % printerIds.length], // Distribute across available printers
printJob: savedPrintJob._id,
gcodeFile: gcodeFile ? new mongoose.Types.ObjectId(gcodeFile) : null,
subJobId: `subjob-${index + 1}`,
state: { type: "draft" },
number: index + 1,
createdAt: new Date(),
updatedAt: new Date(),
});
return subJob.save();
}),
);
// Update the print job with the subjob references
savedPrintJob.subJobs = subJobs.map((subJob) => subJob._id);
await savedPrintJob.save();
logger.trace(
`Created new print job with ID: ${savedPrintJob._id} and ${subJobs.length} subjobs`,
);
res.status(201).send({ printJob: savedPrintJob, subJobs });
} catch (error) {
logger.error("Error creating print job:", error);
res.status(500).send({ error: error.message });
}
};
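// Illustrative request body for this handler (the IDs are example values only):
//   {
//     "gcodeFile": "662f1c0e9b1e8a0012ab34cd",
//     "printers": ["662f1c0e9b1e8a0012ab34aa", "662f1c0e9b1e8a0012ab34bb"],
//     "quantity": 5
//   }
// With quantity = 5 and two printers, index % printerIds.length distributes the
// subjobs round-robin: the first printer gets numbers 1, 3, 5 and the second
// gets 2, 4. The 201 response contains the saved printJob plus the created
// subJobs.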

View File

@ -1,8 +1,251 @@
function parseStringIfNumber(input) {
if (typeof input === 'string' && !isNaN(input) && !isNaN(parseFloat(input))) {
return parseFloat(input);
if (typeof input === "string" && !isNaN(input) && !isNaN(parseFloat(input))) {
return parseFloat(input);
}
return input;
}
export {parseStringIfNumber};
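// For reference, a few illustrative calls showing the intended behaviour:
//   parseStringIfNumber("12.5") // -> 12.5   (numeric string is parsed)
//   parseStringIfNumber("50%")  // -> "50%"  (isNaN("50%") is true, so untouched)
//   parseStringIfNumber(42)     // -> 42     (non-strings pass straight through)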
function convertToCamelCase(obj) {
const result = {};
for (const key in obj) {
if (Object.prototype.hasOwnProperty.call(obj, key)) {
const value = obj[key];
// Convert the key to camelCase
let camelKey = key
// First handle special cases with spaces, brackets and other characters
.replace(/\s*\[.*?\]\s*/g, "") // Remove brackets and their contents
.replace(/\s+/g, " ") // Normalize spaces
.trim()
// Split by common separators (space, underscore, hyphen)
.split(/[\s_-]/)
// Convert to camelCase
.map((word, index) => {
// Remove any non-alphanumeric characters
word = word.replace(/[^a-zA-Z0-9]/g, "");
// Lowercase first word, uppercase others
return index === 0
? word.toLowerCase()
: word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
})
.join("");
// Handle values that are objects recursively
if (
value !== null &&
typeof value === "object" &&
!Array.isArray(value)
) {
result[camelKey] = convertToCamelCase(value);
} else {
result[camelKey] = value;
}
}
}
return result;
}
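// Illustrative input/output for convertToCamelCase (example keys only):
//   convertToCamelCase({ "total filament used [g]": 12.3, "print_settings": { "layer height": 0.2 } })
//   // -> { totalFilamentUsed: 12.3, printSettings: { layerHeight: 0.2 } }
// Bracketed units are stripped first, then keys are split on spaces, underscores
// and hyphens and re-joined in camelCase; nested objects are converted
// recursively.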
function extractConfigBlock(fileContent, useCamelCase = true) {
const configObject = {};
// Extract header information
const headerBlockRegex =
/; HEADER_BLOCK_START([\s\S]*?)(?:; HEADER_BLOCK_END|$)/;
const headerBlockMatch = fileContent.match(headerBlockRegex);
if (headerBlockMatch && headerBlockMatch[1]) {
const headerLines = headerBlockMatch[1].split("\n");
headerLines.forEach((line) => {
// Match lines with info after semicolon
const keyValueRegex = /^\s*;\s*([^:]+?):\s*(.*?)\s*$/;
const simpleValueRegex = /^\s*;\s*(.*?)\s*$/;
// Try key-value format first
let match = line.match(keyValueRegex);
if (match) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
if (!isNaN(value) && value !== "") {
value = Number(value);
}
configObject[key] = value;
} else {
// Try the simple format like "; generated by OrcaSlicer 2.1.1 on 2025-04-28 at 13:30:11"
match = line.match(simpleValueRegex);
if (match && match[1] && !match[1].includes("HEADER_BLOCK")) {
const text = match[1].trim();
// Extract slicer info
const slicerMatch = text.match(
/generated by (.*?) on (.*?) at (.*?)$/,
);
if (slicerMatch) {
configObject["slicer"] = slicerMatch[1].trim();
configObject["date"] = slicerMatch[2].trim();
configObject["time"] = slicerMatch[3].trim();
} else {
// Just add as a general header entry if it doesn't match any specific pattern
const key = `header_${Object.keys(configObject).length}`;
configObject[key] = text;
}
}
}
});
}
// Extract thumbnail data
const thumbnailBlockRegex =
/; THUMBNAIL_BLOCK_START([\s\S]*?)(?:; THUMBNAIL_BLOCK_END|$)/;
const thumbnailBlockMatch = fileContent.match(thumbnailBlockRegex);
if (thumbnailBlockMatch && thumbnailBlockMatch[1]) {
const thumbnailLines = thumbnailBlockMatch[1].split("\n");
let base64Data = "";
let thumbnailInfo = {};
thumbnailLines.forEach((line) => {
// Extract thumbnail dimensions and size from the line "thumbnail begin 640x640 27540"
const thumbnailHeaderRegex = /^\s*;\s*thumbnail begin (\d+)x(\d+) (\d+)/;
const match = line.match(thumbnailHeaderRegex);
if (match) {
thumbnailInfo.width = parseInt(match[1], 10);
thumbnailInfo.height = parseInt(match[2], 10);
thumbnailInfo.size = parseInt(match[3], 10);
} else if (
line.trim().startsWith("; ") &&
!line.includes("THUMBNAIL_BLOCK")
) {
// Collect base64 data (remove the leading semicolon and space and thumbnail end)
const dataLine = line.trim().substring(2);
if (dataLine && dataLine !== "thumbnail end") {
base64Data += dataLine;
}
}
});
// Add thumbnail data to config object
if (base64Data) {
configObject.thumbnail = {
data: base64Data,
...thumbnailInfo,
};
}
}
// Extract CONFIG_BLOCK
const configBlockRegex =
/; CONFIG_BLOCK_START([\s\S]*?)(?:; CONFIG_BLOCK_END|$)/;
const configBlockMatch = fileContent.match(configBlockRegex);
if (configBlockMatch && configBlockMatch[1]) {
// Extract each config line
const configLines = configBlockMatch[1].split("\n");
// Process each line
configLines.forEach((line) => {
// Check if the line starts with a semicolon and has an equals sign
const configLineRegex = /^\s*;\s*([^=]+?)\s*=\s*(.*?)\s*$/;
const match = line.match(configLineRegex);
if (match) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
if (value === "true" || value === "false") {
value = value === "true";
} else if (!isNaN(value) && value !== "") {
// Check if it's a number (but not a percentage)
if (!value.includes("%")) {
value = Number(value);
}
}
configObject[key] = value;
}
});
}
// Extract additional variables that appear after EXECUTABLE_BLOCK_END
const additionalVarsRegex =
/; EXECUTABLE_BLOCK_(?:START|END)([\s\S]*?)(?:; CONFIG_BLOCK_START|$)/i;
const additionalVarsMatch = fileContent.match(additionalVarsRegex);
if (additionalVarsMatch && additionalVarsMatch[1]) {
const additionalLines = additionalVarsMatch[1].split("\n");
additionalLines.forEach((line) => {
// Match both standard format and the special case for "total filament cost"
const varRegex =
/^\s*;\s*((?:filament used|filament cost|total filament used|total filament cost|total layers count|estimated printing time)[^=]*?)\s*=\s*(.*?)\s*$/;
const match = line.match(varRegex);
if (match) {
const key = match[1].replace(/\[([^\]]+)\]/g, "$1").trim();
let value = match[2].trim();
// Clean up values - remove units in brackets and handle special cases
if (key.includes("filament used")) {
// Extract just the numeric value, ignoring units in brackets
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
} else if (key.includes("filament cost")) {
// Extract just the numeric value
const numMatch = value.match(/(\d+\.\d+)/);
if (numMatch) {
value = parseFloat(numMatch[1]);
}
} else if (key.includes("total layers count")) {
value = parseInt(value, 10);
} else if (key.includes("estimated printing time")) {
// Keep as string but trim any additional whitespace
value = value.trim();
}
configObject[key] = value;
}
});
}
// Also extract extrusion width settings
const extrusionWidthRegex = /;\s*(.*?)\s*extrusion width\s*=\s*(.*?)mm/g;
let extrusionMatch;
while ((extrusionMatch = extrusionWidthRegex.exec(fileContent)) !== null) {
const settingName = extrusionMatch[1].trim();
const settingValue = parseFloat(extrusionMatch[2].trim());
configObject[`${settingName} extrusion width`] = settingValue;
}
// Extract additional parameters after CONFIG_BLOCK_END if they exist
const postConfigParams = /; CONFIG_BLOCK_END\s*\n([\s\S]*?)$/;
const postConfigMatch = fileContent.match(postConfigParams);
if (postConfigMatch && postConfigMatch[1]) {
const postConfigLines = postConfigMatch[1].split("\n");
postConfigLines.forEach((line) => {
// Match lines with format "; parameter_name = value"
const paramRegex = /^\s*;\s*([^=]+?)\s*=\s*(.*?)\s*$/;
const match = line.match(paramRegex);
if (match) {
const key = match[1].trim();
let value = match[2].trim();
// Try to convert value to appropriate type
if (value === "true" || value === "false") {
value = value === "true";
} else if (!isNaN(value) && value !== "") {
// Check if it's a number (but not a percentage)
if (!value.includes("%")) {
value = Number(value);
}
}
// Add to config object if not already present
if (!configObject[key]) {
configObject[key] = value;
}
}
});
}
// Apply camelCase conversion if requested
return useCamelCase ? convertToCamelCase(configObject) : configObject;
}
export { parseStringIfNumber, convertToCamelCase, extractConfigBlock };
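// A minimal sketch of how extractConfigBlock might be wired up when a G-code
// file is read from disk; the import path and file location are assumptions,
// not part of this module:
//   import fs from "fs";
//   import { extractConfigBlock } from "./utils/parseGcode.js"; // hypothetical path
//
//   const gcode = fs.readFileSync("./uploads/part.gcode", "utf8");
//   const config = extractConfigBlock(gcode);            // camelCase keys (default)
//   const rawConfig = extractConfigBlock(gcode, false);  // keep the slicer's original keys
//   console.log(config.slicer, config.totalLayersCount);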