Initial work: web server broke again; trying to get scale and rotation set up
backend/Dockerfile (Normal file, 86 lines added)
@@ -0,0 +1,86 @@
# ---------- Stage 1: build frontend ----------
FROM node:20-bookworm-slim AS fe

WORKDIR /fe
RUN corepack enable && corepack prepare pnpm@9.12.0 --activate

# deps
COPY frontend/package.json ./
RUN pnpm install --no-frozen-lockfile

# sources
COPY frontend/index.html ./
COPY frontend/vite.config.ts ./
COPY frontend/src ./src

# build to /fe/dist
RUN pnpm run build


# ---------- Stage 2: backend + slicer ----------
FROM node:20-bookworm-slim

# OS deps: Qt libs for Orca + native build deps for better-sqlite3 + tini
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    libglib2.0-0 \
    libx11-6 libxext6 libxrender1 libsm6 \
    libxkbcommon0 libfontconfig1 libfreetype6 libnss3 libxi6 libxrandr2 \
    libxfixes3 libdrm2 libxdamage1 libxcomposite1 libwayland-client0 libxcb1 \
    python3 make g++ libsqlite3-dev \
    curl tini \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app
RUN corepack enable && corepack prepare pnpm@9.12.0 --activate

# backend deps
COPY backend/package.json ./
RUN pnpm install --no-frozen-lockfile

# backend sources
COPY backend/tsconfig.json ./
COPY backend/src ./src
RUN pnpm exec tsc

# copy built frontend from stage 1
COPY --from=fe /fe/dist /app/www

# slicer AppImage (from repo root; compose build.context must be repo root)
COPY slicer/ /app/slicer/
RUN set -eux; \
    F="$(ls -1 /app/slicer | head -n1)"; \
    test -n "$F"; \
    chmod +x "/app/slicer/$F"; \
    cd /app/slicer; \
    "/app/slicer/$F" --appimage-extract; \
    ln -sf /app/slicer/squashfs-root/AppRun /app/slicer/orca

RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    libglib2.0-0 \
    libx11-6 libxext6 libxrender1 libsm6 \
    libxkbcommon0 libfontconfig1 libfreetype6 libnss3 libxi6 libxrandr2 \
    libxfixes3 libdrm2 libxdamage1 libxcomposite1 libwayland-client0 libxcb1 \
    python3 make g++ libsqlite3-dev \
    curl tini \
    # 👇 add these for libGL.so.1 (Mesa)
    libgl1 libgl1-mesa-dri libglu1-mesa \
    && rm -rf /var/lib/apt/lists/*

# runtime dirs
RUN mkdir -p /app/data/uploads /app/data/outputs /app/profiles

# env
ENV QT_QPA_PLATFORM=offscreen
ENV NODE_ENV=production
ENV SLICER_CMD=/app/slicer/orca
ENV LIBGL_ALWAYS_SOFTWARE=1
# Orca 2.3.1 CLI template (plate-sliced 3MF)
ENV SLICER_ARGS="--debug 2 --arrange 1 --load-settings {MACHINE};{PROCESS} --load-filaments {FILAMENT} --slice 0 --export-3mf {OUTPUT} {INPUT}"
ENV SUPPORTS_ON=--support-material
ENV SUPPORTS_OFF=--no-support-material

EXPOSE 8080
ENTRYPOINT ["/usr/bin/tini","--"]
CMD ["node","dist/server.js"]
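Note: this SLICER_ARGS template still contains {MACHINE}, {PROCESS} and {FILAMENT} placeholders, while the backend later in this commit only substitutes {INPUT}, {OUTPUT}, {PROFILE} and {SUPPORTS}, so the remaining tokens would reach the slicer literally. A minimal TypeScript sketch of how the extra placeholders could be filled; the machine/process/filament paths are hypothetical inputs, not part of this commit:

// Sketch only: fills every placeholder in the SLICER_ARGS template before the
// string is split into argv. The machine/process/filament paths are hypothetical
// values that would need to be resolved elsewhere (e.g. from files under /app/profiles).
function fillSlicerArgs(template: string, p: {
  machine: string; process: string; filament: string; input: string; output: string;
}): string {
  return template
    .replaceAll("{MACHINE}", p.machine)
    .replaceAll("{PROCESS}", p.process)
    .replaceAll("{FILAMENT}", p.filament)
    .replaceAll("{INPUT}", p.input)
    .replaceAll("{OUTPUT}", p.output);
}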
backend/package.json (Normal file, 28 lines added)
@@ -0,0 +1,28 @@
{
  "name": "print-webui-backend",
  "version": "1.0.0",
  "type": "module",
  "private": true,
  "scripts": {
    "dev": "tsx watch src/server.ts",
    "build": "tsc",
    "start": "node dist/server.js"
  },
  "dependencies": {
    "better-sqlite3": "9.6.0",
    "cors": "2.8.5",
    "dotenv": "16.4.5",
    "express": "4.19.2",
    "mime-types": "2.1.35",
    "multer": "1.4.5-lts.1"
  },
  "devDependencies": {
    "@types/better-sqlite3": "7.6.9",
    "@types/cors": "2.8.17",
    "@types/express": "4.17.21",
    "@types/mime-types": "2.1.4",
    "@types/multer": "1.4.12",
    "tsx": "4.19.1",
    "typescript": "5.6.3"
  }
}
backend/src/db.ts (Normal file, 46 lines added)
@@ -0,0 +1,46 @@
import Database from "better-sqlite3";
import { JobRecord } from "./types.js";

const dbPath = process.env.DB_PATH || "/app/data/printjobs.sqlite";
const db = new Database(dbPath);

db.pragma("journal_mode = WAL");
db.exec(`
CREATE TABLE IF NOT EXISTS jobs (
  id TEXT PRIMARY KEY,
  filename TEXT NOT NULL,
  ext TEXT NOT NULL,
  machine TEXT,
  filament TEXT,
  process TEXT,
  profile TEXT, -- keep if you used single-profile earlier
  supports TEXT NOT NULL CHECK (supports IN ('on','off')),
  input_path TEXT NOT NULL,
  output_path TEXT,
  status TEXT NOT NULL CHECK (status IN ('queued','processing','done','error')),
  error_msg TEXT,
  created_at INTEGER NOT NULL,
  finished_at INTEGER
);
`);

// lightweight migrations (ignore errors if columns already exist)
for (const sql of [
  "ALTER TABLE jobs ADD COLUMN rot_x REAL DEFAULT 0",
  "ALTER TABLE jobs ADD COLUMN rot_y REAL DEFAULT 0",
  "ALTER TABLE jobs ADD COLUMN rot_z REAL DEFAULT 0",
  "ALTER TABLE jobs ADD COLUMN scale REAL DEFAULT 1"
]) { try { db.exec(sql); } catch {} }

export const insertJob = db.prepare(`
INSERT INTO jobs (id, filename, ext, profile, supports, input_path, output_path, status, error_msg, created_at, finished_at)
VALUES (@id, @filename, @ext, @profile, @supports, @inputPath, @outputPath, @status, @errorMsg, @createdAt, @finishedAt)
`);

export const updateStatus = db.prepare(`
UPDATE jobs SET status=@status, output_path=@outputPath, error_msg=@errorMsg, finished_at=@finishedAt WHERE id=@id
`);

export const getJob = db.prepare(`SELECT * FROM jobs WHERE id = ?`);
export const listJobs = db.prepare(`SELECT * FROM jobs ORDER BY created_at DESC LIMIT 200`);
export default db;
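The migrations above add rot_x, rot_y, rot_z and scale columns, but insertJob does not yet write them. A minimal sketch of what an extended insert might look like once the server passes those values; the @rotX/@rotY/@rotZ/@scale parameter names are assumptions, not part of this commit:

// Sketch only: same insert as above, extended to persist the transform columns
// created by the migrations. Callers would need to supply rotX/rotY/rotZ/scale.
export const insertJobWithTransform = db.prepare(`
INSERT INTO jobs (id, filename, ext, profile, supports, input_path, output_path, status, error_msg, created_at, finished_at, rot_x, rot_y, rot_z, scale)
VALUES (@id, @filename, @ext, @profile, @supports, @inputPath, @outputPath, @status, @errorMsg, @createdAt, @finishedAt, @rotX, @rotY, @rotZ, @scale)
`);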
backend/src/server.ts (Normal file, 267 lines added)
@@ -0,0 +1,267 @@
import "dotenv/config";
import express from "express";
import cors from "cors";
import multer from "multer";
import { randomUUID } from "crypto";
import { spawn } from "child_process";
import fs from "fs/promises";
import path from "path";
import { fileURLToPath } from "url";
import mime from "mime-types";
import { JobRecord } from "./types.js";
import db, { insertJob, updateStatus, getJob, listJobs } from "./db.js";
import os from "os";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

const app = express();
app.use(cors());
app.use(express.json());

const UPLOAD_DIR = process.env.UPLOAD_DIR || "/app/data/uploads";
const OUTPUT_DIR = process.env.OUTPUT_DIR || "/app/data/outputs";
const PROFILES_DIR = "/app/profiles";

const SLICER_CMD = process.env.SLICER_CMD || "/app/slicer/OrcaSlicer.AppImage";
const SLICER_ARGS = process.env.SLICER_ARGS || `--headless --load-config "{PROFILE}" {SUPPORTS} -o "{OUTPUT}" "{INPUT}"`;
const SUPPORTS_ON = process.env.SUPPORTS_ON || "--support-material";
const SUPPORTS_OFF = process.env.SUPPORTS_OFF || "--no-support-material";

await fs.mkdir(UPLOAD_DIR, { recursive: true });
await fs.mkdir(OUTPUT_DIR, { recursive: true });

const storage = multer.diskStorage({
  destination: (_req, _file, cb) => cb(null, UPLOAD_DIR),
  filename: (_req, file, cb) => {
    const id = randomUUID();
    const ext = (mime.extension(file.mimetype) || path.extname(file.originalname).slice(1) || "stl").toLowerCase();
    cb(null, `${id}.${ext}`);
  }
});
const upload = multer({
  storage,
  limits: { fileSize: 200 * 1024 * 1024 }, // 200MB cap
  fileFilter: (_req, file, cb) => {
    const allowed = ["model/stl", "application/vnd.ms-3mfdocument", "model/obj", "application/octet-stream"];
    if (allowed.includes(file.mimetype) || /\.(stl|3mf|obj|amf)$/i.test(file.originalname)) cb(null, true);
    else cb(new Error("Unsupported file type"));
  }
});

/** List available profiles (files in /profiles) */
app.get("/api/profiles", async (_req, res) => {
  try {
    const files = await fs.readdir(PROFILES_DIR);
    const only = files.filter(f => /\.(ini|json)$/i.test(f));
    res.json(only);
  } catch (e: any) {
    res.status(500).json({ error: e?.message || "Failed to list profiles" });
  }
});

/** Create a job: upload + slice */
app.post("/api/jobs", upload.single("model"), async (req, res) => {
  try {
    const profileName = String(req.body.profile || "").trim();
    const supports = (String(req.body.supports || "off").toLowerCase() === "on") ? "on" : "off";
    if (!req.file) throw new Error("No model file uploaded");
    if (!profileName) throw new Error("Profile is required");

    const profilePath = path.join(PROFILES_DIR, profileName);
    const profileStat = await fs.stat(profilePath).catch(() => null);
    if (!profileStat?.isFile()) throw new Error("Profile not found");

    // Persist job row
    const id = path.parse(req.file.filename).name; // filename was <uuid>.<ext>
    const ext = path.extname(req.file.filename).slice(1).toLowerCase();
    const inputPath = path.join(UPLOAD_DIR, req.file.filename);
    const outputDir = path.join(OUTPUT_DIR, id);
    await fs.mkdir(outputDir, { recursive: true });
    const outputPath = path.join(outputDir, "result.3mf");

    // read transform fields from the form (defaults)
    const rotX = Number(req.body.rotX ?? 0) || 0;
    const rotY = Number(req.body.rotY ?? 0) || 0;
    const rotZ = Number(req.body.rotZ ?? 0) || 0;
    const scale = Number(req.body.scale ?? 1) || 1;

    // create a path the slicer will use as input (transformed if needed)
    const transformedInput = path.join(outputDir, "transformed_input.stl");
    await transformStlIfNeeded(inputPath, transformedInput, { x: rotX, y: rotY, z: rotZ }, scale);

    const job: JobRecord = {
      id,
      filename: req.file.originalname,
      ext,
      profile: profilePath,
      supports,
      inputPath,
      outputPath: null,
      status: "queued",
      errorMsg: null,
      createdAt: Date.now(),
      finishedAt: null
    };
    insertJob.run(job);

    // Kick slicing async; slice the transformed copy rather than the raw upload
    runSlicing({ ...job, inputPath: transformedInput }, outputPath, supports, profilePath).catch(() => { /* already captured in DB */ });

    res.json({ id });
  } catch (e: any) {
    res.status(400).json({ error: e?.message || "Failed to create job" });
  }
});

/** Get one job */
app.get("/api/jobs/:id", (req, res) => {
  const row = getJob.get(req.params.id) as any;
  if (!row) return res.status(404).json({ error: "Not found" });
  res.json(mapRow(row));
});

/** List recent jobs */
app.get("/api/jobs", (_req, res) => {
  const rows = listJobs.all() as any[];
  res.json(rows.map(mapRow));
});

/** Serve outputs as static files */
app.use("/outputs", express.static(OUTPUT_DIR, { fallthrough: true }));

const FRONTEND_DIR = "/app/www";
app.use(express.static(FRONTEND_DIR));

/** Health */
app.get("/api/health", (_req, res) => res.json({ ok: true }));

const port = 8080;
app.listen(port, () => console.log(`Backend listening on :${port}`));


app.get(/^(?!\/api\/|\/outputs\/).*/, (_req, res) => {
  res.sendFile(path.join(FRONTEND_DIR, "index.html"));
});


/** Helpers */
function mapRow(r: any) {
  return {
    id: r.id,
    filename: r.filename,
    ext: r.ext,
    profile: path.basename(r.profile),
    supports: r.supports,
    inputPath: r.input_path,
    outputPath: r.output_path ? `/outputs/${r.id}/result.3mf` : null,
    status: r.status,
    errorMsg: r.error_msg,
    createdAt: r.created_at,
    finishedAt: r.finished_at
  };
}

async function transformStlIfNeeded(srcPath: string, dstPath: string, rot: { x: number; y: number; z: number }, scale: number): Promise<void> {
  const ext = path.extname(srcPath).toLowerCase();
  if (ext !== ".stl" || (rot.x === 0 && rot.y === 0 && rot.z === 0 && scale === 1)) {
    // no transform: just copy
    await fs.copyFile(srcPath, dstPath);
    return;
  }

  const buf = await fs.readFile(srcPath);
  const isAscii = buf.slice(0, 5).toString().toLowerCase() === "solid" && buf.includes(0x0a); // rough check
  const rx = (rot.x * Math.PI) / 180, ry = (rot.y * Math.PI) / 180, rz = (rot.z * Math.PI) / 180;
  const sx = scale, sy = scale, sz = scale;

  const apply = (x: number, y: number, z: number) => {
    // scale
    let X = x * sx, Y = y * sy, Z = z * sz;
    // rotate Z, Y, X (intrinsic)
    // Z
    let x1 = X * Math.cos(rz) - Y * Math.sin(rz);
    let y1 = X * Math.sin(rz) + Y * Math.cos(rz);
    let z1 = Z;
    // Y
    let x2 = x1 * Math.cos(ry) + z1 * Math.sin(ry);
    let y2 = y1;
    let z2 = -x1 * Math.sin(ry) + z1 * Math.cos(ry);
    // X
    let x3 = x2;
    let y3 = y2 * Math.cos(rx) - z2 * Math.sin(rx);
    let z3 = y2 * Math.sin(rx) + z2 * Math.cos(rx);
    return [x3, y3, z3] as const;
  };

  if (!isAscii) {
    // binary STL
    const dv = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
    const triangles = dv.getUint32(80, true);
    let off = 84;
    const out = Buffer.allocUnsafe(buf.byteLength);
    buf.copy(out, 0, 0, 84); // header + count
    for (let i = 0; i < triangles; i++) {
      // normal (we won't recompute normals; slicer will)
      dv.getFloat32(off, true); dv.getFloat32(off + 4, true); dv.getFloat32(off + 8, true);
      out.writeFloatLE(0, off); out.writeFloatLE(0, off + 4); out.writeFloatLE(1, off + 8);
      off += 12;
      for (let v = 0; v < 3; v++) {
        const x = dv.getFloat32(off, true);
        const y = dv.getFloat32(off + 4, true);
        const z = dv.getFloat32(off + 8, true);
        const [nx, ny, nz] = apply(x, y, z);
        out.writeFloatLE(nx, off);
        out.writeFloatLE(ny, off + 4);
        out.writeFloatLE(nz, off + 8);
        off += 12;
      }
      // attr
      out.writeUInt16LE(0, off); off += 2;
    }
    await fs.writeFile(dstPath, out);
  } else {
    // basic ASCII STL (slow but fine for small files)
    const txt = buf.toString("utf8");
    // vertex lines are normally indented, so capture and keep the leading whitespace
    const out = txt.replace(/^(\s*)vertex\s+(\S+)\s+(\S+)\s+(\S+)/gm, (_m, ind, a, b, c) => {
      const [x, y, z] = [parseFloat(a), parseFloat(b), parseFloat(c)];
      const [nx, ny, nz] = apply(x, y, z);
      return `${ind}vertex ${nx} ${ny} ${nz}`;
    });
    await fs.writeFile(dstPath, out, "utf8");
  }
}

async function runSlicing(job: JobRecord, outPath: string, supports: "on" | "off", profilePath: string) {
  // Update to processing
  updateStatus.run({ id: job.id, status: "processing", outputPath: null, errorMsg: null, finishedAt: null });

  // Build args string from env template
  const supportsFlag = (supports === "on") ? SUPPORTS_ON : SUPPORTS_OFF;
  const argTemplate = SLICER_ARGS
    .replaceAll("{INPUT}", job.inputPath)
    .replaceAll("{OUTPUT}", outPath)
    .replaceAll("{PROFILE}", profilePath)
    .replaceAll("{SUPPORTS}", supportsFlag);

  // Split into argv carefully (simple split on space; if you need complex quoting, switch to shell:true)
  const argv = argTemplate.match(/(?:[^\s"]+|"[^"]*")+/g)?.map(s => s.replace(/^"|"$/g, "")) || [];

  const proc = spawn(SLICER_CMD, argv, { stdio: ["ignore", "pipe", "pipe"] });

  let stderr = "";
  proc.stderr.on("data", (d) => { stderr += d.toString(); });
  proc.stdout.on("data", () => { /* could stream progress here */ });

  await new Promise<void>((resolve) => proc.on("close", () => resolve()));

  // Verify output exists
  const ok = await fs.stat(outPath).then(st => st.isFile()).catch(() => false);

  if (ok) {
    updateStatus.run({ id: job.id, status: "done", outputPath: outPath, errorMsg: null, finishedAt: Date.now() });
  } else {
    const msg = (stderr || "Slicing failed or produced no output").slice(0, 1000);
    updateStatus.run({ id: job.id, status: "error", outputPath: null, errorMsg: msg, finishedAt: Date.now() });
  }
}
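A minimal sketch of how the endpoints above could be exercised from Node 20. The field names (model, profile, supports, rotX/rotY/rotZ/scale) match the handler; the host, port, file name and profile name are assumptions:

// Sketch only: upload a model, request rotation/scale, then poll the job.
// Assumes the server from this commit is reachable at http://localhost:8080.
import fs from "fs/promises";

async function submitJob(): Promise<void> {
  const form = new FormData();
  form.append("model", new Blob([await fs.readFile("./part.stl")]), "part.stl");
  form.append("profile", "my-profile.ini"); // assumed to exist in /app/profiles
  form.append("supports", "on");
  form.append("rotX", "0");
  form.append("rotY", "0");
  form.append("rotZ", "90");
  form.append("scale", "1.5");

  const res = await fetch("http://localhost:8080/api/jobs", { method: "POST", body: form });
  const { id } = (await res.json()) as { id: string };

  // poll until the slicer finishes
  for (;;) {
    const job = (await (await fetch(`http://localhost:8080/api/jobs/${id}`)).json()) as { status: string };
    if (job.status === "done" || job.status === "error") { console.log(job); break; }
    await new Promise(r => setTimeout(r, 2000));
  }
}

submitJob().catch(console.error);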
backend/src/types.ts (Normal file, 13 lines added)
@@ -0,0 +1,13 @@
export interface JobRecord {
  id: string;
  filename: string;          // original filename
  ext: string;               // stl/3mf/obj
  profile: string;           // file path we used
  supports: "on" | "off";
  inputPath: string;         // stored upload path
  outputPath: string | null; // sliced output path (3MF) after slicing
  status: "queued" | "processing" | "done" | "error";
  errorMsg: string | null;
  createdAt: number;         // epoch ms
  finishedAt: number | null;
}
backend/tsconfig.json (Normal file, 13 lines added)
@@ -0,0 +1,13 @@
{
  "compilerOptions": {
    "target": "ES2022",
    "module": "ES2022",
    "moduleResolution": "Bundler",
    "outDir": "dist",
    "rootDir": "src",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true
  },
  "include": ["src"]
}